var/home/core/zuul-output/
var/home/core/zuul-output/logs/
var/home/core/zuul-output/logs/kubelet.log.gz
[gzip-compressed kubelet.log archive data omitted: binary contents are not recoverable as text]
5%l}bVZ<^ч Lv* 2HR6e `S3pH -ô(е):1-Z)1(ќG 19ϥI]PJJɖTGbu@)Nӳv4 $xBoˁlQX27FZzRvjyXY/@8(jZ{K}ϙb:"EoM1ۓx GWWBH%מqn..hAՅOB=!>}<{B"PpM)0L%rt!D .^gT莹+{A7" B kAPB* մO`)jk75ueZ b zX_ >^|0,;[Ɗau3Y!Y1^ ^ULQOJfrhc>zzkDq0ِ 3\g&<9<>%|c{7GMTouL3ɱvJpFņZGD,$ri+reؐOyu;/)}bUtzzGvE+TdfƧe[/Qq2K.Rf R6A0/p.%gT^L4!93U4ӳ@zb^""qmKZrYd!3!2NjK^NXAʳT3I{Oo 9^yZUʘh!:`rԪp|I]4W?^$9jaSܒ'5Y&tPT8 wJc֙I*c GñWƫ#3Yw8C:n:J>e$}D+=ɑTRgבD!]f:,,w tV/#BcDEc2i6'@ .wYrJYM6˨h7!rbc\|&!UY;kL^cEv8w}#i/S@>v@I $Jɬ +6Q'c2﷽yQddQd\D@n`!oAUJ70Y GUôp[?~rx7X44bHHճ]<{{M£7x+nhӔą`>5̣O5Ҩ^<4} xk8gLg9>I i:-M۟蒾V| RRԡSzN,GJMD~үQd Rlрިƃʌ3bT:bz:aXX1BbфJ9g,3roכ$ \/;RIS>j2ԶqZ:c^3zAE}|t5WK+բ.?8e*_y `QϨ,,}6eYeJ=,]؃,R,ei2z:64e]x$tA}|";:\eqgp&xI+lqlqyicߴgix6sŭ{6/6}Y%9x KY͟'zPLJx`78- <^Oh%H֡^gAI>gwaW)w:x^_)@\r0ӳDFs>s>1w-9z ly1u}nQdeڲV)&Veq\,Kq8^ހ\֠TI裸r9ZGN L21@V'Rz,osK}ԛdi]_Imte>Dc>7&O| gI3ZLt4~"*D!\K= 6'14\&],tQHtQȞF!:fW6hÐ39: mQ˲dSYvjR"!)z <9($WtAH <=ڃN rRm8kO =`}n\:}?8O/>opfہYMwf@L&anP#Qۦvm@ֈ\7]l 1ِ 7:3K)r}oF7Q ՝38Z< Q4d3̠ c^ }頥@uV&H 0Z7{bvmݻs,hTJ%n(KQVc,t)9("innӽ5v~} = gH*I I!uy \Y*b;O V9geRa!&&AN)6LdNdcR#l.jdzVm8kw&lgAХuTg1Dq}Y6.gٴ #EK6 4!2>NjK^NXAʳ$r *H4Bt ;U j8iO?.2w0R*'9jaSܒ'5Y&tPTɢ wJc֙I*c G1Jˋ FK7piϮ۞La/ xA^=aOXx_雼Y!x7Q%2 >xgU:ұ(L'pEO3i[r8U*TV>*! a2q'mmA%dJjF{"g,vQO=`}юjXPC$8Vg蒦^2G{\{=iD8$t&;*6)oS<(2YXvaɸMiJqoV%+ބBnBnBn"%% dI>"3.yƽ; 2P Qy[^VBăF!Z\a%d@O\mI`CehWm8kdx8J %?2D[?=/19٭V6P1Šcv^=:6)wm~ 5X- Cc&pl 8nQ"$98klD h .Y*dd%ڡl9Q6.wgfgf;B (&N&pRS5XTGXH6X{))ᒁ)'!&"0B,,xGI4 w޺88~%&7:YMȥrmc4eZpS ̓rXԷaJ; 0ح[e5LRO4: o/[[o_CSM4 Vc98kV욗=7Op{FuVR|6uVRwճzաY^b$uV:+R0ѷ25AO3e4Ő3U9Di~@tKFA8!0J28:O氊p!v 9;CQ{CTBA/Tj4vCP q`8-RLp7Qܳ[wvn|UܳxW+ar'_Ϧ6RSFg׻kvشhfxv%%uyxeI3evzNz:[4[Sg&gFNnZj|f!tzbBZߟZUa8'6sm͒ ̡e^skwۆƻ0WZnt^swt]x W4vHv͟7ךa'+Ҽ8iz8z\noNp.oQ,ϜWW`q(o?utGi\?r9I|%{\Gsύq,[ŵᣛM?`w~3sKCƠMi!8O[,CH*A,)D1üJS$ pS >qw햖W;xd[lѴ۾as?\ޟ<^=bx12z'aW[^QQ`p0 &kQ#E#$^>"G(pǎqQ3ƁdFy%8@EPʤ_SM95f QiJ`W y"r!hJޣT_ܢm $42R)D* kǖ{f#q-*+h")xQk=$Na^ 22_$i1_' fL/DPz({g&~\A]F٫0)"=><Տh>$PU'~ %e.͸EwzџS:B]Rk$:Q7bf\;xws@:JOaۈӎJZ$]C.944FW/[%A\p,Y,di FQRbm&\cFAw&lHAl~M͖DO5?Z5 4zٛ]r?-~O”Uul%lI'ܞ~Y]o!{_n.9.n:^ނoZ/ƿR /m/V?>5yz|p1䦿GwnE1\lϿ&(j~i h?nq7x_-r43Еơ=]M߯<7N?$F'vʤM!ct_cIw{k3?d7k>|CaeK2 GUT}Uvjrs͎/.,绊pL$S(!CZZIQEN7hbd'fa~ hal~<˩g?IġJnͳ/~7oWX>0Rۮ[/YspLw*0v 0:zV<͆X9c =lF,M'>k2UK HE>b(x]\.rKgqոlKƊ 5Qϧive/RthRE5+6m~ޅy~d$]]|'=Ձ`sF̵!6xd* ]x3Y엫y=bz-w_NE˭RJi)# "2nU^_ܖ{W{S-SMn]#'Avu~9{҆f֭Ku~&)Mu!Ԓ݄";E&u뎙~+ԉ> n|CW/%_k11FSC@~$$Po)ge80`=3z@}gS(BJ0J3|pCj=(#48j"ĽZuts rVVxOzVvg}qK DŽO"V{MHD0JR5A9McȭQ"gJ rOX0kOȱtUKWAxP_s5ga<ՒA#v  4:S%oA1ERB#V~Ϗ5X qJt[eE^y41<ؠYa`1rYiGc!X'XτZ*-FͲ}4O;"E RrL*"QB pLNT' Lv sǪ$<[u@ + K V3c(՞qH0Q}P{p@ Z$_\' qiz*ݻI9h/Rd%Jf&D3Ԇ#h`(H CNi/*"tJD(e'D CUT #waJdN%Ù%M8Ib`ᇄɊy1: Mؤ+T+bZ²>~] ? ? pE&UT7,' ^L|vg`q9bg6L+ ^^蜸}wl44biS[lZƽJ(>/ؠ'8,ó„ShFc_%9ͮ&)И/albwY.RI~XeY;N%CMggSŠ5>ʮ$Zn3?2lf􇩄$_JO7/U"vzI՘%{)Je KA)hSҎO,s'a^S N.:mqk Tasw7a Gf'j)pr24LKO|8H=3%|VYv8ŋqxCv3+lYUwĚLŻu]=\a1|vV*. X63#G;'y W%j|_u2lR`o8x7 o~<kEHs9b(ۺx{5U5bJZ9 VA \@B,Abw`|8K(B P )>o5M^J߇8 i1Qk \OBp)ҩn*a=bc`Az/ 9Qݢϫ G^F x1]}w(z_ɻd _pݭׂڷAuEq%}ԯxnk5 mϰ/z$Ejδ_.X/~<9;b4ԡ]I=6*өeL5$e}JR=JRFIe$Uтvg^=[/bd/>x49JYYD4E4ݤ395i \)? fETYdfo5疛7Bm@/Uwoyu*S&V>CNB$ʔFj> z jFaolL*1c`#1Rs:P ư 8 5]Yߝv~ޛVzm}!5/6ur6ׄpLVͽqX'$'~(' ^SC}7sr`FRxf&Wtt`\S&BQFo"֝%߶TBIBJVTDAc2%-%rƈL͈b j7ӎ}Am=`ox$t*a X[>]fL0^q_a!K V52kbKciup$";x{?V}Af-"ƈhDƪE4[~IkY#J/uH >1!d(̝!A3v" dP3!h1=iF:T8or>u6Ӓmq5E;​7i<!0Q>@錱PdOua1 REiǾ6_ԓk }U.([$ ُhbL[-j sᅋs\7:Z:C8/|+j&w8e{|S28]xwJ{Q n%c7e%%#{;q.A_;:^}L ~ҐeÐ}|: ]z5p&_ޖ.jؗ^zlXݗ:x4Ň/\<{g| FòۢQJUňqyz>t6<-G([1 Ҩ1bw6;.BWd*:ghDOCӨ55nX.zq|`Eok|$8T){߲,G tzN}j@c3,)uM|U(],1 d |ZTV'hZĽ(s %BbB^ɇHe YRr.\@ȊDTY`. 
lHS(u(*MBGa/54疅pc{T&= yyo2GxsЕ{S߽rqέƕ爟pԨ؈YFC*HŔEnGHX\v@M:i5 ;bi.ЎYQֻ+6J"1RRʳQ(Zg5g_˳Ϧӳ屃~=>"Ƹޯ<,Y`%JlJ{߁cUJ#Ⓣ-[>zBh{t&{dqnRJ_pWzm=h8$ \Uq8Қ*!\Im*ЕVpC*WUJ9•N> [u0pUup(p*\=CBiط/`TH1cߍfӳũubc% B;_޽r;#n[ҥvSXvtN^.]m?se`*kW<:\BMo3#n5e^'C䪴ⱻ'W!=92(V \Uq>bi{RWrei<}S+ɣS?W䃽G/㷙W2&ݗkS$SlbRYx`ic^^>t<γh5I/*?5tz:H^ЯfOrg%5bޘ٧ *]3P9ׁE0`0բkwI( SRA*׊G](fJ ohY[(O;鐑O(یEc[;E*_4_(:,M6)ROvFۃ aNNPԡ0*-¾3*% $D?Dx+=>V';IiiJZ"ȟ|u ,f'q~Q8Gn/j-k m.aL=Dgv]dL*g):s0@< EpBe*=lZʸrhh94Z -CCˡrhh94Z=C-YZh&)flJz7+am"[mVb28a0dg23ƙ Lq&xwJ儑4TS[8ҤW#FWBu!A]6g&gLMyLM轋=F1LnSkVa>Rcf0SI^(I`;b< 6'L0Q[c%о%h.dNJ#3: AflT ݣMrRX5Q*IxMtB)y-/(ƕb H]L!&xY 0bhi/8m!/2(L5^\ @ATicrIՌjų' ~ҥmG^X^IMQC{ek/HEJKS p鎰6t]$AtJP\,V/ ?#kx7}Nx32"eoUxWk P:k٥+BgKm+.y~ag5Sm.(Ji)Ph"`.RHY͘U"}4ڛ@4G;$c]~h!Z5zF?~N&tS$oQ2\61'5"!aH6ydHw/+ؿ~#hq#zs"qWOt[! x5]Hh)3!qwwd{:)vݕ/ΙRDVGHQ_krһIz>Qaj)|Gg3SB׳VW߾d<[W.giMb}i1#Jq`GĎJ?Cyyٟ݇(BvCWuꂒ ៲@%)Cmb 2])r.HKbjrR)Q3 cJhr RV:p% S9M^" )*Y<19j&frĘ82x?6]qP{Kqч=Cs|'=\nW))(V)u1!JER0aƈg v QI:1o]',w֤a'wrp+^5voɿA-ߓӳ=ΏY mѺԐt'Uz߼E@7x5x5P^ =WC\I! >D}l&%cIͤEt;̩ :%; u1E@uK.r:J=(KRltų#XT-фDdKFʑXJQ^5|Y #twHnϻ+?62*Ylߒb> 46=#~s2?}^8ЀgB 0Ob@e]-.Y۵rzN4.3Ga}jWu?q>zہZNL>Q žOJ&׆'Јy>;|4\|n{ieJq &ڻE뷓ᇋ3hɠRUN~KYl7n*Z}wE"w^&>Y7m/i: h2ޗZ齅V̗*CYU>$S2:;hc`7,`3,:2u52Z}Ca` GK!<4j(: АHba)X,Ie|aF{u3w5IOHקQm`3nJVfw{LIqAdcb)٘PJ6&n%zĥ}ƿaF >oh]$=,*cƳLs,@lT椇{Z_[ZX{]PJ:$DMQin[ցB;hj x$ )[X1cFˈchnEJjdkl_,Z+==](M)[]-s6nPyxk 8o߁rHoX4*a%f& ,izBw'O%4prpW&7Ve2Cb65 m&؀.&Hڼ9Zgк|:Zo&9)]ٞ}coe-ZZw'{Pix Vvr<|97ϼ̃[Nt z]hmfVzCѥh.t| a7m2C̕{bA7b!M4"~*ͩD#6EXi:6/'+ڼL]᪈l:2?)c.;٧;pm&Pe$uÌc$ =zz|_kg*G^].5jf˦`d"xk{K8yExH3&Uxd!RHԔ1тFрG!eLDMA\f5pC`d72*Å3qSp$q D`ȋu`;@ƶ&ۡa9$L€/e;Q!(8vН)&8`*;۽4 ՊP(/Z.v9Py1Rm贶9 F:Rb&y$IU1kQRk ΡLȳ ķΎ kJw4Ex"pZ;Qqƈ ) rlւ@0x̶h 8p &u~!#R* XEdZFU!@H%1i7{,|uGU)Yq5quI D0`(h!,R;,`<Gj ߜ Z; dAkwE}fF2Zz;3yZLi.j7r_y?I ay([f+>o釢E(?3m@@ MSY8^&Ti 0NjE"ɈL?mgi0ͫE耂:̿l!J'p |2hLLqd Xc/nUQCR4 jxTs3a%rs ˽lYQ[cgYY0[OWوO"'/nJ x˶e$ߪU޾\8XHJfQ8*2Ǒ3Rg`ifV-P2zEPG R2飶ěU`xXĵm[cg|lakX[(z[xPVEW'w[W\n>*;j(8>}'zt4hC$RଁJFM<:t:)(eA+X1TȠ3 j-$M* %0HEt!ߪmb|W fWvk-z{$qi NQcjA ![,K'K0rX1gF=$('qSx.(G@uƴl[cg=,A"N2]c-l""q+ob |oD*'4Q $1bQ[NGjE0`:) 8"CW!!`"%^2:łK$MFbRͶˡv}Wm.]lMJkuvQv$Gʬ7o F {BDi`oU]IGW~=܃ k*A,g~\PTEw4&%b(}Ɣ1e`R1=)it~/؁}xtD!#LQ!7Yb@'3#*%N5`;΃3} Џnq_ǗD8Nb>Yq>U!CEt8y糫YJ[/HPI/ hL uq^Y&=)h(\._fa_QӍ[okר]KHb fw_t=Ƚ n[2^vq$c]sS4v].ntW]p1ϧ2: $O7Gs(*H\RDk]?l%7Qлtp.H6ؼJ! "]_7Ȇxܭ5/yxwkϰ8IՉӕ 0:棰,X=}u}Kb7)z0dlpٻݹ*峳/~C+~nE~΋L? n^ȃW^%(j|Ӌ_P_.χ?fMOG)xqb՝nV~w+ܤC.)0%IGH'ǒ7_Oo7eN[ܔqC5ӒtG }z2UcӕC kq黲:~12tLl=wUm\ Cga4)/ӓ:MB>'?Ɵ~gF&?p7O8<.' ɊMjxRt]9I@zO<<$63(@'J"]xx_s7@(X:0< .[.+rF~NivhR~żzǛ*Ԓ 1]|KN( \Ibpzq1kY`Ou`>#Q%3m$`d> ~s)ܼO;o8UZoiߕz;z)^κ4?<(4LMCzYj?7O C#S3 N 3+Sfb m Άynp,ջ:n][6~7)UmoO3#sF%N+ɉ6&.=@qؔ?v>Jc{@_J"_k11FSC@> Y˔ ³O^[t,0UԖ3Z0n'}1 QiafD0}QFEi$.eEBHњl<6k9Ԉm/켭:k@)e踛OL 9$-r7;uxO< Zf;gG^VR*- 5[_ ?\rVqPͪ%;cj{8_lrqwd_p9Np_e)!)+JU3C9MlHLwMwUOW'%qɗS,%<E F6:TB9חz]BYG'6_KPLg`OqNi8;ΡD\jō0WFqEq]_+>AcpbR,ەjMK;}SX'ժU)NGvZˣ}O~ts:_0wyFl-E)9`=u]SեD]\ihaxӨĉf?h3pn 5zꆫ5U?>_~[W^-^;(rK^;OԲ:er /B}Q۴ZPՃ5k5MAeA @~7|ׁFU;HJ4=u.Ut߳U}qCVJ{0Οa}^W`yfjw{Q}R獪y˘ 41 :ШɰҸĄ!:p>gHe*iN{SGAK禬CGCx%rmR z1:PPĩI@X?dA1:Ӊ=yw#|1SukVZn/\-Cu<\7y@H.oIp)ϋ9R=cڛ1tl}^KN 0UYh!,O!IÁjxk`.iG]7Y-gO)fȣQuZAsUuⲂeG-ɌHݻ8 /̛\-8SjiX@= %5?N~߻+=&-?BqXW({@ 4W\+z+C+RBVZEUɣ+Wc+VC+WWoY]JߝkTlx j5yZp e r XmW{V&u"BksgdIeA@aJ(EVONc*Q`s<^%k&i5(`=L0-2kEmZAmHsFo`R^ Zi+B;"W D2q,Jɡj wվBWMiA&$qa2߼|;h-F:_ȍSZ)!By_ Dpd/2TZImIyBRA=ֳuJ)\-\HLIL6p%%'$Iɀt(g))5iVK3TL*}&>JI߮gR7QTL*TL*}&>JIϤgR3TL*}&>JTL*}&>JIϤgRYh(O/pKDV+b_e] WBLJι/Ru1xS(]B}VG/+;.!oO캞׳+$yՀ2/4VX[iy;/]v)J^w6z|Ѕwc@xO[1 BPJO58J}LrXQA6;ǷWSSȻ~F(Y M4N! 
³}[}xr=nhPm W=n s/ϫ#aϣѬ#aWJ֜Z-ma<-I<">L(KRm__3f5<0\.F%XYϼ?|APRRNe)8ӥ* MBfO§3y؜zNSOSO;?"ij"+]Rup`BC(uvy't m T3tMARR23C7ԦG[*D,ΔH]D`UiTk Zٽ}@e@x}y1RF8S6O'"747br08(߄Zuj :Td̆gI Ix0͸^kcCL 5]`U NnvS`ғ=׽.پbvSvZ'iAs\T1{d F iO*-^8EƁ)f@o$P;x'5#ė,18 wI:i:y~>fØ;ӏ}QwGMr=7Bq$e1A0ǜKF$F!D3)"ZC QZ@lHMyԂe}0" \i*y;Fy&j8/:δd_\q4f.7uFzAdRB(,*h&+Sagq(xh'+l{Yj);/n8 W(*$R3:g?$1hMXڬc)LsAQoUړx%f6z- 'y3,0aRz*$(9/Bg$Oj K<&t-d)sԘ\wrT̂_*X5@sw5t&Ύ$ϙw&T:e,c4-}sqXw!:[?ozxz#_/enaĜIf)qsd8媌~hxuF3˙+{DxD@]nԸ>0LN*" B:C4V(A-M:Eg"4lAnLu6!FS&F5" F9`)؝pJ8NPXЙ8G#vgZHypލow;^ZpŠKW˦{^j}%a{$(K6S)Bb rY!QL- L#|$bszx&CO+!uFLt ( !ѱDdUY쭡 G2s6IţQ b#!SAgFzIz!$ٜPs\eB$g%huZfnCDQgmL;< `2ֻEt83Pu !dGR apO»a #vV `lƩ%ۤ8ל8*&(4ņ`.8OfiVnLb2mk8?U\R1^@IuɨPK΋i\e*ğt:)]@a+,]N޻Q[ETѫI(Rf{v(S/GlAwG!wmFy|2:p `E]dj.KSӴ6[9h{K[˵r ]IgMӊXɚ˭ݶ[,D,W%6*(m^m%dڦ";;IC=Q;k/exß~k<)|Fc,z]_xv8Q{Ƒl O/&Y7#qp?O1ErIʎl!EJJM'@$k٬驪>]}jYhs{קmբŧW+5_OwN?|?3Ezvod<={~0 c7dO5o}t.Z^}e2WF0gWc?'g6YT9헋9ƿ&7&odS [`xykXQ_/q4V:w.)9aT+/m<\ekh0;^Vnq';C恚wZbri!s?~[7XmG'Ne54靣 Ozu \&iQ{m\0'~eo3ȧ~k6 F?0~SnlEERa-v}y꾽 #={XqzVů}8 8K&}* ֜ 7wEGk4efhu'Mo-/0KdA6Z{5AM5 o淓]^b޼Ǜ& BG5];7"Q% ^}4׬P1jM\X*l E}4 /Mߍs? jo\[+;0.`2Ve&~wPuaYs(O N G Q20; I-++wF}9ܓ.`2|hA˺mԞ6^0@mݲ3l3޶Ǽ3kD7 ^nŊquo)(mwETuY40,a˃fN e6M1962ߖ(E\ *SABّ%痘Z(;@Wx=Eo;"PO/$ppѿ=\L?DZMȁa0) {s9:Q4'tMes,7!ۋoڸ /ԋx!O]o{wYht=ܶY rXY 5]kz~n.՟~h:w^e _xQ~f~1/}U_4Nz ^.uL}yCm]/KQW@<_\\ôBX fz*هVTZB+B4[ϗww'eG읂BL< 1&/&0CJnb4J8myN OrY8"wvҡy41GPKPrAT8Kj#9 hr+O\&&="-IhŒ34vK Q)Ch D .rb /lgj lqyfG}L&KBQ$ItL&Ud`|lCZQ2͍gRx]Fe2NZhH2Ni!d(8梘Nܓ4?FLhc" jnQ5G"X1KDFsM4MM:k+"]LcyEώ}0 YpS Lx4aGjv P#&$N;f%$˃z%r:(gK0VX:Da_",O_t;&UdX1'5'.䉒MxiIpB%eygK\YRerh.e!K+Ƚg=OV>/8Fg8N"<2j &,%`DHO,y71%Ph |l+hfrp<:Qӆsy*wJiɱCGOa\ w^>L?G1 PUHGqf+I ShDǠԁ'G14hkK6I u1)UN\NADqG8ρ`_9_l.p1͌$!(bbyZ Q;s`S-j! ̐xoޞI@>M4lH3Xmx5Ejg;YCkQo(ѹ^Wy֞Ye8.qZp$ޕU‚7b(X*g^ŸNztQCXQxF<^T8gRf2 :> ꌡQV!+II/=:٢ Aznl $ Tͳ T&H@xFo }%O=*ݓ`}C05T`9dTMgۑGX ppVYDeA$B9A#;BB0˜>hٯsAo Bh )^p! 4D3]~u*䬈JjENh 'w#7iԃZ˒-< 2Fo ƃjN!2aB)yRY^6,x,E&ixCI 9-rF#ȳ3HXRl◯h(^.tB],/4V "} SyLqhU6M"Pe; 踤]ՊQ.Gc1 .#  Ց ls":e$d,*ɇ[elc;;"F3Îzo!Mh+/nY h9d_6/ ,S6/+՗R6/ḴRjՕ F" Ƨqq>\Q0H{yV`CZXZE OL3Ub9̩ o^71)]e]bThfޢE >@=#z S?:JP@)5khm:%* *Gx Mq&&5,]4Yˍͻػ^bI^&v/}|G9#k^o8ȵތU6>z?mʲ_mfɸqwfb`?GVozƢ-7Pl>s<ߏj~dXA|Z_$:]JۨʎvV}1wE\šol~U>|*w+Ǹ%8Ʌ: w7#Bnm+Rכ?f {=;A12|\[Wغ~ܺjZos4J[G[Vزjun ﭞ]ޣ畖a:ooqw{t:Xxƙ!PC::}_H7aewՂ˰p,r/j {#Jź-;ѱ<6ϝq<ܨV141[Ż/>7jO[%IzL}Yzg_Ҟ[|@>:is^Iڇu ۗ_. loCDqGmzacHqCR=-܃/MJC6=.jTGGi}bϐIeO A)y:pp}2phMxpۿWhI<V9K;E?-~>8</;-ϰp½s@Zɥo+Wḇ;owprXz!]L4דpx8Bto.n8dƊ%?W"AƊ8| ǓwW;k~u7+7z!}̸Pv^+'O/{zE xSl׀av2-Q 2}f3qw˚ oVMǬ7t,/\5HUY)Ԩ7|Jp,*IY-RI[iGi<ճ^bg{pCFK$3RoL<, CRBTó[i|E#4)lR!TT2]>劎ƨ2:ּk94Y<5Yqzx/dp" !0btG~?ֳØǏ,1KƓlX,O@ɐ>% V?W2X dGUNȪs#s( z[4hZ)*> ˙{pSc%suc5H܉2VM`rce_GR:CZq*ii f`8[hkOJ=k]:gIhCR [AioeM1j frYd+` `֖B6E>JHJEu)I6$2`4.Z=7d) \Bm xHtv+ ,*T0:Y-Yq9mY]7n4`G\56My EɓcHm&4,HYgEf\hZh ͢# 3=U[˺ FBP-'ơ(͛հM1 XȀ74T38]-x*yY7r ( [՛LBWBJbj$ ʈT@6$͐ LE1K3X{ Ex*dM'>; , _tkpUE`L2z]C c6B͠j`o]TP? +1d a! q`1f#vf>TKQvdr\ࠤ`g<"TR r nߤΖJ- ڛϐhdD\`(M UTp4k˒p,Eydo(_uV* yJ )mFueJsOBY瞕ud"z_\,88ٖP4F^>wu($ڗEDiy"$$VsA Pe9@=nweXs6CB |[4q@gzL! Ώ!Cth/Z1!/Eg' NJ12Dfj$%0!`fEh=[E Mے11- HМ6FVe K8 2! Jw(]45-V40s3)<ޢY7q_4k ya-AԒ"4E"eLԴB*cA>8M>) Õ.LK}4/|OxH\th"(Qkv37< /oP, `ܬLt+QϳW5 W1pi[&K]V3!Ir ӧU&h# R;ЋYuD! p9X.h 7tE`jI7Ӡk@)@a D5"=V @nsp1Bv9 ܱ -PY9(o5YݐҵDx "<)YE cn:XTԙ$f8*}"X(ga'D D%M@r")5ZOt&X3,2A5Ve#o J& /Q{yuM joV[A A" 9kU 0dJ-K6Ѧ`A[EAӃ~X97_+T`e*nfV48zCژ"nQdPfxjSѬ1+^k>Hr8%E. 
dLB?h0-gE<A z XmڔRA5r#`bsU.j0uF:( ЁrH'+SnC\꛷66Bf5+BJ1$A'W W#ooPgPxѭdZSLBˢ T@G f^?A-и'҅H*B?QNڙZg>B4SH5k+U|H:IBhaR5gm} pStreAVAjۏ oj+q'Vk D O+@(\u"Àeđ3^ׅĸ~)( '5>T2'Yl5gc)k`Uj4N!b!}N1 `|fƗH0Dis4\!vk<*?"u-F@QR `j?ʭ_=jǗ㳍CT"7# Ftn 4.XgP0+~!fBzy)دMfΟw<Ak>M$m"! BqOǸޤ;@xN ٝ@ ԝ@ ԝ@ ԝ@ ԝ@ ԝ@ ԝ@ ԝ@ ԝ@ ԝ@ ԝ@ ԝ@ ԝ@ ԝ@ ԝ@ ԝ@ ԝ@ ԝ@ ԝ@ ԝ@ ԝ@ ԝ@ ԝ@ |@I d-Z8 O h?z'Lgr> ԝ@ ԝ@ ԝ@ ԝ@ ԝ@ ԝ@ ԝ@ ԝ@ ԝ@ ԝ@ ԝ@ ԝ@ ԝ@ ԝ@ ԝ@ ԝ@ ԝ@ ԝ@ ԝ@ ԝ@ ԝ@ ԝ@ ԝ@ 3/k>>x?%#~jL5?;^ߏ߿.#tЪokyx!f\ %0gi~ 0)"7LJz6|(@FX9o K$K|{/)!H2gh+ԥ/ KׂxUa]y hѡa7œr`47}e6Eqɠ,:~XlAQ/#x5/֖} z 7Ÿ[<qq//Ϯ{ڍ_Y˹yO]an@o+ɲ%S3_=0*b_\{ͽ.8`]}+U2/M_q}&Pk/Zfoh= Dӄ]Ud0->oܨ=^/->+kcԗ] iO_RtQV^KQĹ* ޢÝ!.uz=^.)޸m)xJ|}6ѤWF|ӻo?,gg!N]8-qOP1'28 σ6:'mtN=mhD7S]7?)ȣ-רDs<_ ?z9{1 =$ͼTEr= dG; P4s|]iݛt;hnL- Z5ZY Ӵ5`u)\O;^+(^.Fk|w4䏋?`Gzk_o)⩺Uywte`﹫np:SW]BWmz ի[0g<Bzt((tutń&fDWy`U"BV p+u ] ]qI,a~o}6U %'5s=VĎ(cNI* Po)w,w7Uh:Mc2F M MKiѕ%JKM.th:]!JQ>1"])E{_ $5lt(%ag0'BgCWfCWZIB QB ]]i Rr ]!\r+Dm-ej"xN_D8VS"OWRg+A*lSM:UtϛuC+~JŇEW<zNcBC"2+Y6tp-ͅtt(,tut {fgCWW\ њ+@9l/^ | 9^7pm`>dp^Vruh́ {bީ%Jº52om[yynb% \Yີne.<j]q"PQǎ9Aa{"PHFS {_G*7ءO-J+UZ8i9} YNWd3 h<]!2zthKSAKHKcZk.hހPRț; Dɥ׿y> Mg 5dzD"RCjP^J5m4#m[:]!\gh=KDii+K(9\`I6tp_u Ѫ`Fć}>tEa.%{9#}f)>J:4gv+ZM_ʆa4^NWBWHWcdFt-VlѪ+DI+N<#l ]!ZeNWBWHW*iFt 8V$+JjƔ:J3X:[R+ʈ$n\hr>tF>@V /O l 2hU%!ҕfRgdt(5-0hN 4B&gr:BWHW`Z '˳qTCWMώ Y'Z훮:5v_SPʁ+ցX6= W׾ W`7: Q*^ bΈU"B&u%t QrQ ʈ{npZ Qg !ӡg T1֊xNv :@G=q٦2$T]ِTi!&g69sHթJ^9rgB_L<!tuP،0&kH.S Q2DAN-H^n6C-k^|D[hzs W{ww,2P8}7K.vΠ{}|5EhB>yl __r7An:3.i  p/;^L7 ib7~}ۓҦ'mvdu:5]tc?ֶf|>?o;U |kGEgk3?6L5}Dvn.OἺǤ}@;*^J[,zRPjuP OV ~ +JXjlvcc>HbCSS77A6Ǵ] 5lj 2Qy4#jx?k#zhJ)X̅S0Z@Vliɭ㛹KA~ALEfXc%? 8چ:"vsa; q 2/|v7>.ͷ/,Sfs-Ajk PK\h ֈD\(GTkkk5^(`G ~YTsj|LQ[.)93pS>ԋ;k/|1܀[\vvuQYbuf3=\v;|%tb%L+J1YG0v=Z/&K%6`**=MsV%.IVg=8y d5c2(SՁ{pop;ugЏ^sy > Բc1e^qny?pϕHKMr⯽&/)x{,~rhu߃_j)@M F*~U7Wh=A^YSḍhC$ 9>:̨WnspHmJ/EUQTH#.Yj֜Ip`8S1M<<gֳ 8q1n'*k~9kӖݡ߽uݒ|=)3+ `,1 #iB0.zIƁ[YR:o/wކwg<6P06%L{2{ {}[7hNسd69_,+7i_˴t"XU1cx%w^^ ? 
[x5^|=FKὭPm$PMF'8Eu*]ˆKIn]든BFcBͼi Fy\f!)${>̲VFO-OO%U_g}N<R#,gsZVNȚP"HO0|a >A| c*h`"jN*h_NB JkHMlT n0g4U~0SV},`,^ϲEbIb )Ia\h_'ZmWIZJM%Rl&YUSpIQF9ᔕxݳ :s˟\_V}nT釗y—~RuZ!ϊT}zS\+SɗSlt6;\FI 9t&e'IGj- ݽDSRF݄W9cE4U5aߟ&x3Q6ـM֓o=3-r.ň""HCt&XG4ڹT;R FY-|~?uPFk!( P7 2d=ބX4KR/]hq[D?;֖BNzW-'Cn/n.A{Epľ\8ui(_e˝|ۇ}\XTY/GkB98 LD˔1Y =<kDpP\Tg0SMK=mb9t!$#[U˝jz9 9%'4HZFL6k%*U.9Ý6$fuM Cċֵ:a)&k+nX h9PXtGvV{:2>2TӇWh :iUA@KM+"U"[ymd>5.!gپvо+Xa;G_敞ö%!Y!*(zEzH<a UEy̦76PKܰZ=o?ABbz9>w ˓ G_c'؇=7 |l ?GÆ$ܻm۹s#GvG9\|q65q.*[\.m} ߙzOdIs*ٔ6ٔlZ\J6!C/ٔ+lRQX*/p9Y+U E>+`>Iӹ{*,2Bm!恧i>h!j5k&c#\⿔q4`{(!j༷T,!X(:%,wVzsޫ/NE%p6M8_nNB=Qk8z~ )!tgUfvJM3GP/-p{ң٘o.S:er#k\.mIgC.is(l]asjz3yt U7fM[VزZHk{ۉ/Znhwn#hmۮ?Bxd}|ϫXkoa u%_6mUpP~ucR3>u'K{nR-%j}s.|bZtXe&sPRQNe%8ӕGV) B&OXw;u99уpY$^<)&:#LR„a:pF'4F%Љt.{i)9|qv:xFRb"NFH&i)`,SFEtQ8NQG[6; 2b 5 2%͟hQI4#2zٹKӢNʌf)Ln;BbrcV,[6g(*o?4I95[ڤ& Y$J&Du 8P!DtQdTF$*cG=L0Rd=.'Mi*(-,J#c1vZ-fb+ ͙g˸!Cf4,*G Ft[с!Ady-:K\:9*%X(vCv\d{8˰ɕJx%f6l916!k Jm #v1v# 㚋y(]L:UڝI.02 GJ=JP裆Acf vKJơRTȍ7 |:Z 6Մ# GĨS rFЩJU-vVzAI jm5Yo9 1%G8ESE# 99Uåĕ",߮C  $j/IeXAHg % |^5\Ct&"jP Dp`RT~k}܏yHLђ*r~t5 ؁-gh(^GD"&923>zC]2Z]~|׏_O?]_u2h?z1̍O?_rJiso~M_.~u-e<`77F>.uVBzKyf977ep6PObw\VdLOX&_z25әן~]Q}$Ydv"_^X^nr>:7c)u?ܸ&Qp}~:Ukk N/Oх yַޗ+XtN>\}ȣ* 8:y;௵PL,Q/$z׿*+F7 }3ЖC gII-=?][sF+(=% iVIl6NڇJ5W+dҎ?=$R(ȢlA$1MD7_sȝ98 (?n؁`e;Xr.o% nKȷ5q6?@8sKI+aH;5ӛe^}[2SEvK5rF3AgQFY!p% k`h+H;:YL%`(/:X6l.B/L;rH TD2' 0-N+HT' L=5 kY@qWюnu@u)t^Z͌T Y.ŽO ~Rp:ݩu,yM I9h/Rl%Jf&D`f G)-3D)( cgH6Ni7*"t DhJ.J8DJ[IL9B^#oz迗V:9xWeJk`EdŲ "Iu%{N.Qe#|T P0|(^~W(` 09ZzUj0 n,x }q.@`z>Rdl !Vh,8^׳ҵţlQ^T|wcr@a)ZhgoXْaZhL/'aZd`1 1LwMl۬qgpΟ =]eg3'iLB:X>`WX$Zn 7b> 0a"οć _ټVj u%)Vc0Kn(!"c"¹\WS@͹(]tb քK Z_ڏFN4SEg$&ifOzx4zl renJAF+AVXRpKZSR?ڦlzx]{K <.jZŹʂ297%q"H;sςM]8 i1QS3 SOBp.ًQG ,:DVwКNl~@] lYry>Ak-C'>C'x>aXNy9[yaפ rIQI)%xW2L1eXA89ٷdIؤa"-&Ý4mZiYlJa^i R$#Y@S0b ʮu{u>z=MCooC(NaXL\ ?J~Q\:-%k뺾wópGd_p/ܗ`+suNKnyKuɢ m_KZokmDOoV85q ܭ߃Mr:J6Y{^MLER,BS_hr0)ztMN0f@q?gIJ;w&%lSSrh em+3+Ql*s$PWIJE; J h HgWI\ş \]$%\}1pE7z%0=!\O\'c~Rr[pEWmKa\%q|.p :\%)++""]W -j vphz]AW@߿WѸfS|WZ *ֵ7C=@p\dCbXb=4u=K<:N]Xi_ZoHb@9dyJ!rU{5TK7{P:m*G1UEìȻƚwoVfӂodXYK/r4ƱRlB*6ʔx:x[<@8-vŎ;b[xo+-{icJֺu>4 'mgoUo5l {iSKF_2z"B;/qk5̃\PpMRp͇E$Y}Q n&fR:$b\F5LGZc2V7LG eZX D佖豉ր ihysqkliVޡd \ #2܇<( 6xp4F|0I(SEp Xc/nPCR ]QQq]`D@1,,VJeCmM 5l$gLF㋝ Жla"YzN}WMڶUޙ-הs6Ts^W|wLGa: QEST9Q90t`,,*2 %ZA@H1 N9ťoR4Ȍ ^`A ƜE\ؚ8w#c{JkXؚflg ҳMg3^X>tPo@iчA#6^ $  BNZdt OD*:)eA+iA3ɞQk % 6N0+D{k Z`ZD"ȭZFĹh藦ڭiǮP`oxƥqD;Ea94%a.)f)eF C0Li# U3;x8ek f0$('q ),28=r0Ҙ5q&G?6hB DGB0m1+JPg5i HE #*%N[.0.F, Uxd!RTL^jʈhF3@G!eLDEِ>ƕ0uboKǠo׬F6*t^̇~y#煮pmsYM>߲'IcD@=r09ӳΚiK k;DxDHܨq 0<0 /- ED։S8#B'*bcQEw5 V)N0}0,qajMH{52{30C& ѡ㾯z3upqCe{K*Z?e o2\PݣOEz+lzm!Y+kqjqx\J=,>>/0ON- Z= =s{z.5qYуLl[уg4%$uby(ڑ>!?ÍqxuxWT ]vHGi8t6ڸ+&V;ϐ h=/<^ Qg;6{ls'] Clt <.v3A]'l)oc׭6t;SwFKBwæ/5CD oMz7Ι.rm6 N9K Rh)sr y0Ax2T}yyj[}w|Hֲlv-n:.\$"e+٘HI*@RbO)@*z|JEío&<<_DK,h-q%0-NDBDk45"3 (.hܗc#P("ɋ%tM3u,9qncu@G'x$^j•`dx"`G2Khܚgk:D4MTRI*89YW>PfXbu`1Ó䌒L"U8t^K؈7&lx.Vh!i Z(Yˌ mee;gϚgɩMKC'*fww_ƹe[YvܻMȍM(sfq&&m >NGh+,6V턠tS%~7q8ywM:M˵ Bdm?'!KhXFѮ}+\7~TwWo٬;0INV=il:LwYv4f9,& s2s{/eXnNbB^:ϦMqəN~oէ!\-Ny/t7=;2mmٕ_r:D>qR[m28 ɤN2ͽv#bH 99YZqe"6zp-y/?'hC \2DHpIiNHQ)Zt"r-'"kakzXņ`ye|}45{I>_@39LFIQ:# Q%#ؽ$zP-B,v44FC+jպݰrѾy!1!񽿗7,=u8/0t&/Jl҉`UŌpWy? 
;Z5OjsY5|V{Pm[K &Gg'4uI&KInDI*J")!d`$PO1&$ƿL+Mx* g?0z !=H;Q2o`)Cq/ف\>%B^}^p-}>m^HfMsu6\b a9ӲrB&BIMH1B#dN(Dvz˜MBs 2WN $ $spvo ]mЯ$?}Fe; s-Iܩ7zź}>FBSVb6+.z1kfpU>TqTo_QFY0bę#q@qJ6 aAA jIO&h:W5e\sAZHVX;Ef3`5`U.GvlSV2QTgv]~rU;T0u bFBxUsANqB<׆ ~TS^y| Y@RO+"U"[ymd>5;#V!)H_C=Jhi .CtR+eGluѬG#,Jr\4OqGz$ *I91wF3ϓL$f^m\JEn֙~#{!gzIgsB,y|-Y*hz99KcD$mt F PY MJGğt,h6EP4P21C7D/(18-(KX(%,wVzS>@)6(]_S]S'2#tUVf3o'lszWUTMft‡w}}hMΡAYݸl:'8 R6IغUzw8zO63ͦClYa˪û5|x睱r@k-w;r# f*IIC@엝 u;vtoEIv/ul:lL햏d*^/?ؼD۝1^Aw>(Z94o'nû:3/9}7.w1}O3i(q?C<7|͛v۷t~y˼-XDiz|O%]J0P\ю;{G3Q5Lb˟O^K3ʭ{z8}}}._()Tbb'lgJ)8cP8$m%AG٣|GIE]ێUT7&g~%'8-1~F2OsE]2{]5_ֲy۶Wq׳d6WVsJSzIL2U~U/p_*?}g4,xEϕ6^/[,lN{VIn9ʖ^k:f]HҔf\$ ]!ZEj,J} .ŽNy6I o=v_@;0{JaHyIǑ^/{?yE 9ZXvsJcRzZX KfڨZ !([]0%}[ݩVh4K$U;>8s PIYn2㭏!eP52:rFggLI +/fR !65S,;\ɪibȺ}t 5&Uv.\qLh6B(VJFp.x+aj*ӎ}egr̸ A&Op OJz(n3D:!)g%F:=c-*=)3X 6,T^ޞ}k(N}bWC%mKJ2m崳5#j2yЁƥ~04ؘ|,P,b+(^ToRRD#YjЧ=0؈|쪤>`}Ӿvkmx=ApD!Qq7{4ԢMxG^ݛp% npWM=D%+pur]Gv햳 Zcǎr W+IkW"u&^p%j FcS̈́oWeӑ; &{0jZ'\ SG++pئJpW \5=t\5&\ PI;U \Mt:;v\5M:@\@#\`arWM-qTML: \ifwsrբg<Njw+R8oՋwH*jlcD௳&|~:9IIoKyZ6i Qi`o([ð[Y*YkKě3Ϯ>6Vj@Q+]OO+*Ͻ54O`̫_;ywCT_q fڽsE>r3NOzS`b戰'5qSs>oxطOjeb7Fe12Y_ۉ椉D}m!hOY_:?7b/bYַI2Eoo,-w*zAϠͨ?[Iv]nkhh Bl/C @BS6> -uJ;np"ָ㪩4qe4]vKVpZ.$GmF,7KzwsquTߋRƺr>Wp~wbGdz۩I}w}x(_?(ng{EK0lձHڜT.ޝIޱ˫?xxvyWj ?J~Jڐy"AΜA2Ҏֲ#Si m޺Z9?I*Wg'7JN6jE 7%B֨#Iz ~_??wG7Kr.ow9ۧVᬥm7%3Y`z,-Xv5F5qTiq :;•3UkZkƎ W+o,POѕ7kr}S04d+޲HZ f7 Sa*fyxcUK D_Ut\5&\ 4W`JU઩P04EW+d:•] \5ڌWM%Åv8P^M1]( $̗؏zA%0ڍa媻A0-Lx;ՠkc^SsssFz˩2hT+h@L? 0npZ D-*;ʱ7EpeAkg:•6t+˖{fS 4pr`Žp%'r T/jjэWM W+Ii 6]5l{US;+QjJ\-).a{\6 S#|MzlӃ64s 6t&USˣUS&\"v#\` \5ZqT2L:@\!zD0~Ak􂫦qj*6>epɍaW"s7W pӦ'4qwYgfgzrzJ,$ZPmq?eIoW~|qeſEli9˕A-SͲl,w ?ulzF W?ӐʧEVκ@'RҚG(1v1O\PN OMdCassh^+!R1 SE Kkީ,b u}]0y0Uj7x.@l'@(HoRpA|\utAZk| 5Ih.*I&]j24DQh 碍obd2m+ ~1aJHe$CҪ*.1Pf@熀Bhe"!o$t i͂ ;*P@Rjs%Go{!L!*X/V"$Ik+HHKd-JcQ"\vKFXvk,a5F E$\r*w%KE, "(ԠS$Ÿv]Rއc1bأef<-)RKR>f}_)DIͫ**32⽬xQqC[1N[TuLr抩˔ lhBA{1Ti,s J W4"Pa. W8nvL˥"zY|RsyV̀j@o]?B0.@Ƣ]ޛoC1C+$B Lh-QJ'Υ*"xk"5kq 0LX;pT$UMɷ&:ʩllK@d86Ud&+7/4R2AV%T gԌQ"!\`#)#5`ՠ#X{贇"=W/: TER+ٕH27ti)ոAQ@IBP A5"IX\Պ!1*`l+GAR 5J! ȲRN5nҰkMk?fR3&i~ 7H[v3~2mǬ*$c QIbYjˉs!u@0- |qmotirXy׶5p9n nPA&'z|eM=B$q`<V&^x1u@KJ{a2]Wҥ`ruA"X L͇ɠ< XQ5:/Q%(x$H&rZVȼ"a>X\8Lt~bmHm( AJt+Sx$[ -x>cHp# VϓW4+JXm))w2<}?^nl2u[!O‚.&jHmE+pmpi,{%!zpC$*f7]_GW1* Fe]L)ڽX|,#m60Ze 5W6+lE^H"\x>W+f/S=G)M^gw>hv+dm@?]qNTJP1BNuBgŏLp 6}k V$vsoj9KaqYS8^^M j{;FͦpT|-:N)lRNׯou1뜎Jd۽o͛WOW .NJ2Rd$#%)HIFJ2Rd$#%)HIFJ2Rd$#%)HIFJ2Rd$#%)HIFJ2Rd$#%)HIFJ2Rd$#%)HIEdhf4jZ\+2Ӣˡ\̦B< @v$Û)|o.'[̮tVh7T_Ns% Aӭ5S\2,>NS[w%^$żk[A%JjڡE^]jRh ﵂^MYv2ȳ\V[Sp":%YժK 7 ?5Rs/;7ffۆc7XZZzEJͬ4ϙ@t\E(u,Lg`& .qdaH>#H:&R!I̕hZsϼoV4^&PivrC[X_G[ڶڕV=}ü{ֽB jg0_~FrՌќ`rotgBXcLu׌Q!@=CzP!@=CzP!@=CzP!@=CzP!@=CzP!@=Cz|h #\#Fhk昿Zgリ7M.Nu.? tQb4lq*!+=:,w OnJY{x5m ]GMZLԺ9Oj\cRa-#fg@*ΌGDE^h7u C'ߢ 5DAi18ܭvs@vaCkX'jΝ*6wdG@Jj~? 
?j/-p({^r/eӈn.d.]ژP;UV9(,+R(/TDog!w6Z4!>Aw',b~B[{9}bK /_ݓXF.|uλSb(J݃+؅aղu<;ufNJ_"I8pLS}Vʗ&+%[RW6тY*_e{jO\ 3< ̜q:.\ύ&,my)|q֯ U'AկUo-j1W_ǽbs* {.@ꔸ\򭌆A ,0mg .(:Ȓ%OX-[v:@bGMb\Q61BIb)j bѧNYg9PZԬ.A˯K)D˾0X_+ MaYuWt=XcllCq t_?/ v_K A0d K DE*a͔緭B)BZA4~R'*Fàyq4)x%Ez앙Jy9em\LR"LpLiv;y8ǫxHi9e BAe:'@E T`9z0C&v0dSkuo׶""<_~SQDʧ;V>PCa +Zh@GsQ E `4k&Q 6f"ѝzXƓSю$ځkAi]XSǫqm,8h- 4hS_B<+yӉ_dI  E59K1I:C!5$rʱV3w>o,=XiI%<[o}zAPU`Fn*0jyU`+0(R=v}}3BϷ|uv:TԘT\FF StGCr\{p,/=5t4diIY-rqLJ 5yqUH\xEAJހMa7_PYmurYJCaRZ(Ȋ%%P0 ^:Q>[Zh)XБdQ0 G 2_"by&9ktpڇ^NHn(,SE~COM@6U7.v݋7ڽ4Vx ~)GN|*$whUn{_²؆ܾ&m|8)^=[Y7;)f"W"碲'+e1mI={#MPvȗ?|~Y5Q*ulC2U z wuxUTw {/(3nLXgpVd5f8A(j ,m M_(, }AŜB&퍷QB(1xkbb>d5/.ΡMk6LBB~q3Mbܺͼy:G=19mܲBZv~C/TynywAzxCd=8<[;$V|eC>_]0/o}y4S?wGڡ8.diwuƻtmb]JP !uQbT-Z)VG]Ӗ䝋KCsv.@ۻ0ox%Cr_udɅoGH2U&godKTl SO mDi o4(\"$PJP[j], 4Xˍ$a47;ueeMo\vY}k٬ sˋW` Ef)E41*RЁ&*`+&Chl1}i!l(Ȗ@[nt 81yg+IV2 Xmol~T,07tI#S_(UśW>DB ږJ:|$UP39٫F8MHONd I!֊oUW1tHb!9QTA%AJԭ"m;VC*7AWN(KsNcN2|(K_s:[e㸿D|j;vg- Xh uǟ0hZ#]^]j!lRE=V sn4Ţݏn_@KCoug^NK7lAoYgדj_+O_}>,O=^Y=8Eɸ{Շm|>bKl8Īx{5󰎴vTÉ5rI% B7{/eo!}NB ~Y*ð/u+5=bc7}"H _`&\\14[RYyB[d(?=ٛ8vjMPȳ8F)ga:8?x>oז?8|Hxø% f?1\A=OUm!JU;AY+`>^_!VۊoU']*{Ebφ*Zb=4kb0 X$߫e_ t}5c/ÄE 3 ?D k&*3e9*/k9aub3[^k`inglۖBrr 6[)!!{pL&] +ksbT7l2d%iZv~+V;3Pl*de5_ ŗLXIA%C`fmu: H52'--Ϥf~Ic><^uu $@(20IuSVzt>{%E3HB$+?UN:vKrP*O%$7lTQI eMP|",N7LHGVZTL9H7b ;VΆ_Q鲺Q {t_$6gn.6O;z'WZ@EF$hl$WPPRy/Z`+`;ْsQԪƹ\H6=\rhsQPjFX2um;~Vv&ϵnTVdp~Bi 'ޠ nx5}p1!lR+g"٘J5.fdQ6שX)Ȼtb+i^d6"PC@qovM$6A0XYҊ4PٖM[d-~3s 0½g!mr^[N- rHp6873ㆋ֮:ڪcN^gBjk%,Y(QC4|O*/"Qp@ʇY*@㍤4${%f-ppFNڹ687aԯvFbl #fweD]3#;Fv+)L,3,;3zɈ$[(tZ{h&keDk(!J )Z<F$<O ͝"0[3#fF0AW{rIkY]y̋Ŏxdw #Y1:RF]JepћxNjMڬ)|hƇ@a5mc*uAp]2ڬ7@o@re omc{QߪN="gd85NkdAiAlW}(qv-43Ⱳ{V9 kZPi,lֱ&¹ 7*m=(س(m=^ej/>PiڠVcJ PDŃ"'b9&Dp)&+Z*Rtѥzר K|u;'[u{[CL2KGrd8媝"=|3un.gAmb:bxffK0"ō'%(px PեUg\_2DpXޠmٝ[,qNL υZg^F3,HZ\GO`"&9#zg2ƎzRO<_Xm ^E!$:,lUV!{k(BG19(j;橇yP0Bxa#.i^xdO(92!AX3:-3!q6&E7kL՘A_ b˘C8 Nnno'b&T^t8P)Ϳr90Èt;i4q*k|;@ LW8ל8*;ߝW[?;!y\nˏ_^yV9Y^/|H21QZgXDxՇezo߾ifd> \|v?qX/~*Gn{ 7|eu'SDǃOq\ܫr;]<}{9_~K L՘|4ǟqr!c?s{? Û)ot!]KbmG掆ϗv/ltd67p}"V}x8B=Y  W7樦E}U\9gW+Ksu;^J+e_RÈ,%  }XdFB0 E0§Lf0X`|Y>F>?*NK 0'P6=Xp6Lte?\ ӷ,asC#T- saC7M%t@'}Q@iC`8BƓ1<DUȢ\tEpi>}u SBCѢ]pd8=tm԰ia=Q9qNjÛE$f5J tw?~b{*x{(`B]X|sThohxVYwz>9-GݳPϊO_Ave5z@mؚzrnb{}qb~$%eoy*2x5ty9öHooTָ+L?97heՅbd}qy8."@?._cGi䚆)Az>0;6| {qͥi2ϟp5]t6pTm%Annuopc0x<}hEl8]J]Tz[L}yɢ L&r'TLm,?LYVYocp(G@G8tǓr )|*j#BJ3ϦC~Tod V6〄<٘§e~hX\/W:kQX?}Ihȅ^z[d&1$:{YyOg,dFdgw0p6u%F /p"PKx"ӝڮ;ka'u'ۨ% $VNbaE0X&W'suw9~],JhI9*w:] 1p4knBN pM_ilKȹK%&n=ϙٶ,-ZJ( ҂[Y]!ZfNWRȎ ])#-+lڳf hAB6HWT&gkB5ODt(yD)ҕ"B=״D.)ҕܐ6+CWWfƫ+@iI ~=tnOa x2`o .s.Ѳ=|Jްx3]Pـvm ]!\BWOWi|ut(tʼnxܗcKWx;5ݗ3 JI7<] -+A c5tp9m+@+x PʦmKӌF}٭2R%NMOncY.Tkr~d_cҹdB6ۘl^zMSw']w'Ss fc熽 ; )kŶC|cʱڬo좚?k8q5no? 
sG,' 1ҧnW˻wӚ;ÿ7X}`N; WxagsQYI~ϣtX\{uzjFk./x1hWN vמ^Pǣ}W5B}u1-(}<x=%%40ޔJޟTWC fK9{'0'B J^׸ CxCсUu'AbLFH~N)x"H^8D`᜖L( EI+ΫħFv  |}Urn_XW1B4f=x*dN*h4T$d $r,LlTqM;X .1td TevVzK%k[Vyj}OSl,lkeOAlY.N7Ni܊JN0Je^!>:'i܈3KbV ]qXgj3"ʓt&U*GhA(5'WLNgv::Se Ӄj'MJQK|4@N >ra3ko Hժ3QB ٌtD}2@M*Sϸ(QpJFRT<׬3ks\W.GӕۨϹʞ;]`N ~.X[Q'UA0gu%b:*)QM`Tk.o[Lj!Je@'h.)}^Cu") k?C0B$ М L}mloyA&۬ljJ˝`AKD1"YiڤstJ ΥK%eYj>h!렌BP@8fLbV16Fӏ!KۈM2B"I۔LL%E"gMG5*[L'mٷBO?1 &L$9|qBq)Md=&D2N:S5S+IKaؿ1b"89*fLIPuWrfBcRkFJS()NT:1I'Sf:}Ag o+ϓ'i[noc q wGY#HTF"2X]D4 t^Di;~"|}O,|-ă!r/D@'6&N+h*IGɕ Z9#%$vўx,2"q}C,{ N;&eդ,-=ʑ Pʐ =6SC+\?PJTY!…RY\; B:cJ!,q6pWB #?2<R4?VESMF8Hc ڂ#9#p˜M .JG[BP7@ HF^- f/DTG`߲i9[1\/eG S'ذ I]LS(WPO[cg=u$rgѕrhy62+I t}iB/=gT0GqQd̃TXM !BKJsB=@ČJHε$YZ5АH1|Yj֜I-ДS1!J1p.]K5r6G'ol'j6FglX*yo6>ylf>|m4րn1rf%@#wfGKF`ZCD #V޾~ih6c98W+^oȿ[GM}ΣM%oNar<ǒ]*Euzc8Vv-Tx9.h:׼"!}ʯ~\a22Y nBބG&Ӽ eV1'8ÁHT ơim;X׆zy yx>C^Z;^ ߓ`xK5UH Ja)d<b*mMG5UdWSb$9'8A2k" T* YC"ηQ F'( Z˺zai` 4pzb,Pr̺5r6[_ap:[njG~8_Q@ȿa6;\q-pdNVFh%"'FRhZJ[(4LjMeD.45AxgKPC>1Mђ(NJh-=&H')#)*2E[*"&#` bd9Jl'`6&Q$f &|C@PM\X# yc>m|&@ϼa:/Mz0sCtIFr1ʼn'c$cNx` j(%}$xed2bg Ya]'c{$cy}_-?Mw]}SZぷp 'T /{:Pe3%ZwB -uBTP&RHĖ^z[&&y :<vfMƺv{p5vkXw@2D42DEpBo2"h%TI˙MS%/#P\A"㍊aX̰]4??(W W>7ka88l($ih^˽qw[ۜ<<']f7Xl^ @f>_^92dq*0n1wz6/ :*8܂ ǂw$F^zM)\T|ґ\v'yvk[(4F]HFb `XIiK62|@E ئ/ L ^Pcp[*PPX(Z%,wVz 9[kH%8ݷxp69M0O o7rhF[kHFr~[_5Լ=nWV}fHb=n躞z4U}6“:Zy+iӼgM-.Bw{`HR u`[Wϟ[ַw9ntfs}[G[زuno=_6y=&-- ]>6<v˝nF^KWܝ[ԷI,Ysۮ}4$v֥n.<&^Lx޻|kO+GeΆ+ҼL ̻k属M>`} *@mIy"oJ tRM~{\]^{"Hi!G$ W`X$OZ&Hݢ;K|brH}*?C2 &|-44@jDBC# tȣR5Sk.o}U jY{|x{9Kqj~מrhL,ORk!<ʃy47a@OIo25b|yzpn+NjI2'!IHysx.EctΠ#uh;qCO1'a!!'+@%Q-hD$"AeŊV+>Nxa3j.ҳ)Zs8wאu ]m(#|mK-m,qNLAυZg^F,H-5)vh2{;Өz:iEӞ{"`u%MEHV%r*t$>hNx4N@d?iߩ5W/S9q8XBy q ’KIaDQgmLN[,7B_ UӢAf?01;Em{v}E6Dd/ij׍nv'o\a$';i4q*iԨ!*5'UUpBN]7mzt8ԺP]K ;={3?qR.(GD}/~Yvvyd4`y /<9|_7{7}YبDFa\Ne}^һt.Z^T1M{+#'/\_OƳ?cӺU_#87Ji_3Zf,`yNwFX[:BMȞ3 ]{\@"@5_|D$1!٠ s`l+b$n >Ē":3RHagހ{]ҧS1^3-`٩Ͱ.$IWJ0gcV"giHx1%T RM}?67YJLk H aZ5DDk4M+MR\h6nW{uӬ?ji],6mpXZ4ұ V8ǹBĒNHԄ+1B+1p۩AԿ}3oWiJlTy͹F[Z@a% C%g`g YC'ǥ2i2""V1dM@X%4nW++:Ǥ:G],y*9u'lGp18s`r?b&m2-#v|}9D]En?L_W2eƪդ~a9!T/ 7t]M/.6Dc`._gz5C,jj#_Q—Ҿ+,ʷowP&9Y_N0WCL4bӳŷ&gkCvIE5snPLe}WݿekCt >6]9֬ NĻJRJF ,pnCN/V2$]d irvmH?%.Kb| ݱB\ik~x?S^w2] 5<޽Z<LP׷L5ݷ 3)fZR4PKSy :60*x3慻Ŏ ܌QU t\UIy3V%7|[eu +~7@Xk>biF nmyQ̝K`A"$L$BhHs-*ʵi@73IBSQ,7(M4sL&dg6@vSx ` iȖ{.OdXI|u2NO]w00vR`Lh!*%vb;6TntDkI-1%SWXڌ<"%eurt)HI:&fܓsJ)kh<17wC~Y6fӊ=-k"7ǽfRr})Uu1xep}gܳq{AfvnطRڧy,>Ԃz즄MUz=J>Kl"A ~ ڨ<ƌVzq~ծ%G^,2w'/ ~ }*v|Hb å XNR Q{w_7ܼ~ x(zn7v:vu .?זhç`)#Я%5n֊/u OHɚo}q5gJU *Va~  RRNe)8ӥG9̞0yY[{֝-:=N}؆* ϊH,wI G$:8^xv A[2ALͨ\JJffڔxKHe#9RSڳSe ^nQ |-p\ "A1eAt)rC fFpRKQCMh` U`ن$ Ix0͸j ^kaCmL͆αA98UU׭)}mJO% 6*kj黟trZxWr#gqul2W5^?֍-|ɃG i jMv,&D2NFR&Elnx0 qɕJx%f6B l916hIXC ig3b0.mAƴ-:<{O5Hpa܃& lsNïx m)0F0Kx#gHB &!dqLȗ@g$ꤝkg3VA-ؘ~싈aD"vx/_X~tJZeƀfsR/DpkNkR#sh %DiqS"5Q g|'pA fBs mgB_l)PYgbG,,?Tn.N> * }v,uW=ȋay1 õ.,6zOշ_VӢyovjWtDͯ)|gz5NjW5T7BiX\ Q <.fOsYL=5~FT %篰 tZy.(~MQ0q??]נG)Mt%FJ/ƥ΁ZQ\΍-k~yN䗋A_~n:LXFn 706J׊olT}W)Lo^d7Nfpdž`)>+ N@[S3\;eJNQQ- NhumZ[zOp/ܲ# jtj`+xNFRJm #{<_TM~<-~cNߤ˹dm펭5ej3Kv R?#P`ˎWq9⫡´WC)5|gY&8&Lq Bih;\\}Gp%zqJaZvb7qkMJٲ`PW}JׂJbP*їgZ Liedl"~5gf[}%|1Zi?Mǐ^" f{DCbylP]WEuMQkbT/:tMCI|1U16aZ`nQpg2'ae""[zmvd! 
:d4N -Gu]mtcζCJ,v - ⶛ Oz^hwYç`iZ+36s~ ĘKFu9E/<._f- K3̓=ҖY2O5Kugi^mgpU&l]Scwq*"/WZ4p@^|C[dKHkZjy.,igO*Mji4rM}wt}uw[_]}8/<#>M+3ӻO.ozO)"dx] 嬞ͼ.}荏3񛳿e(?{Q v%8_;<9f >MΓse)=-A >-m&wn&ܓXy!Qfuc??G{?+{`M}15~$ch>>G~Yg:ߏrJhݫX>Ŷ]3uB XG su4?7?JY;Y0D.PSԆrѢuq,0> W" a\Aů+::A\YJ}GΛ@m6{IawBEg yvera*<)Pڡ[.摂LNqY5JwXKrO2P);o?=Q?߃;|xoBRF_vwȯ{hك^Ҳng~dm3ǶzI+s;V~F+EyQϏ;Z7d(Wͬ'14ބaD3 -Z> *)-ȓ7* +0ޏ+QϼWPԊSĕg8 8XW"GW2W'+Fb#N< vͅJTtp a!8q+Y'jyɠ 벘SU ]W8ѕȵÌ]Zx\J^qBEg' 6444; +7Wns +l~$V(uJ+NWF)Ez \`Ja+dW\ ,rK Jj? DSKǕq)JNa]j~RgZ/L]?NyCq L;)8IaLZiQר17cJrsJ3 -W܊i•6~\\;L,j]\:D$Y?@NWKA+=`pJSUb! +qpZZؕ4?3?>I0c;M7ݢ\MS4v5M%/|q?W~s•>zir;x֐Z:D+NWZ=$+Qkq%*wZqu:2| M j^hLo w/ )f*NrblvV\! x`IfvVf:΁Cӌwgɥ5z^Ob k,93P-8)ȍäP[E%5>=Tس}r;iV4EQ0-j[:E+O*y \AS3ck2M[|T)*}Xqubk `7΢c(+Qi *XW"88X0ɠ5Ǖ\vD+ GJ!q%rWa]tlzPb8PZc]M길6)\6=9cWl]M;Dm4KT"tp]•vn\\FԺמx9ғ_qu2X `7N2(rW6 *Zqu屚ψF gJN۷IG֩#m!>I_aŠ6=`$q"V\ 4R<+9IpssJz5 DmpKǕZ)qe;̇+ƉDnt j_}V\ uAK?ZbZ] DJyG}M*vw'lzɲҜA6O@m޴)&zwkS[܋ZL"kX8$bɔa$"{4;.8IZY:#6@ֆ ܬcGQ!zv2`$ZaCD.(!C6*S]Hky 883 D.(hq%*튫S+$#6Ap = Dfr\ki*vLÊoi!qp%r0Zm+4f)qYĕ n;?oߡ|o?e v^ۯ?]_:ly_oI79m$c &GUg9ZoTn]!/[!Ni{IYpڛp7>hkAM^DEHh[es}V9>`>;JK.je(4׳jsʖB3̄*TQ*6sե .d;Syߠ(lrڠ< RUA6 *3-ۻSW89YTޫ@JT˭`:*ZZmQR)]߫ULN5rRΕܝ"RlA29I6F65z0-sO`KOIޡ3֞`͈1+>nL.YB%HݜbTKa2K f|mE&se5jE"OfVXL)&XU9*CUW)NQ੧nB =7%bfaEʧWB fIQp!QS#kw:va%_f Bh hA9Z- rU# 5WOb;^i#഍fdsQlT%nŠʳ-ǚ`[]K'qJP9k(vlu\s . #+`Ek)kް֞CQ.Jp\;wM"[ELzҥc9vQ\5,,#l2c` Mhppu/ >%)B dP6W%&#dD^j*Bc9t3+Li"B#>S! ,`#MABX['ڑҜb  dAPe"v-n e983֜5xAEG ՄBl\ nSl\JL `,C{:|G9L&v_/Jro[CAN)Dpq92|^j` lgYRl;:j`6#!em&YrQY.%PDS+'Y3h@yffPndr^3RUeĬdIƘbccU$Cw"B"&D5aN{a¶\5[Y?rmTЍ=ew!!ZIh|TxKƛK9X8<;@EVLD!]I Vda`8" '~|Y"ΰ -҃VsB( I$>Zx^]ː>!k F<]/2h^xOCH91/널5P2& 5E*  Gnxkb,©^_"lΩf50ۆlbba$ Sa&\]^gTw~4\(TedqmB܃0"hw.Sn0‹Y$ w!86g]G^!")C0iV/29%dv6%)%cYA AWB5#'J5ft roٛ%:T5kP4p߀tF MM>N9ݻTAJ(+MH6Y~^oVx{͇&zoƝI%&)OI)Fēs sχ`DolfƠd6N٠3>V89 [f})aج"1l^jS7 mQ{_h}l:ƛR Pa:5Bv @?Uќ?$$ <ؚ`@@H"$KD9ׄ"$! DH B@"$! DH B@"$! DH B@"$! DH B@"$! DH B@"$! DH BX$ Vs@,kE֙}G!0{H @"$! DH B@"$! DH B@"$! DH B@"$! DH B@"$! DH B@"$! E /A!p@~0gGaT%"`U@"$! DH B@"$! DH B@"$! DH B@"$! DH B@"$! DH B@"$! DH۳CB 7 Hj 5!^$q`"$! DH B@"$! DH B@"$! DH B@"$! DH B@"$! DH B@"$! DH B~@Sw[wn6l&/߀~R˻xnpx#,c=p28^p{=qH'!Br,Z]]!^ݨ+ϩ񳫫3k󪫇Qk󨫇Q v3z76nMۮjcV-@3{t20#;(6k.¯-$ͷ>变:"ύ67ʥ܄TcNc%|P8=݃,bg bT뇩 iHT7.ӏ?參Z4b,ӃuJRlEu5 gT!qURe!5($uJ2i+C6̣o_רazz;Jbb~LVi^Ըm^;Ecpq{uSaG򷧌SBܶKYQ}Z*Ӂ`lVhɓɺ`5&5.S6RhRP>Ƀ}F؋~s0zVG̀kQE3zó3~auU'0;Ay><fRkO0DdK~|Y6f[A!n?G^1r o*{<9tf4vYMl; 8-_ k<0o\&g``oMF)T.:Kz~NbYkF.߷.;=V|~[̮.ތ0S$CѺӲoD㩟EŤ}xuHۆ.UX\GLdh4sd/aFXmSc-̲`έDk 6lkڷkb6z S,γp/֦')t7C,.:]c{-l_=a\_Xz巎/i+~{NO࡬ڹJ"%#??2uz}$[[w7tL˥9,:&?,Mt r*XhjTX h|d661[q=w[%p/{< ۺmdn#a-a'aiW!HSƤѹ8v܍? GKӿji YEаZc6)D+EY2&.gf!9 ɪ ciez:%9R ѮbMKRes/G,ޝ|eg."ݑSZAĈFdc-m4^:]~kiy՛QmfOc Ϭ3Ӑߏ\mV5;8OŢbj#M$?a9aza{E1?=/:i]; } ܬ6K~yoe*L h YHy}gUYOLfQJA_xĕPCq=Haԛ?mK~) |r[w`H>\ez5zx {Ze;%@3٬lqfqo3CbPڹYWn&W{|9LDϻ0'!g]*'yn&r|4C^S ۮyksp}7r # &d N֮l0ة< g ќ5&PʽvV%4;wѸn<&|ۄ֦`x! ?db$WCI&Fju9'#/2-#mҟf̋iۍ%BiD!59֨svo'fuPO"ԣhyPU!r1%Nq?:(uG]pnj2KZiSmޟ%~ѫZl0cI UGx5ug|ܮ2 vq##]#/]{(A?Q7qݱ]Uǀ=S1;|a<U~Jl2'pƜ^Gn ŭvC3/\ڛU$=)u2|[KSC'eܛsT~.qwmLE-^]M}77ƟǓ-~Szlw6߮zZm}{ 7O>*i뵶=؞pS0Me4<*φ_o}?a*m$2Vau̺amm-YZdi +@v [èIam;:0puMO65;^ IVַzx%ۻb|]>mCUdEVt4^bT`[H`al-Idۤle;RB)FWᘋRrT\ Dϕ*\T˚y1`ŮC;#![UO#[ko1s.KfnKR9͂q*H=6 jQz8:n@OEW'g' F$ $VHD@ug-f6S0K˴2Ug{+>yd|*E\M&_Gwŭ1d*K6f0@$kҡxp쭞h@0$ʃDCRp^6DGN$xV_uo1֋b m@Qn ! 
N<p7OݸuDRdͅs3-%)D u`\H=s  ]V!e?ݍ%5-KVKzmV& \qbQoAm(ťNMb_i??G,lwuAQ\<]>>zË=Ub'.ޟ޶H?8sƦo9úEhMwVYv8GsGJsq9U kVO'〧5s۷Tsfv^tq^f7;N/o08xNVꇌv_O_^\QQMcߜ=ɍ|3Eutz: ճgI7B0kU\k aѠthGar6qφ]7nou +;z6b9l 5Wˋa;l^ï|vF:=o]'mz\4 ߛB}(n0󑝿ޱ$o>;Ϸyqtl<XxYu:d8&xjbvdp;eݯ==Bx\޿-Í4wqP='8,qy aZU)gDyp,bP!@y;;߳4T奡(O8xOOEFZm=2wHF\;BhVe n& G1ݸ+Gp}gq)Pj^c7~#T3o`*oƘ,PCT+Q;7>ЃOc;Rh k6y^:Zp2&1h }z.ܘu=n~ ]$sl.^wRdxdoKp%x񬽧䘑jLojs̸ˈyDEETl"*V(*{tjӄ;y@D2|/4Q^CBxh ;emLB, 8$yD9V I9E !E@̦ܦNe^%N}iXL1=BD=[ˡeP 1 :Ġm<4Ogk_n}Mc֙4l zyDyQ\Mt 2 7p"D"U3)LjBUJ`xN[ph-!2QE,̣9Z{HURY@] ļ'X0% Mq&ѴQ#Հ.a a:*e!EN8ƮfLAQ3z]O$Iʤe2&ՂR 2s M7&poYN’(DwQZ.[N OZZ(3PS)jZ~1>NZ~|Ĉ.inոģ :0*9p9Ek!循B;"cpp/[C!v|e:xmLq[#eMLIT+Cn.!bHgvMMxzk+D$fXO}1b*rW+\GEp+1MH SFD}ApiV@ y3dn_:Da_Cy^Xmn\CfȊR3^nLIiD?Y&$&lb1e QG{[a\% ٓ6$y9nkOO{o:E1 <#qyd2js}Y` [M Y[t$n {`0FGnkg p5B}GqfkI S)WQ`D0Z& MRqB])&e ys9WdQE8'2ePo]uɿrԛIq<47U\G S,3#TK!8jgq@ `wJE 6Dotg և<GW 8P(|7ptg^^8a4Kl)aY3y Y/NzQ}PQxF<\T8gRpj RuPIMؤ lQ`ąsa=76p*$ <#:j>1cJ*Si`TL'tdtw:3=S< J>a*YԊHX8"!Д1Ns4ݸZшt MR`cJ  k3j4QHQ ]PR0$gK4@%Df'88!i9-~QM>Ⅵ}EDo$(?SPl+-/K ݦP:-V "}^*:4UyNx8ؚjM`3,/We(C&1]E`  ȰA`":Hd,67c[Iy'CSڻ=lΠx"w = r{b~P*Pq</5xEMj("TpOL3E:ϩSQwۯ/UzwsekͼDw=,GEsa=2el#l;|iyu-Dɕ0:z&|@Y%&25h:xاS RKa&E L=u}X.coyΏdy{9)w{˃- !_zͧ#܇>Un~~n[Bfo=)סxJ[*#X|QqIgK֡C|,+3/62oV&]ьΗgncމBԿz+ G$V8tmH]R|PkᎴ@Q4|"`uB܏Gm1:~sFemq WU>,k 3Y/Fge[R4 -f!;W==COYm߯ L]?.wgs8}&UOv5 1e)Ek?]QgYGΕd4ڑni]}qj4|xfa;$VMsYe}>ZD ^yre&aHjkft mm"fgcmpt6N6w r8W:?Gs|yn<~mVO>Ӗ8ԏ.H4r,',Q3Jۨ5@0DY+յXݼ2L3W@kDoh}׎ȱőiնՖmC\mٮ )ʺYɃ`FWp9~H$h:KH<֪<*_AW=02(XfsۿͺݱJ4mtqmimbݭZu`sK *E P)YH:*<43:E8 8C#ou?$-o oA71jޫv ΄9 ]-p0`۰Z`WXD6x %({螫i/zJEth ]Q"mUF)HGW'_y0M WӶ#o:]evt(FZDWXpA2ZNW%7]GRUFY띑{S; ntJz;QpzCx%E0n ?p4zf_FX Br9Mƃ52&}r`jFoYtL`(?^57o F.7kx9_&(޹"H H(ZS+܊4SE:y Yq ++B+ButŽd-;>^U2UQuSV5˜$R0ưf!yMSիjVn̬f_+vY|}8%kZ$eB\m0rEce~[YAy .맏&]e=&m+D˚OWee2wDWRi6/iWrqDBol',f Cx"K^==߶yrUX9Hv T(BZpZ9< 4sz{y;TPZA{&3\7|3}l [DWR+HkdF{ARttYpFPA'Sah͉Ȇ@WKW 5tLm ]e6%%wHW%L G8!hiUF;zt85tzAh5MRBGW8U `]tzAh9i1QBztʺ[@#r-$aq#`օ-&ZC.emV43J{i)(EtO05S UFi:zt$D2`ޞl ]eS ]C&]lTk hGFOW4]C2R*ZDWjόh[hMgD%֛}CtEzzFPtu෈軛KO|ahN9 h]vtҪ;ypO0B42J{+ފZ*>Y^Zh(Sm7Ǽ2`pm ]eJ52Jm:ztũdjO9\\7g0U -<+UO:$~AO3_rP͚0l::UgN9`*Fu7VeP^S!oښ'\?u*դv/ߟ?Fdf(\C.s^h8Whc{2~+SelŸGlFqٶ<~!fhbؖpv&C\׃{~/Kd[l؈?_)̓ҢYĴw۟]%5,Ey[0Wω/dA>/,(UTY_b%筝>?oG~\3R`M =TȋZ5'm,$1AX'-T:V) F >,NC]`g"t+#]B9nл eu~#G~⼷4~^84<[uu8V?Bnh+A;k.ʁvqm:8DXEO73/¶|˔ 7Uj~U&e$xqo:.wh_(CqTXŔWp00ɴY~O[Oa U^V*)y0a34?<"p[ YFw9}ΖٗQBݟ`y23Ӌspag9he7}d3 n⤟/zw%r8>%[8Ȋ%,K:Ju3?c~qΔuAÿ^#Ұ]RgD!?B>Xb. a49p}a"ݘ;qW 8&⤠)G Jq86|D"ŕPZ1ABo՗_./^:{J ^EIc4$m0퓐&#3X`QFh8s̘1 mjTt䈆SE119 T&FRiEЁHOp諉OFwO͊z 7#J@mNy~aS k!Br>o ,lɪymB]壊g41owk;z.mݳ=X['#F%JC48vc{4N͘*il2HZ{$4@.Qȸo5ɡq!R /N<5<߃Y@?F4}ﵶGKgS.=*`=Smz;YڵhU{6BjWo)Jz]Y'a@6IQ)O^-n&j+cՔ+phWZV8Fm!PBWMNTG_5dORuϱ(&\ #bL1Q-S^b"UI8V>b"B^55`R 4`RM(^ d9XOk^y^z!'NFH߂4yZ"qTZк"98ljZM׿MxfY-`:is%S-`CW,PC[K]&]rǢ4EO7jgd=;sb7з}s*F7=b2`Z)Ej}Ne" vfU!S;f5`7-i:&YPY-q[G_'=qtN8a[9a[;a9aΙ'3XQ34yZEXxMJzj1!2&g@NdIr.<%gZ7 Uef9I,p)Se+v{ $MRKml`w9@]ꔜ:6:) MIЙWŇXi׎3,~~^Q]5f{t"8+7RjAUΗWDb!7i\Ha4!{!8P1:3D2UEΑqqzrq88FDr.>e2\QE 6T@p6hj:B1b8? BD26V'lˡ662?uBY J>f6ӨvٝbW-qW;L6*(OrېZܐї},ruA_e6U]U}L`Y|UKT4/mK jo3ePlU:ʂ 1雚xozBMwo}>̦FA#+ʕs.D 5J.F1dXҠq IhߝK;G2DYN3 %pOm:e 5Y I֗ʣ{my5e(W%aG~mC]XooQiZ-#mQh rbk!O6$\%tPjŀ98|Vv 2vM )y}NgY\,),[*>XE\*fq($քϢCf$nҕtО mβF{R6M*gݦp^l3Ty5|ijJNe-Q .+h[ݪ. 
j2Z%%'`] |(=p4jbcT6F|dA9"$J*WM$OJpA\e;SnT+_/f4;=noWb!dqc0e%u(QeU@o[T[*[x.o/"~xT[=WyzpKUz{̀YK6r=F#C+YhLPX$]U]C(QmvDdn߮xhA/|LPGk=dҀ }"H@IRj'f`RbltRev~qGƆ|qc*B;̟ m%a}}ƞ'+""N6 sx`r-Ƀ1'=REDuHAV8$KaA=Lϥ ߶$A #"r%+8*4c1Tdt*#衝`uvGk#.in(+wS v+ XṫgLiu{ΝuPܜK;Ǚ;2Fdwvt|x (6E۪qu.񜇖Zk<=M49z4 PdLnAh|;RAs9B@DډA⫫%%&VŬs 7DV[6./w{0M`ӆӴ~=^Rj\+'n&]&_+!c+TbH^Jb1NNۀX۲[)9+xV:z>z٢w+Ljp}YVu?2k7Ү֚#=97 ܜ-:(UWՠ"דܚ A!+ qVGR驫o/U'?pFBN|9Y>ZM{qT{Q&پ*SB\JS Z,5MQxkcr0N!G_zHyt|1;֮3,aH8.a;!7ȃ͕o ::Waӯq 1s VU U])sUP*k`R=Db%J$PY9t)RF.$ ZWΤ`̏>1 Vܲ4ov( nwսևwuYZsW? J{{jˋo"Vu/ wi1JT30!LDUIaI'~zI>Vա7!!72C|Eْ3sF@k Q B¼A0Tx AA,/J<2螫3sw _^ŷEY-*?(Ankq«4}&O~V{_/nGjڴxɯ{'85z.?h4ãD܋;~ qoMeG~= Mc(S-8)RpH@PwT]A:HE;j 6/RwSVywdxn{&*47@\) `YzV#5U{哇V0S @ĠMpQB)$jw>6=;*thՏˋӱ`|~ 9$xrF2Pǀw7#,lhyEPCW^9ZQܔtw[l-]W?elDhmԯ~LC8x(zW߿]=^}x]ֻPam2Ż_K1+Ŗ6nHyqxd~dY[o9B3\QH5d}2g諌{'댈ݦo<$D.Mi3.g\fPR$@?i{7ϥ9KwVOz:.Qq-:hc,Lsx,%F/\Ǹ,kEҌGAfO;0*.z] ^9D`Jxգ}1F D0K穤G~_ M*ޑq҆)հC]M`|F-Y{ףּk3lPyA!2LT!lĵЕTGA"mS\\])3Bό< yB {J k暂rEc`cx aF>z̋f'O p|~cx^ )5W V2U[#3CfTb9kCwIߙӯ1}GЮI)N n ?N|7^_ƺL*kAsR[3k"LU(cYI M[=Zh] hk N&LXD*\r$7 {?ߵ/kknHdנ~alxMWlWR*3H./v. %2AQ&`B"oz==ߣWʼndweeB%ӗeݏ7oT:r::7>GzӬ&: $O/ F~y6UV~+?=~g ѳaYaͯo9ѫwɫ?^^:\Mڴ{ Ow5^֬0Co~g[Ƃ?>YIIQ=c:m}# GORC,% ` A1cqΣ7V! ]~>r,IzLeŏn0)Kt(%bg?2/e8Ldk"ZwnDSSWØޱ1:\JWd.ODEiPl6uU1aW8(M;ʿ}E/vH>^jB&0}`=s}߼"KLGgFT-^n{@;a]U\weH_BLd!hSk,-;ݯY!}_-\ndmn^d|'CpYiʽa[ʜ\g!Kw&NEI?x:A憪=7b!޳ D SRJ0OcԖ,{)>Q4TVA g)FTq=upkp 茩4jԇ-NTri@# 'xbl*I8A0%^[yK Ύ+6mкC[3#AÔ-z)iY\-GܑgO [FnNUN92 D%R93%L*Ħ<|ITIP[;0`*:ܠnMK*"|0c@hôHu,KdjTÈU&(.qh4l1(t#)nl ؃uM7Nps1; ;=I&\I(blN"kt즦3524ͨ0U͹Ʊi+C$q!2I(A7HHt ,Ki2"R!FHf,ÌnF$ l61)w6Q5$"{S]D+Џϣ734 F3豜ų<+瞀y(*vz=dEys9xO-sd<6x0ndǑ]Fդ&6At+4R MU32B$/&;vId@w`oLrR,C_ufk[a4daޖ^44|߇W.kKYKJ~c==mͶxFt>:ro Z~cRfScp"o ITG=4 MHR+΁{n6 W74w C疟FzJfMK Vh6Õ֮Ə(. 3pOrx=qjsOޏOE߿SWq5,ƒ;XښP‡p? <"N`F@&8ыw {{E߼?h}bW/(]PĆ\[mS W^+N;NR*ZSۊ~ۢfmhs]kdiZҴҴʐؘOA)ukFYM.xãG{Mɣp_njASGj<`I1w/1b ~}a.Ձ4kacnS`J/q7Tz瓏_W 8LjWpn-/mWFrsf1Mzš'E~yeyeG-FE,g|fK/E7 |Uij^] d:~< }ڻ뵑BRm+De Q ҕ1E֪5tph ]!bT PJiWHWRFy hW׶-t(ꛡ+c)qtSsdU=j͢+^xGWv="R=Z˚NWR::J.[DWXB_Uƽ tBttut%R孡+˨m ]!Z.NWRtIҕ0|m9sΣ΁(eD~t,1+j8U~dLDD%G|'yq4ϋK=:GQ= hUtuDS&3FJiQ-;>V,Uk *BZ%+M EtQm+@+Y+Dɻ7HWjH [Ѷt(¯V7Cw@[ ~;t%vz#s`-vU8Zu$U=at%jЕj߮Ls[DW¥-th9k:]!J;:Ab2ZDWsB5thm:]!J::A1BZDWsU-thM+DiEGW'HWiVb>nfn_Uԍ0.i6^|͘(KGak8hl< PyP\Yld"D iKN]֐gUDR)ǼESఃkGw~Am^F,%OO,#Y,RbD&6ΘdI*XyteD}%ͼ!Jx> K?tC`*Wa>ށF1g^5TgnęN;29EX%M#XpcSc2:YwG'B)Uwj1 I,b'Sj DQS uÎٛy<=K:{L-\B[FҴfpnRBFvKS\b ݫa;PiL,| GVPwZ"ְ4-,h5i:K#Jӱ)֘6y`ik ּAJ6] ]i&B=^1_f h: QIj]`pykV4^BwtuteѢMtMvt!\BW4 ۡ+ctU0?2]Ճ{ #CiA\֠+վ]O"MtM{ RBBW+&+Ln ]!\BW7eӢutU+Ni]`Mk JB _HWBXIޱp,RfRYJVV64654 p9iVhm:M#JI:>Af[DWؒD-M+D):J*V-Z#\ٚW V7eF$JkW`ٞ7WC[pOW+ QmZ `JF2BoD] ]YC-+k0rk m{,촫oԎ]zĈ,ǢZ=zpMЪ#ٮ +UTGWv=5Zr"+EW}hT+f)EtF:Ce-thE+DٴH]}̷MA|tZp-1m+DI QJ Fx[tR*&iKz L4vQ`-[AEjɩZtRDֈ.7wm$B CMݪ[/M Hv`%zEDώsInIz1K"SU{Nuu:h AvR gP񘳳N1MQLR`e@=gs3Z`싎i.S 6O-H:KZp]0+34e1֘ӕd> +x=#`,n6bmꨮ^"]xVp +*΅t*hhN+gmS+du4;t8TvsWϽ Ͼ[ah3=0 $@WHWz"nNt֨Еu+A\ ԑ^ ]i5̈8 ] \0N?HWFsRWXφs?0!L2N ]毛NO+V{;ܯqe`$Z֓%ҕU|k)K pfܩW?V7G3gBӂM~NPKiٖRѕq6t%plLlIft%(򑮾 ]ycW]`Tn>&X٨+AD & <5 G]QItX@S+pTW/f~Ft% ] \?K v̀Mm5@WCN[]?jgO V=: %MlY8‘;>F]l pYЕft^]iK*Еe7ϵ0^"]ɥ 40ΆẄ́S+Gב^]vֺcxM-x|<|T8(xѓxlJ:ajz[ B&B᠑5tcE4|qttig;eOP-]۫ lU}`RZ2 hiO-JS /pjH:̈f͆ͲAqt%(; +U礮8|\kBWvne8HWޱzFtlJ٘AAk?)agUV֋r?qK?_Z}JnO]6>6w\\?mS+Ayk瑮NWCO?AI=Íy0lCtuJ7x]#]=xdw?+_mvD9;^-ަuy;p/^vs_7]ij{o)lyP~~m:~!޼?ʏtq*iE؈DA75w.ew&:~iYu_Ϸ6ߕަr~? a{/qp|_Rz/%>O}npwQxLbۚE x3Roya|?q+^ |XXN@Ey"e~D 47_gJy`} ~Z!ߠ9iwJtݖݪ]ħj ?]Nں[z(mCo䂩Rՙ\ʍqT.$rao>$ >.>Avzzv? 
em?tOT]B٫5`g-%e zd6z}9eЌ1{BR*ƨTjsե ݫRya( u6YmHK˝ ݸR+UE@YEogN]e,Z=c 1RkPTȁbkdr-H̍4Qdzs&E Eo r%$P,nmYUDs6@&k2mmĩ)cDN" =XBuɌaZ;B4Ccv[\Dߘ\t5C1qZ䔔;y hB0sd}xoyiuT=Ĭ\ɣa7^UA4P:iʹc0 &rihd5|/IJ- ïUh]#EG? D^WUGtǶ~{JeU!Gd3<%cNօ9 ~'9ΛHW-wvZVeêDAu@r=RNZQ >}ܷbK$ѻ);R3B((E?F曄E!j`&HVհdDH,8dclsi嶳\=D@ X92bJ1U)PAV]|G=uk,)0f;DrU.Q{uQ=56TnG0Lc*yˌwh|fl,P,ڢ % v**6(:jBh:^vδa樌5 ('έZ<ĪuUJ|Yɠ-] 5![]JDHQ@ Ͳƺzֽ?s%ps lZ( W ܡ<VY4n:TZ{JFUh]%`}5 ɷVzҥ yA11vx X-VLCjD*1+5K(&4: @pPk@ou45~+*өPM Φ X2AJqiB AvEc =2UFmTz adܠaS``cHc5aPED&TD;-Zʨ!3|kڃ A"g#nC7!wT \|})Q r|K@T`-N ǭ`NB81+B4xo;U %t4GQF WPn9@GVLD$W{HVX$ETXPjk= :#.c؃VsHVWd70R0Xqy@{KH`Ge Y%cԭ)(+8G]`JrAkK-"VwV5B" uF+tme,!z4%ny1ρ 7DRD/1=n;| $$4”A5Z%l-ѧGٔhNJ@A "+P(v&˽eg:|lyB $"2Б5;yẔ? F#tF% TQ2Q] !JP {Y]ZtX8Fa!d*)njpF̪+(.j^ nw%!gѝM4MR&PYI"[XP)ʝ\zr7mUH}A*-CR% CjM.܃g@e,|.˴˶\'$Yy^5`0uvMr63IҢG*5{ ;-,F ݚk)Dҽ$D(Y=yh4vMfcdGrvѰMʌؓ!)a!/Q[tؚbCiH#Eu7(V1 /8eBآpnlČ;$zށ U @*mHuq19MC)ܬxXH 5jՃ*TJM4O5d, (sOkuPǝ v)$l5^ A8i`&X; ZfSa-lhԋ`Iu!~)A(@.iTkzPѠR#t!oh NCaކ`ݨ1jŀrti X%TǬNr+) R]AP]Ņs˯W?.M:YXv>_?ݏDNqg߯8_;6, o~!.Y L/XPd;=IQ,YÊnU9T/ǧ\C| N+_lgZMʋIxA3?MKlp?*L'PƮըxH<&B%Sk۱c}U!%&[U r^83Hc$D D$@D D$@D D$@D D$@D D$@D D$@D D$@D D$@D D$@D D$@D D$@D @x{Div=$H3I P,@HD D$@D D$@D D$@D D$@D D$@D D$@D D$@D D$@D D$@D D$@D D$@D t$FH }=$5ԺTH#"d\ D$@D D$@D D$@D D$@D D$@D D$@D D$@D D$@D D$@D D$@D D$@D D$@oD`!07Ր@8t@ "+R D$@D D$@D D$@D D$@D D$@D D$@D D$@D D$@D D$@D D$@D D$@D D$@NQC[/3j ~9z(/gC 3eDE֏ P浀K{?tpE5.tVdWj rW]Z9xU*,Ѯ$7ՄCôƮPh{v*1ڕ̿Bn՟]`[Otr]5U3nWRlҮm~?7zHm~ʛ;hw;ۊN[Bk9/?h1_wMp5P9ct9zX g[ iȦQ+i}4ܳɡ4UMhDEv}=I0UP-|T*=b8F3++Ʈ:$VIxM8PpEP׵|8P{vhWȚ+ BbWVޮPVdW_]qI9])xzr<]0}S%Vtj/s+Ȯ@BRbWVԖJ(%ȮPȕbWˡXױ++GȊ zAkIQ|t*=#:BR¨r5vaYWevks\` ~Pk1 h> ~fE4n~~Y_VͻyEs &l\Mt[سo/:Vy^-}Q tXB?Ѹ>.bZ(_?-)'~>9D۷#.?";h~۬pݵ7kTl+uOFp&sUoZ\iFfZ6;_POm:H)FoRiV}QZME5c[6`  &B*3WUQt[>Ycۂf(R㡛59tvʋC{ʵDN =F`eaD1vX9TI"P(ښ`'iC6ܦQ6CiTiڴъU[]]W]Evj8/rB;ɮǮ֔[mv ]\PcP)!:BrQ]`g+]-vj=C+T)(:F)W]`}5vre5Uw*? 3#] vv/Z~j?Cvɮ[?  6BbW֫71Cvuv%&BVcW(ךZ z>ti#+iլ"^j cWoWrh<իؕ҈ -a~s'9eyxvŶ\O4c]Uew.sUdW |2~rU5OP&-c+k?\֓\k+T]JڕuY]Yg(<ERݮPfdWGhWAb++\Q[T TjIOѮK K+˫+T+oWRǮ^0oWswj/BV~ja~ʝ5aWbdWϭzP"jwk-vjoWp#+ɸ5֬ r_]ZinWrhɮ^ŮSҊ +)+IQ]JȮѮ;i诓h^evz0oگ; E_/8ECUNjZthtƗUɞW@(cΣ.eOPtEBtu`gZJ~EXų{޾} 9*w[.'<+~~?g9@i>o m:^myn|z38aN]>7|v7\t9|o]-Ч4wrD3W~./~U1[kXOVgYg0cݖ,]'W+v{={YQW}g6֟ԝ,6n}>"~y Zs<^4W5PӸῖgAh3μ A:ydM36[%_>LQL6eZnӉXc(>tU^Of)l&z/.mwl&K06.{k糼3j4ucV9i,F+Ӯ0Z$!>.rsp/g3,u2}6?ۭ~xvvYiww¸\7ۖ r;;/o_z̺, ;zύꤝ\oJ6@Tx}3FYO1BDgr|zʅ=aU 旫o9[VٮbR>1}̏w7ΠOu\APMH1FW0B V.>' \j 3cNJJ݌h`kzp=qI9.GJx%hAze/NlWC`LVFmjʤoalDA=A䏋y ԃ/':/=Ev4svŅĥEJ)z2 bVj Wk>}#BV&cQ*]T.dT.A%f"s! {,Nբ X?|' pshsq97?׋9D,Fħ nz :yrug3}ٿD_7`Z 4Z-[a ɩ}R'=V3'ϞklqHM2><ڃ#|A4xō!K\:|i!-. BhdNU" =<ރ~Wey˿= ${q;œ!n׭{=9:l's&OޮM<)B{0~ rQ(I{ffӮBSHݬGY؊Z(/EMPqH/=ʁgbp٣P(=2!LE_hlUT&ɂ&6iLUJ稥ɪ""(d٣Ԛ )u҇XѺ])t,{ mk/̖"z={x r$Āг9~?Կ }'4cO 1b:!y!fP&:ڌUq&7x8wj_Ƕ2;[#SgRIq@LS|RgA,"Sl\&|~z!V'.z31օBtp-ϼM\G ۤ Mmdׂdd9MqƗru~qVao}giqIz8`}PuZ)T}Pvs^1&^l?^TF\ kB4Zswk(N2fZ_Oi\8b0Ma"@|nΧd>'eb@٦)wZ+.CfF0U,*jl.TR0ZB.EV*ȝ}lkq*A 8e)o1f aEVsjBi%W7=ɧ79'~7ng+;/|ԂB'1Ifmμs(޿G⡩D_]}OtvC'Aq{LSZe%)ou΅en34b[Jgܖ"9{[$%{sR`g-]eJEk"scatHi#knybdb ta! 9sDQ Lav!3͒/{gyb?_A7*|})]b[2%CgKq Sl b\hâB8 Q \}-cO%+:5+2d|ga@1Hr^9`ԶJzckK6jjՔ$ʓ?<& A&2Ah^s#-2kYerp!-<Sdm&e2^cW/WڸouWiPa-rg̼dQn5^??eNQ k%\D'#<ŅLm )]a&X,WwG].2嬨^DTϝOm:Q{ZebFZS\0|ѬO 2iiB&6qu6kPzGo-乽C;1\?t4]nT9T. 
rhn܁IˍbknW`YQhnI,mY\ i /xb;ȳ&ϼ:1䬥V,3$JeT)::Ptdg_.:U)nmcm㘁(ٴRp&,+{MtRpt( "m;Rmt.?{G\|LmEɢ I~˽Ƃx4ÚXFv^aU:)Rh9V獂\jr~-AxɼzWZsl!/d߿py(oW>>N912>z>ܭ:<'ksrԑ3 L&F.:mUeFci}h]4buEgN5U Dbإ˪TV{B,.`|HY2I: Yq4 ]pe`TnyG;&XW`8$@lDˠdEk -0R+c85mw~׺CA?ue";Ac8{Cm Gtnx32Z±sDQB9@h4,qCv1:%?SN(bhCw^>61le$caVK6jSF<Ә-潒?3fJY+q8}`~|Zrg^=V܏z t␸b b &Ek Y#˛ZmpM=01F<]^JI;u/7|zϳɍk>'1{ze{'}Uli/aR_``?Iŧ(Zto{º؆ܾ&a2Rz?{z܂-l׻{t z7"}.E剬մMc+!Sb/w.?N[W.?tNQb u vXQ*Ȧ="fLyTok PnʪzYGEu;lTWV2h_S"di3^gVMAKI&S Ƞ؛+G L 6<^`sci,eן۲t좑[~yɽ6_ږ3W&z/6'W>[j}*󫛾ϭarqM wmѪ?Ul[hq"Ð6o{.uul{N7m19}ҲJZv~m;_ťNp=o}u7@}}d^~ەGD<,;P7~u}j_]1ܶySaaZbU^Y"6Gyzpl>}vG7'W4θkEs;7 qhsH#5sC--5àw{lY3v 6rT}-)1qLљs+G(c1Ԩ[2ʈlE)4XMvtp:d(XU6=u~IoѴbgʡb^Fɒ%e2!ƧTvDLI"1N^`XKVYw\<]%<>Zt_ZHUFc 2Yzn+aW ΘC+z_EtKՑTaza倌ꁐ;TZP}{bU<2<l~zٗC׃i]w M!%PJ]_9XvΔt6 < oXF@p 9RB$o\\5n]a^ 8MȚm@m:Sr").*hɈdGƶwXh# Gƶ-$YECv>"eԬqMjE ^ivW2L֧j85:^r*GYtkX*ij\JXu : [P!SvlO :W*ie!yMƕcKFk27Nm=uZ}\V]9xyHu^Z>OF9I0FcBA ٗd2&[J0:Er50q桂t(.*5]SMjdU1 &SѠRGEVkc֫BX 2.`Ж\8)bumk Vv˽ǎ?Fc6XC&p&[0l"]d-.g-s6ivn?_̗=nEWG(JjrD?ML>瞖^*6_sgrT66OKjǽC &}|=bϡnhkԦbZaY '9Lzv |b'TW֨Ug27_6F0m3hNENͧ[Cb788]YcqhdB[zl2wEG<n7J\sx L'Y.OxյwrO f8EVߒ3 VgȋJ~mvg7P{y"j{+5Rѷ㿬+gg;fhG~ǝGcXL2{55kT!Ko޷o;<^1L]+#rͿ}7{/Gic5r^}nZ4L@jo|qpyS}L&o 9зa|' m,t1wHuXl@g8ւ{#'Yc!2 ЇvzR4DASb;[!|2 V혟Օ.`cM3V>9oZQ-A0d."Nv"K)OγW["ƦbdyL,kj:]Ow5@4򊁿I83;3c`sQ;*|&S|o "a|Q"6gC1\b]cU7KGi7_hz ZmY!+L H>S)*dcJ x51Q Sh}=NНޡ/zO+M8pX|˂S^bR(j!`5)=CE:pD(huQ CaSJMFeE{ы/b5 >j,B4t-&Pty>}C49ߔ暝+UE[RQnY U4^ @KTQL'PCf[O2!3&Qm-{:pVT>nw-n)_?uC3#w!"SmB`__RM jcχ=pRA|TmZtri'o_^ܳ"L'٠bq[lSgáUJe8kU <bgAFqPmTih`/LJf6arXE0p0B1W3W7 l;g<=tv=؝YLF0l`H2:?ڎ2*[Fq ju2DP.aGȢ5 z|YSv8rKP)gQY@5 i{؃uf"&ϵ~`G8Zĝt~%Bgh~b4HvFuje@yCUA-b Pyl `G]I%eBvDFoZ;El8{$gyIb]bIsb.hGx_5?ػ6dWzJx_{6q6k!}H9~{fHJH5(̡[80<#-!˹Wx,D3m0쭊6θcWLnA@Q}U@oy\ |w8фе+ɬ~|GRczI̓**@JŽ߂}pcKj5\EK8/Fb['e0[~}Ńe|6En\L*Pa 68Yβ9py<9W<BNʀ Wuh*к ] |X59 po{+U [|Wx]]O?FLQuޫɊyA3Qݢt6L*ۃ?~YcΪq[<_ r8IEmj^-]-W=[[nSw|{e>5-FF*E1HSQ`m4rlĚ8E~MD8O7g8~{Ӫz[ UӢEf׵?01tQ2na{ّԅO\7('C1Lq5n,e b ͹%eacByB@Q/@eR&yZQ#ђa(vJ;2%_I RIZa9IKy >ל>9_{o'KUP,K6;'~|sRGm~V7tg[OT/_Lzf~/W}n}TCP"_}'sM/ϊ_GIoiߟ M%³?~)ŎM1M2:z^}wMw|t"1GOq|4 6; %jҥ~+@,N~f%FCvҤ ct6w@˻ z/_,zX/fKRAܧOHIW<FKkv|Cn0/_n=c"IL*ݝk*WAX34 y WGjKʢ`Lj?IC|:Oj.nloEukYK[!k,0yL<N&%OCtȋx1 >k0I/|ĨQa{?u#<%%Nb?Uoý sƊ4, /iv?&kwe6&: R2C6?)>r.wQf}_<^|S/ P%J[ZcDxRKBiljZ$ 9=|(^gmsv:һE|da*Svn/?Ctٶ@U/e,sG@a*RoiWyTbЖӡ6l;%:v:[ڥ=gVwKv]o袴GV%Fے'}9jnə|2 ~r-K &wyaѸ,u76q[&Bht[nu7{ߑ< 1I&Hg5 snv]l6=Gq7b j}L1bJ"ŎzE,kLT-&i Y˄SYowr:;ٱ0T텡vA<_@vEQE(Z)FiFcPp ZD@F(-u6,Bx֍Kiwv3uD6ޡ] ]sr}p {A R2ɟZ2hA\L ZP]G JTRG )̱\L=2iIFbd`]?>9~{Rsv q ii U%3\bYARm#t=?9YR9![:;P|O?[*am~P,٘4Z[T!F4~H$Nz~b,Sh|2Aښ|2|<,Wh>Y\oF}>%[oܛ{ч~vL \~=H$Z?,~>g}NuS9M:,S\/e`LY8 p1LaU*XL"A!-! v Sx#Zs6pC`d72*Å3qS%ap =!/Bԁ!Nu;lhO !I%iㅑJmc}J! E NouFE .w3u DIzEڪ]PHmBr.HK; ȫ sE{*cscP Q0!S&aJ;f2Rʃp-10GԾ3rDwi%ew$Ex"pZ; ]X8cQPZVXc^ġpalf6 *TaC?Xb DE,"Je0彉ciw nQ\e(')d/c#c@" F0H  {5w`9M4 :HFE6{!Bvcߴm?|ak'J玷l y= W Ը Œ9K˭.#2}akooD(6skqzO6}[&&S˔B!ghp 1gz4‚$/rՄX*Wewna\ gg>ky[O fE >䈭â4 r}ſm꫹}q04h KJvn<>U Kffgr'Ӊs;0u:wQ NpކRq'ŝn]g[czUw_1<LG x0k5f,`A2`3chnDt|9knֵqen1M3DMu*Br.sH3y>)sPgdU??3RFtl|86p-VvJ,+UDJG"`Y؄sL烨Xm_,D;1L8`MS X>qV'xc>/B '냙V1Dbw% qCF"pt[ct-$4`.)~|?uxs2~>MvZ9a?30Mq_ܣQ䣘UׇF$^u˸`ͮ_ V*D]92;8U9 YeպC5ZGƝcm{E.|Zw|σ~[q%GZ7`*շY麧ɢt+*}x:yO8ipyhH ߎE&&UJ.^ !S(< _HnRk.ؖJ6e i9nB$HC* XzPFP<8"(DΌ#t[Ab_ .CAoTxjW']x2ËJ` ]@>hG!'<5(E"22Q1M%{褮yUVWؼ{/Tl&!NE/W#$}G]a@[diYݸ t~ց*}$&T"l,g04kDXQT䩊m.orUCRqxQ/<^ּNn"GNsqW,-9'O\tv^+k)L@T/%a𨢚+&9[]jMy9h:NX6v< 5R|;0lU$4_9Mc(Qd%AXh)(t3kݳ:b{е R .JP=Јd$v$\6ƝVZJfνrx416^CS$!e+[3CRuydQE4kzA|)56Å;5F/d`ѤJ8p b>#wMlmָwQR@̸&z*og}>12wV8PCg*ȌoC OEk.^ r. 
y&I*Xn(+CD348"U,sSU؎OA벹USN[lB!3/!k 9r +el0Y@b2O12%T=>kmpx%/UjRzbDleC6֔/':e,G)]9?nPj X6U.?`fG`~$'dҙ2 yVU{/|53:*@Z.Mg+ p+ ߫x0%!r8>20XBށ, -@{*7޾7evq[E^Kx*[D$"ZD# ,(68PZMy`o;;"-]N~UA糮CO i)TV|oۍ2 8FFJ*ŃK.Iz8GYd܉EfڥkRxcrD *mXi)$b##队pQ.`M݉1e5J+$Qpd BAF8bxOi\Y.TN[CNX B -K !!Gʍ^ywۨ.9~sG!A+@d4TΒZʼ b4 FP$b>eDZx`#a k!B( RJ M'J k`c \>/)/s\ u Jqk!W-"<(098\S MA$lx;s@8I6>֒e4=^޶^.I^M6!vͺ-za񢾔,/E2/[8Hr>iË_ܙe^g4s纀n+?|'Xqqox~d'"ܺJEkVIQI)M(LEL_]c@>B ȳգCuI;ĬW逰ڄH  MAK H/`ha28ԊD,]cnQٻTߎ^Ǐge;!|DO:#>w(SEp: Xc/TPCRt ]QQq]`DSr{+%ٱvFSYv"S+>ݦUņ6v[x{GUS[A`"*m>3pKU4\5Vq`t |ji G/I 0C*LN8Aӱ>쌜av[wE#vwՈc({k+zKT{#VAhpc4 Պ`餴64Vwٹ:\~ s0>urOɖjOD!#LQ!5S)Eڀ̼JӖ pє{}`rn 9JkvA+7 *٢mwuٕٻQ*h[zyc#迎"_.@vI((x0ugMzo[˔ ab^#܋FH|ƻW bryiy0\(c($iL0q`6m**7ʃ q**P&ݚ X26{&CDtx)RRA8);%)$#+ȹk͖<(&'Xfe-&Ivn pCMN~?n٢n`G2_t<[>TjbzqӴuryhw&lRڟxoƧH}95_e5ѩ㏿\uр`9`6^t2F7.k~3Z^{g|iĵ~5LG^}z\z쵟lRKsÎϿ VsIr=:*A5twjO(o$ROq&sB>O LI,flV>sth8`ǟy6<\\`6 p޹+؇ {ؔq<"5Gf4&hɃl0@^D/pȋIYz6xxtK@&r'uVyȯbvfp%%(yͷhvcEv?$kw?\/:}y]g$f)Y"XQMϡ_J͹х]ϞTK.Tɨ6Ė$ d1=8m-6g%i?/ȁg9h$aSb K/ֺ5bg3%I(%G\y(Z^)ݼ:9)RWXYEےM鈼?-S>q]wⰸMЈscb>bvE KjePY!.T0IIVꚽ977j8S_E T~khM@EHAc W鷍іOHzIxBŔ?MeZ㋿BJIrWPȖWj7Mm7Aoo C4 4fFѐDӘNQwZLg4vs{,Lmʮlݴm,P Mh(Lۻ`auf/m%:v*:[ڥx5(-w!η#OU|%jl{&gn&ޡ٤ϳmI[G'Rw!4g-Դ9g ocR_U&>̞csz@m|0wqk6v=NDt.\nѶjqg9d0);k11FSC@$$Pogw;[XP; oxv{ ѨH`"N 4#1(TۋZD@F(-u6,BODF%tѤ])oܵ븴;{vG\㜣/co

W/kcbV~@\,I@&ˆ\;ibyg{D:0*f {.2.g`^]mwjl.GMvo#~ G|=Pe,!%HWm0nGm9R1'LFL*n@ 3ZXXhʌ ;Q@25 Tٻ8$W d*2ȋgvX׀cA#Mw77`bY$R DvUWeU"2E}wm8k8+d2-EM^~=RKȾ>Xpfo+"6>BJBBJB⟼̩ *0B!.d?{w" phHG,Έ82WSBh}JEC !NZ+ngavVyIi4MGenv-%(o-8^ NM&o\~ p|;.`DIK:j%5IS-Y29Ȃґrm)e(g`ж2"XI1D%<ѳ]QD3iX6ܥJyFO`pYBJԕFmRhѥ"(zk0hGor_0=kgXs,?v) _dU0TlK)&K&޷e]sw\ 4{d n_-K)~Og d2=؀zz]ᢚeղ?@l4ty傒.|&K^W+ >vAx1eQmÊJ|\ϋo^tOY\py䗮GZm-jÁx0|懟lXjbWﯺ5~؜g]t:vy,QSuоW?h #ȅs4qIhoi6Y0 2XU0.mdOUdoc@4Ze%B5*TdF!c:brdǧU!=pAڥ "KYO|d.{ -DĴ|<޶IJtiq)kR^k?m?L?zGH4 nVR(?a0ReЀ/9.y#YLTvQkjsmR @=XE&'<51!)QU0YC! ؓs$YptQA ۋ.W, TEmt=Eicp{s ^rC|LΫ8Fc^Xg"`[n!Ԧl <$1O՘jE5@J*1hd AzFMa*hI"9mVSicĬC"Ș:U,JddgR)9_9넦Qoù]BYn|ecY3eywy&cwSƳsaVqۨ1'͞i#R /JYJ'T 6\6T c@T$B1)]dnXoU} ^b}8x| y K*/唂zu! `)R 2W<`-^Ε:\m^aU͕V 2WË1W<\gK1W+r0WߢRIwU@+qi29m;)]քl\/xٲøxUQdQfwÖut2d^}SlB&(+o%(mX69l&iLKFFGͯmҥI-$].d/>U3{|zʿ}`,2 sQU|:CqN?FY.֛0˿ݜ>oIw@$Kȕ|mZ36M΃SourDkt$eMh=5sP BmQ@I+(h̡=Zޡvf[5g ٺ }Evdus|B+)+i7.d;4s2E:BIk2IeSqAR @ O} tJ%l˻~|頡HFQdST ,l4R buI֖nkǮo!AGgZ=umJ.:i[rs-8\|wcY#0"զj{N Kڢ Ǹ6q)볓ݩ%fd([Ib:0W""qB[SЛRx>rYĿ|kz*tErFPl譺eRyE 92 ш0 VRH ԯ"ݏ?3&ob< י\n?'esof5s<ղ$|pDh\3u'UxJXq|H7.._~Pp߷8.cŭDtKkj4Kb;.TZ_VX~/={4/vhɷp˪uѪíhb:;EZG99گ}9y3咑plhYTjjf1?꣎VZ}:uA5;ؚbc7&IaS㨙oMwÚcIh>+You/C xsUHjiԬ^p- *՚-~$gsvs'8 _o2NL9$"`d<^?\…s~RV;"Ml߬'I,FnGoعn_~x>UYYK?6MKңd#*bΕ#'/VN=u7;Rnev)e*`h& x8z%oS--j%eds/mӛL|%mr~㓱Kmm2Gmmw'BBX2 nW'F3.6Հ.Lߦ' GldO2%DmdMٳ]L@%:26n 셤Ĩ@Uc`4}`¢eP.,k~8;N5XPv jG`Vg'z-!&a<`k*D%b0"(nxXai e-ȬG/UV \%P3ֹ.a3qaԯ[W`<Dl"mvD,"84`S0ڻl|m,XUPyZ x$tƘe^:]"wRc}$;VBJř"9&i!3aOZ`%by#b3qvSGE71u6Ӓ}q7E7∋@HђfG%Pyk(ɕ1]DŽ>H 2EW x\<S䬔SЁ1Df5 V )|`S|F v5EX p[sZ&)Ry楨2Kp!kh&ΎA;~OJGֹot>'q{? X>-cpV~3}W_MVO_ӭ# QTqI*`B q$)h[c4B)`b PuyPF`x`F6N%IGˌn:95d-8NZ0Dc0iKEFJ m."T)/}f㬭b2NaNTc`h&v`8)p?o6ZbL>֎/wz_Zfl7:<:aƋ` V*<{*=G]{Pc,+lZ`SLWF""xf4BF!Sm(),Vn o m6~D6Szi*t8^?3r|V9GuD `s!!Q`^fm2#DJhͬ(zZl]G P6Nvy`}_!d#ī^Ot}!dKV:BHbV% D.#dRu)Gz'4wYo47 flNsgߗ1Խ(O׽RhSkr M\vZ ?jpBy?Cuq #"aMdc޻MK˛KbklJv߫z}(O Bˬvd呓\:5D֎'ڻG x^o.to-6跓gOhzkcn}q7|;tUrڿk U'HԆwfQWBC`XŅ2:[aMb+Lw5bUNk{9i6 $_`itJ?]zsv{'&CN@j˦W6T^Uؕط Ͼ:O6zicWc|ثTyfmnו2U(ez{ʩһY)%N=ǻBwm.42KoB3iLGf۷:/4PxOOn5>L-^̂I݇08rhp<~uvr͛ck8\\;0" I_| /h]$@\^Ǥ݌wb+5q,HOnv܇jirͭBXi]z,'4nm,C HT'/Ul%"l F!ŜlM&#//>&YxnɄPH D֒7yT.e]")S( !6W5_;fo56ι~DS؆N('yϔ- 65ȗ@K+*A[9W |ъoK4n26mmwKzBߦXw SHR)δo̰v\ݛyXoO!vw{2*)umB_Y;x*F2+>qo/YcNGE Da{#hȆ@b/*Z9^8DcLWjeVu&f$lzC7F/? 
,vGmW~}=Pf#f29L :g/B[+IƧ(ߠ#{IY^SpN^q,UJFUH.L|/zv<͖eCPչo־-.ߧX3W3y^zC2߭(fK˫`UիA?,CVC~|3sXyWq‡v(.36 y~?/8i;EGI;]%E/I1]&1O q.]w^Yl4U|_usf\9aM_n%K|-Z 5]{lX"\>כϫCWO'XoP볓U VMX:.Vo0LVP}O4) {5w]1ї6N>痢2`\OwoOD0~T)u+m}t}eDt|]$S'4E(n^:8 anHjtj^kR(!da@T˔ {Vd ra.TEX|j&%ldef%Y6Jecv QS;UFv1mX۝Z$hCs%S-a_6rRX .m:Sθuk?_oN?%7W٥Gw/T' :ҲV( uEdi3gQ^y!m)쭎.@ )=Zn3=ʽ6^G3b5WY˳Tb݈EL:BlbDB$V7.DaV 6+rfX *\ 0D(^K3-``锭ERs TNkMĞiڦ x<΀wr%:G6HWN\)KWɖjo'} Fkk"kh!Znl{\=_ԉtstC.+}vHpu@Lq&f7L~iX(F g_ӟ&r541W?06]]z6Rxy4:PMXO-2Ksςәg=5&=&sWAS|gTA>Ȑi9jbA/P0$Rp5Kfqvhw )C$'n/;L4]ݚ>kd~TelMɿ+urY3BcG7>{v/ Q.EQEPc,-{=fQR*LLκD1]ruX2\rQΔ`=Q+ԮdLR/!ϼa9uKpJ \ j4 ~LnJ@yIgh%GkE.{aBR^xBizaCx!OW {˹ߎ37N {rڳ”Kw8=Z: y) bCWѐG1^~*F?WΫc%EB{59c :b%P!CJ#Z*|Iu4AԤ(s+&FNn ߽|Awd;*/})VyGLK'.V}kXʹS=?m!ST9uՅ`uP!%lJ‘RUGWIE)UɔXƈ ԤwC6&ĒQg`2R"|2!@R/ dn )ZYtzjvտZ["IYWlwH1bj!)fߟ,+Iϔ BvBJa9I(t8̑g!lJ9Y-("#Iٸ/P3Sl'uLٜ.,c?{Frb?%Ȏ\\p~8 ^aяjIg䊔eoY|ZCk ؖ8͞_Unj=fm4fB{D.#Ҫ)E<$FPhg dɱk9{UtI1IҹhW $)VJD/ib}v:q;]T,W5cTAdM2U@%r p@IRƓήT4 3} $c{({Sw2"X2ڊ7-k﷫&U[A;܁B yZ0jڥ"tTJ*}4W xuFE&5\\tpUZDkE6;&{{7g얚w1p3mrszGH;YyAoH/agԍ4N@vQV$JH23*1n4ً`eNQ&ZuH.s!ۧ5j-W*ҍz8Mh\4hOHX<ֿM/}ݧ/`C-B7I˚_JOR*7=ta>JΛmޓ־sCtJ^2jz#K`8e/׶A7zRkp'ZMO3t!S ]=6734Qi:CZZd}.OҭLNg)* IUNs[&JbH;ȲX-QN' gRӿ$lT2<ҘF,G4: ` )W ȤQ͜L|sNz)M Fh=Ot2buʞ5H$fR?1vXAp3(}J.{?ޙ4h Œk S:O|/48/;~XIL3M~tu q*9CX9T_z;F竨x962hp9d zKŢc2S t @ b>OXt|_~2 'ВZL)fԃT㼻97s*j"2{%`蕳jiP{Fp߿tSv"ɜv2MMi^ R1 GÛMOG8k ֮1b6$<'x[tP||zE6y}JڸOˬwpCc.N}htE/wLַYOH#+Y͸ui/f^==+#pe3 XjWwL<7<|婷m,Xsl(":k5uD2hcW^ᅲoa~g1JըXikRM$Iut2@*X2r9+񙝿8Bi/my#%DiNE1 T )T 722g1KɼZ(oP"DJBTDΊnK1eN*a2OƃD\޴ӛv|FwϏ-h:xsNSoV~T*8z,4ЙdD3EYZ0M!T lVģq0NZ^XYF9LW[HL#6oR}זb`,ٮodjBLZI2R|&9qvn9ߘǨ1{m 6&`Pl9ܡBv;T!VXz?CVq B' Z*vuU]{TW Y nʲ)Y+ *f5e~6 O8?Njн,w4^yċ?OcxFYC~J2>'HӁLd!^Ĥs'CЎj5̠8({dbst^}kCEd*x0*8TIdy3:v︎tբ/Օct< :d$/)$HZ$0@ЕnBv8N%Y;6,ڌ6gDRD`ޥlTΥ+j\Ѡ661]ԻK.i]&Ea k5ieAXmDWс-k'A@DE2cZAKqZpSҲuI[+m+)tIiW2-F'^t5Dױ w^5LdVJ[d%[" @ G`:(/@f-g\Ɣ;AjV!/us&2S *J-OB"TPlB=kg %i-= meڷt3Ce7~7Wr<U}q4N٦K_kE]Ak m']G|sd6-+,|:Y,}=0'Y<0LYHĠq.{. Q\aOВ-f 74u+.{rr_/W8 j')fC޽?vcTx#^✺3 <':Z>/D\ҍ&qpq//_uqxWRInṗ!|$}\fTg+'ܬVLٕ5Rf<1ކxB]56 C)91 3["u3jEf[8ݛqy葯zDʲ[x^cԌg:kx f-g4fk*W_MeOK,Eq g[?"-5{5QrSz9k. 9_o|i5 p:Kf}?I^{fkm]^Q7ez%'g$$4x NZ"d8C?IV}m+hnZzF+=ÁfJ 6DV 2Qȕr&$ƉQrQȺ~88}+O@loW"vTWx|`׸Oˇ^zZ77 %E?XB9XB-@cJλw 8OY BiD>VU*TN6avje@R=P.uL_>|2%.`Hr ڗ`IdtV21٦B-iw%~`ԧk'{%ԑn{A]l#Kp-H}oZ28> 6 BTo{^VM~U]Uܖ@n_+mn`3Z/7i p>Mwy߯MǕǃPّI{*.t*'tLteiCtZW삷oE#XP.̆`hg[=IŊo<(xD6*Y.vTvڤz=^O1)3fH|& <^EJ$,Y<^R;lr **f(ytܐdQg'"FDQ+.DdÂ9J$iuGnE~S|r|4oOZIoT+ǙZIr&:n]uJ³V&CL%!Mm$Pųmр)hO, Ճ"h "R +o1n/eV aشflݚ9i`Cq.4.<)]< F~3Ə87ÇxklaFE5\:2<$F:% %":QF5b$ AHQԦ46JVD6 \&P9#f؍[c mڍqG[v`f5hp$ٰNs!5e'_K5WpKP0/8q_ՇY&@4!C^*4j"K! E8dTa}9am;mvӈ[ AiH7'^xp Lgqp=giվmRSOfSޗRӯgh?}ۛrktv}=ڇktɍM.Ғ.+;-6%rI}=wcGT@"z{^mUh͔TbJnp~ 6F&4rԶlSҲ|^Z͒FB?,,K)z{##zbq _PJn}=n{|Kx?jZn}2^P^ow-_hhF,NW),47=WɃۙ)0E؅k gߎ:B盖pӽǮLJB晪u]Gu~LLN9ޑ<l6.t9/tx?C_vIW}oF}2g?s>B]_ˇ/6';0۾%xYMiⱹgeԝX.\O Qx~u6m1{ҥ\Nϯ]ڽx`e=B FmW-71q7XBM̢ʰۆ-%;uU"Duafu-g|Gs{аY} $"Kd4#hpJS ][O;xd)R! -Ug'_^~PM ?9mݜlD ^`3)pNHJ)cMggu^?6Rݩ\WӍ}krVą'~;RۉzP}hQ#P/M}E H`LC/B8tD^ 6ۻgCw;m&○^T:@#A*Rў gd0q on>sgNq V줌Qdޚ [.6-NEMiLA'`4)BU{Qbx#83ʁSKQ\iI-qvj /qN^?y+(m*oy" ?Qw[loņ:#ך#x>ˏ"zr?zjLeKɆxl*3h%S' 7Q fb? ]BKL BM 1G4v.egW.2j/h=Th IJO$)2VHD˹=.|;0َ(=!8k14gg zSzs嵟NטkN?-i}Ҩ*ͅvjuI+ (1`NZa9K+mJV7NL[rOU,x7,yFZBTz9ĕYX%E$R#zX[G +;ɄL9qdJ3AXa֖I2|YYΪvٲygz%c >'K'gems6 u[ "ȹن(U{Me3@M~)9% r4hlp{3uI_-ąA0!N1Ӧ(~0"74,2rR 4K?y QqE5ؓɺ4L>oGL1`r)3/%H#sNE:1XtCiMc`mVDR!HNMU=;㫦"_e٥O ! 
nյow Gyr;5=*/߄tʊ8tv bC4'b7ފ}hrp$".))L(BxB]?vvVOg{+#QK$C5 Ah)=H0`N?aJ"jZ*r !s99p09 03/\s9EJ2 F;朣!)tu$:S4'pBKia<^\"8\!=@DF[}|1@$K5;з &4rOruQQ?KATO~-S*AV&4ZNsM4prf|_hX"05̵=r\b'* 8X dLaIc+d.bo ?L׺P~<nV#;_i^>s DbXѧȶϷi bÙ W׫uq+hbOuQyY4Q-mY==?>[˅@ϟ }MW|΅0FDdd~w¯?M|\K{&|B߹ճ_W%;+~xL[=_O/ݽ &]q)=7E/e7: :zC)UJX>j\0So*@fP,,ڌyβ`##s&`Qb,8)ŰH86tH1*@qȑB GIpaR:] Ƭ%$D5[va%4Į"Sq Vk0g(4I1HbzV`IJ%8!)XAʝPRkްeֵ^q~: <49 d=J6_CtT)SYe9jKv/J&PP Jk٥rB&%*@ #zr(gDuU+/EZqCUaF?'w.%DD*O|=& [9uGy$(T֎,B\!p)C.H.p:.n?xbD!h ݫ~{e;`T{]ҵ{Z^>lQ%Ǐ3;Py_6ַHĩm+xs7G -E1y8&'&or0‡(uLJVwN֪h{Fݨdizn< bbnenWc~2 ~Z4{[߄!9;rI1'u#Xc(`R8d8^>.qnn,}Bt JT}I-X %ÄvVSDhGьÈ q{׆RRNOS,_u=߰r_ou?3Wh1_n/?f}Os"UuHp]Nհz3K!iNUp0&(G ܁֔bm<y&&q;Fy|YLq0VucF4pٻ7$W~Ydy f;xBȳE/7ٲ$"YլbUhY"28"'Q!s7>1"SLRk#"yB AЊ`YNu-c$ %"j:9Nd 9)%h 3NAxB^C>`%n@=t[knF+L'٠`NE .KVY":2)QA -+L<0ruxs9. ժԡS,su?-Ywzd;u(\IIhLrdWzт#F fX4kAOi&jy"ՠ:)p㯇"8BF rݣBo(}6Ԏ KrIat6d)$z  jR #Rkn: A Yu:׿Su:X٘I4R@DnV KX :xj^COM@ЌA>΃$h <{e%w^g$7-,Ps3 !.IaU7iB4A 'fҟOL+i^o y\H+>&$٤*PޡzpP^ ʼnNPtSp\翦Pɿ $@a iT^V&M>eOM_~M&\ڴ_KB;NIȃW.H×!̘-8V"$}enkNx}|T?뱟%U=кЎZC=jc6EU/HڨR ֵb׵"#BiI *\*0lV KnJjqUxD&ғ"$ϩ_ ъLcMѪPFЄo)~Mf#s0u9YYx¸Cvo^_\~cwxeֺ.nxGo(O'l&5{5I Q &kj0"#<GKq!RD1GM(QDJ%)|i1<$bI&`Jʾ*0uNAa*ЎE'Ti  8ÈM!(i" F{oYFhu5M%fy Lef4SJ9܊(eE('+|d8eEN$h7ӐaɓI4dnMu~bw%.f Oap$AA)  Zh)LR\u޴AcNH"m!BL\&s B+љv❕GA!G VbZ8*՗)'@O01%=i>*6P}'tASMUeF Jl${*p_W7mxVhB:xɢ %[+q>j}*#8?fTd76  CǷwfp ~q^uUD-JD١MBv0T|Lg--PڽYt#:zhzY5x!L$D5i -,}N5 DTL.3Gd79 W\{)#=)i@ApQ 5ZՀ )1E9DEK1ö&DbɉmL¼\_VMW !H.:e'ߑ0YZHk4kI7eeiJjI4kI őb^̧Rnߪ5"SQǥOɯw<3 8njtS })GM'! :ZM1 E:?w./#:&&".qNq={0" Z0esWSρ44q'\nה쌴s'&!T+a41 6ɛbjjAԴ?-ÀO[}Yt!hUTclz d$8 цTIIFne>TFLs=\WIJuBI*cьQ4´I,a>x+p3MvHo;+صQ3835:KzgIr&87hG{F^k8"]QEtz4P]JR{sDӲf9kq2]7<Է͟Ŗ_5E|&H Oyڕ\O<,]DˀqaHqL2SO.^ ^~z~ڎq1. ] ȌnL^%c;o;K5x!vV^rɆwҧNZp az)^\$W<+y}1K 9^tcU&4BiA1ĐSNa&a"^3I+ۅޓ 6J͗|M%j)g~0{cߙj 7x|[7M VÛ?n;:tah,06G y{NGw׏W\|7~+0Off^W/XY/dZW8Ds9ÁJ%>{zKL/c)l{n{n~XN!#s%q%3ˮ;+ˈ|\+XryXBY(V{wYkO2O&*ٵv޹0OJB3n̼0πGƼ/ض{g`!ܰD74g[Nҙ 糡-9|C~&~N?>[䴣ƫOԶ5P>Є0ozEE֨}OP(5&{NE~\=eSaCz_p(ҹ2k"^0)=e7УMzbYse PtR߻{EJp-$Uij 74锂\+CG&^y\^0@ՅGf94WǥKsF:P Bns$ 1@X e \K"ЧA-sL06WY"R{>7V,ϭ \δb(8')~z-"Jpi#)uJoxd28GuGqyנ(H'\ĵ⹑鸹r# 3M@n:g`|_ ,NqN,i$5lU0k׺ AN7>{\,!` }}`:uTTkwʜ1\29e$E2߿_FdInkgu.ܑjF1xtpp 1D$Ga(&ZP"VDol䳕>o o)ŴX,Q:5߬4pxnаيh^ď79͛.Pxr(9]/ M?/Enp%xflǣD!?bɖ|` ;2bd~q4%$1 te = \~93|bbP zWz%*S ѴG v]c 6>EKaDpN&[Ml@dbMsh0)HA]+AUӋL:\mM JK%<wO]kh6`>*Z)]3]ZCl(.(l~(03JO8Y#aX355am ?rJ."`6FN~wP#tߧ7Tgja:PcԺ/O|U9fgۇ'ia:;{vwOve~Sr_޺UjcϚss,\ dRv \zs@4b@!QGu7Z0*>1:8h>>lf=|G4c{z}>nmc[o*p7Ot>`B+u ~FEāØO+g_t tq>hCN`PU/gVg/ oE%=`}0PmoQrջ]/msZw&wem$IzR3 x_=†{fa)q[$}HJ*YEnSŬ/"Ȋ`RڂF l9 ۺq)rme׹NMȢюLJgY\x;f/?\{wCnqw:[TU>$||3@b:q[Bs&%=W5O<LWf'[гko׏woyϗ?@U~ ޖ:g-gH30ǫ4 -7#$aQ {+_EhW=dѷ5B! 3RRh|J90zԀ=+Lxq&Դ#g;^t/ %]ZBoG}y=)nͩ8ᬀ̀A/WT'Zo EV9Oah_jުQTUc&d;5.Wk'Ic,VE;;yis$2V)FxȘF}$JzȉJٲ (զfX]#; cӮ"8qT3Gj'O&ƨn.m fB ΍A}YdT.GRi g 'x>5z6 {~$j)>b˨hO+ySў^?7C?n׭@[ZrҞ] 3 -1  -!|KmRħGtF!9^̨wkh[_]|}oya}+p"{ƭͲ[Wtέ[[RmV j+6_97/4QŨmsWt+9˞oImCƈ-SNO9Qk<Yd4RhJK;hxOO Z=ٮ֮^5t_N)^, ?6]̏ig;Y fI"T}DZ=.k鹩l $AU QG^xsq{ydž_(n6`xԚb̍? &Đ, !?O_sCjCIĈ*0 X.&B'N wSLrRFR-ɵn$(QJ&%"Qo Th.HZD`t$ P+έՆ?;2*A Ju۵fgZDzeA/6G28%fĖ9yA7->3 $jw MWiBM;;qp/*}qptl^UL.Oig M/k nO]O(ǂf~d27m`WI. 
msJ:;da䯳BŊi-6q=Rt B&#oe.07p ^')y&!ɱ-Sf|VYPӔY `8e|T=xM)񢸖ZQjFZ/i6_rd&Y 襖5}%pG* eJsUR0MzzrT903N4K,ofwoIK9n.ܱ*/t7Y廓/∽o7沘gپU_gVkk,\7hD{;7?p=&% O.?R9'fS1) y"> fr o5F/OxfXZS eJ}S: &[QK2ھj8 FihkᔔO"IAa-,O"eFx@b=ʧЭe-OxkYKmwjd4e pj+254F-̯c>)@6gcM-ш$ lp, 3&\,M-m$ [8Zŵc 6q7.p {| T/p{X?WC@8nZE4} 'X_Mt8Z'"[LQ{pz*kDMBCLhq{N+h(: 5WWTܳ&y_]D>ntd@%'hZ{HhAYh"%hJ p"& ob*`&TY#gP.àlDA,Fm]wя&b+ tq(EMH*Figh0 ErꊢEYW|_ݴ(h7Ug2%\QT {+G3o9¶L0YSR /QO<,x@@sgx}A }h+kr"G:rcسkmdA`X(KDDE4Pe:z4M:@/]$<"C5ĵ&Z"Ǩ7:$ ?*s0=gk{1FzE=#zm:%hTNVT 2(<]fslZ96gRmk#.!-4LiCz4%0TÓb,\KPݰa%.! -1Qn|m\2;3/YɺxISjGqúnz3#&&Ub 9|9s01-6I511.Y ,DC3ν QaA"5I:%Wpx\_H7VSUvqT)9R$t+1/kNA)pwR)_q!3x7,%5h8\ȔԵ 6NȠtsLQ% v렶Qb2r '~-5y5Q1z=h_ec qCf1%N"G6s5Q@6D21}a Dp̣Q{bq j5)CfP VN2*IO15Ud[Csb\ф#H~ U5ntfl3Y5נ֘)B86CjKapӧǩ(hD˽#B\Ӭ& (/Z4PЉ+[0Y.F`Qm V TP9T TJ}ikB%jlÙ,odgƯRywˆi`z;8n)\Mk}tPXoti$mJؗssU~%.ʭ&~zŝ-8]1зm>]O_l%kK.G:4ɑ[gh,/$ő]>-,/Ny_ kke;g6ɓgO6qg5~ҚI8o#o3@iuh|n|xYǍl)pM3tj3Pճ}KU # #R<|` rR3TBYU/dO2Dപ2Tѭn-+\d-@| czup4E牡kZdm;?6yRXb)O68 Ndd`5AϥDdmO.Uĉ+( di[Q슯l h}zvx#jvWZTYquM6%kKuNL>g wر'J)8Bw>Hd'mvn 鬣QG/9aKH& /4 y!?i:7u>w!w3Xkp?M¼8k{ oCU ,MnSvGm+iyஞPTi)d7_hkM/Zf]TvNFSMe9ʁqzZr++W.뜇+żf0yȭtmlDxI|?G@0i== VmgFGjXlSnq_n%8pl cCaqpmhi[]3}Gح8lF^ڴ;e[МζkPiѳwm%w3 V,e˵e%T&ny۩51Wtj |x&F | ʽ 6uelJc5{|5Wc|Ʀ;9+U9 ۙ;1⤹ =Fy[mQ GM6."-9mmNɯOQxoG!ǀŞ$*c3Jh^];k=/͋eǖĨF1qOC7ިťF+ot6{z&%K [M,7*ma[eI Y5-YUQ]L#ph{( ^jr8Kҩm^kں+m`K*\SnTaAUX C*F'JpVeBsBӔ(DXAlr%\$JG]R\ZWتhNDjN5T?R2S) m4-7ogg8Wo@:Z Nu:UXkg e$9X=(J@].GyleQ5Q&; b Rn 2%I5;JKU>2xĐ'ue[j`X_J}v$`t0L͞zs^(lbЎ~8z/q:>Ma`|\Xdr?1~+o/f=0CfoTL@fd'[>M?^:'ɮM`7/gs0"8-˜dg33o4dGMh%ʝUwATGv8inӴwFz:,+7 6qɦKH帾]&Kmq۶=\aӶqU)VO. HN$?5SS3A@֍ɯC@&}n(@ kV) 9!?XCe)xВ 8 %et Zd\ptb yYb\ Y.%g[TAXlP]JI߷5 h Q0lJNN&ljD*Uz7%o{6k?.$Hf@wLRKAXVbCaYeXVlNsT5ͲZAcYV밐DؔAfM@;ޭ*1Se3elͻUFz:,+7mJ¼MUҭU^IF0Ҍo*AM(HU3(+%c-y.8ˍY*SGaJMosf JJhby怙&,L Gxv-Qg߭"9D-:(cujO`RMFKG3ma9:Pݘn֪Ӕ=_6.*I^*ُu^b(5lq)e'[Jٷ<'J6 G@61.0RpS vֆ)ZB 浗y[tܦX4żyJd6bIEJP7ȋf0tP^5 78#7LuJOciг8&"% 69:nt1WpDu:e%0W[e?98/j?ͷ~ X砗Y|zl2e:͟$† _6||j3Jקsȱ4e)w`cT;R*JH!d*[=.CYVVtTҫz޼Ry]-κ5oA{ #g@"iQznG*ĵz5.%8 {(tH;%Jg F*vÎ!cߖ*n]C&d\ jW65ӚfiZ ; PJvYΟW?dҟדOZNbqIL-Ib첪`(RuK<_Ҩr={0ji^=0KKr42F>}~./s &%x xmb @I~W_7}6Ǔ\ Q&O'LhLJзԀw=o^=yFHD}Ep,(dr (Addl⇋C0͍sQ&ǻ0k*MnCBfFVʛ|}1nTbo2~PVSČFtdﮝz+Aח\(R2z Dt0&KS(˜DRĥL&Y/?qޣ:CR"Mw\ X÷+w`NǩaGF0u8}@!G8Lvk.MB;o8AG~d bb.9I! 
_Bc؂wx%n9AMU@u!7NH#kɶOo V> v`O%")bzL'߲:ZILɧ,c.q~2_}srHa'ŀ V#;a}8b%-gKqKfBtqꚂ@{ tThB!%c(F|RP kHsfWA&t_ -P#h2O-tGL]J>B>̏#s0/:9OpȜ"Qjh),&+x<ɦRTHʉL&)g1@Djц50U)Hu*lyBͻOOҿ{L&&P˽-&naFU`Rxܙƿ /-dhN͛g~UT>)'Ryo= @,&z'XDI˩7{ͬ{ٷO{=z{3L^C\]xqW^|gVTw|}_}wūOt p y߻ˋ1xaQ0&Cf[Ga}Wg)^?҇if4E̻o;oBͻwaY{g* Kdj%͌s֣3qbCHGI\<3|FYBet~U'`A0ɽ;{͔MٗroLƷEGOfq45Gqi_юqF?B|2,d4gο5-(QV S,WzB!l}3VFhuVhuVhuVۣUc`$[*mr4$/pa| 㝫@zqDy^3PpЙmKܩS۰sIp8|ڗS~AK [#9(G#-cjQ1`2K!v"D;NkiIR C"ڸf!FÄN\]Bu//j3Ԍfl|ۛ|_f0=ܛš6DÜ)ngA5SeBQo(JBSr 00U",a`xFkJXpQ9Mf՗H2 "ZwC߽}:wd^L +$ dHj缃HA(",0-ZY|KcɼިW՛W\AĎ+HeOûc؏W] 0@|!=3om_P4;ui)hm س_biz-ajdz[_[=: ȁ >WE(!}م~ke%R\O;= {PY{X]Pe [ *jڪvMu],6#B^#iVsuYдuEȰMsBU( 7ņEG -k#Hw<j48Vj0 kOUڝ(DAi֦ʗJ#JN:xdd5[N¦=hӳ&$WL?}dx$UUM;k[D9:La|&Yc8=vY_~ZhB,lfz!ѦR/\\FڼE.[NiL C#[bi-  >x9q`ӥAxPLo0NNnH2G~w*^1.ł2Nw@.hEx=2Q_}!Q̧VZH:4F[H7$AB)$srT(2­b ]y)](̋l4Hl+BjFFUnY2G9k\ccfMcq!%b wNܡqN 53rjyV9~,vvSr>VܮvWUMʇ<^c.K;ZZ_tEܔ0ĴhJM-d0!1Dsl(4rYD+4c|blmfN~G8q׹$AAVtiGt3zqF8 w 8"F!1]aoEv.eJDS\fؓFƀ.5y6X st@jޭ2 7jlegk|w*+H[)eOu{z VZp|!P [(eq`1ǖpZ̻ ++Gap-1 5'/−FPP%bbJ8az rϴ8@ts G}@ωC(q(?V5ޙ+|v.b%k>ט5FaWyФBi9Iݔ{ڰO#Zc0*kzx-m^HBFVzh`F آ`]Z'5ZQpȅafs+FA+Q*B$JXB)qޔ8oJhKh;Dl*em<E#f܀ aSF 㭉NsŝWj'R;`6 +ד WϘe2C cCdoDŽ&!IG L<$\ RWVMZ.,S?9lj5\$cFz9Ւc0UjC>5V鐻O~Yё΋/"oU򩋞ByFI6EPMCc؜5G#5$PY} UQ)OTmAhNj^%\\IiYv֒ҦB<@')&,䤑|jMCgRFɥ27F)R]SL?ǣ߽h+wZT)2^N-5HX316eZj$${Mr3;lFUYc 4`m29G+0IXJ)OFGCL#IXS.٩Jvn'd h4}Fb =gEș [uKa& {j.n0q"_bC(.FpnEkkvԊ 5:y0ðQH!AgL%c .bTG)9߳4%җsJ'{yɼ]b5u;[4]aŢݽ9Qor=`1o\_3_ăÜ&j FcPʋ:!G3#P-T$qZk72c:8j&9?y&~ܼ3vB7[H/ok>~Pj{̘kET!aU 0"g.]L^DWq@tbNaT)HHd"*1I`g|z'U._4Lʑ $BHy0j2W* kP3KMM(Vր(Fǔ1\ۙ\&)oAn >Dh OiBL93|x̮5c]:Qhqؕ&a2f<˨%&EA`(_(p!ye*lz/'#d}$N Гm:jPToOb&b)<yk檕zqD# I݁8JIy q$GFtᾊ-W8!oGڗoI2FC4b'j7'Q >Q0xL8ϻzbHH,nU"tsl41Wbd~{օ􆦹zAj˟D&RZp͛<z`@f"GCt}K'=x! O$b>ɠc @0#補hY Lh" \׏*`Riԁld#g)x`٩FZ4r;^Ø>Ydh@WmBզ(}N.bVthpY80)p& ";2kˤ4j~TTp"_ِtDgp)H`@`ёJ!52Âus+pr!?\@)^dоwo,Ƿ$(͕vn_^zP[H=#{yf_NP]Ct?Q\&U9 @7ϰ'D zBrkTTUlG/+1J|Fi2 9P+q&H!})9Җj}d† z4+myoq~ vs[c|?]_A5[@nveT@a4watݾzk\?[f5nRXi8X\: [(8{.WY NjwaeO1N>nzvfn'5>2*GA2Dw$F y8bZ=:kRm3Lhw~U(`X ~HVdgpi+\sO~I%ՋROylP}'ILW{6KծbqboΏC',jj-Wn.jh4z/HaP*nmJk ښ9Jء>b6y^,^o.3eTS.E&!: V3\ǯQLi͚"kQ񂩦uWQzT7[a'miZz{ӴP^UxqsPi:l\yWzqSc /+#c^Ntj, 4)bfpW[ϝ&'[ !g=LNb'[X;,0|5ZZG^I(Cj€Ffnz: P/:x }{1BJt@[ffm֣rAE\y-ʎ ݭTok=r'L4MZlӺH`mKg R0z笕u{Hs*x^,Daƴ!{âij%8 ՌT,,k¢kz# K>>h*|,²Υ,,Y 9c`gwqod݋/iNbN>8̸Gm]}=p0L]~==pTsstdQUQu8]h%iaE+R+|ӥ vPյ-U]Zˏ=%0wqȐ+>ݹQsXl& zCe/rTt7?d y}k^v[j$0ݹ0Z|[ dџϚ}v?p9ҦG=I!0̚5)8BQ^dM ɮ{mڒCSJ9M 5#iRpPάP&GbdJH$L6Pt0?)z)˩m#XuXS5=ϓ| pڟLfAj$E׋H,V*[6xBsƘӂq4l&B9(A!aBtMVn~UM?l5WW8[cXRg%;/aP)kY},5-9y@!lʨ5- X%Ҵ _2֪pU O7FU/yˬP:)S}i0#,xm,D΄GRYՈ4_&ŁA>a8PRʋƘQ8R\'NBJe\05RNH*=Wfq١WtO./x_ lڶR RA-d_hEzb0+)OjҸFkwt蛉 RltT.v[IevJath1=V#B7E`<>ui"i9:!oZ1$$Dtt \H^:P'=W7|qK89%R>h)ayfRBIgDAK A k䀠j=  O_c7Vj_{lbظǶ7`^noeC*ٓZCbgͿ3hϐI+HEGͦx~ش[N™Aed@UYS K1bFVv8@w߾68<~%|뀷?f# NddۻwS~&)Z~p]Cr$}t_6u~Ow"g9\RS#eq ^I^MQ:JW{w Z_~[㝷m Z"4kmF/~ p fcd+N&GdZ[mXn5*Vz쁴] |a5W@Kk.L vb:R)'%'I`A4u;Y,I`WP +U3+i:uBbյEHbNp-F~+D+iuB"ASW|HSf=$FʨR"!k,""u-3 qM؟]T[4`&1 )҆I8`<(o&UoUxPg10?jáV>DT%m0Nհ,=F@:3n1X鞞RAAeFCa0}9, 7_e#Rf|v;>,PwqJS| і sNgC"?O춑ALѱcVO7;MW\5Q(Zhӵm1E!6YH_,6xmYYEg;w9~YԨońc hqr:w ^]?TѪ_72:ʃEtkbǩCEZ'ˈ`RP" /9AQ _m^Kxty[U=ckKPsA{? r 8zI_ZSgPQ=o¬iY=)lZp1DQpR8X||;㒊%݅HR%F*,u dc,FEGSP *i}]ߟ_B0*ꉽ\L'x͂j%ae)!]X)ΔNԙESbՇ8Ӛќ3f/9l1Ti%S D`Xb5dqVkyғ[^zQ -I )Pg UX0DX9 Kw`Ӑ*6gD{ZG9Bu:| F u5ۜ@a 5JB9cޔkwr! 
B`0,$E BI0B&XPIҝ uMuXB|mD?҅*O}=?fv% WcoeJw.I2$;}/U1Gڡ׆/-z,IX߲fU׋ٝG?>9JULX:&^~ OkVg/h˥V1IbV(p`2 -FT6u.A# #5:!(?^u4}ޣIF[XWL E Ēv l@ Ic 2#z$Wy] `&2(j)5\0-/5tţNv( BZrtrt.5MEu[zjx3I9K !}by&L3Xu "Y]נGs Q8>Z:tn`G獬+{ şwq-rȼI]4?yb1W,];(1!#V}꾞: ,.a0KP(l 0 e=X07uP![z@ZO'!lKy7a=9NW/bp+^؜/oW l*qUϊy/#BڗI`-`=dY\`oTc4 ɶkdlO*!c sk,IΉpyNQHC94J P! SdA.1T9+wTQhZ0PZ^c~/Z#\jST9D-PPKD49p9 * BQ5%uI|[>F:Jl)ApϸP}Ƽ@:p4p3(|SI\ fn;I椉/ b_ hWAK"E'Cbt ZB˹ɂb W(/RpieP)XeL#"ђUЂhK ;$[fDn^Lbݺylwfkl~|s{+hs*m=#DD^ t*I\GNJr@., sFyGqi &=o<+D{=8%;=oz/a^CG J{X߻ `ZD9oN AlCρgCq)DAq\B KٽbhC  u AjH !H?v4i!TO;|MP5X**YkpA'bA/6$`!G [RDNNbMNcS# x]L0Jc$pξna_j`<=kqƭg1?ȭsk3t/B $wD=]cgEZu[Z'K/ا_Z6V9ek&YVt &%v>(G%A]շdpWH:$U29XAC,س.{mKIj`{[EdDp I \ ò*VfF} nIJ1j`yw>nMf ˻P2*fZr38h$nTg?}5 F9˛Be6"#n@A S#mUFEƤVH)b{O- eXNI1,/0+ 0^a\3@K#5PkCT+ Ea%iCBFC UF ,rʡDNeAK173z_ maڢ98'<ׂ 2+00ć\1@9:p)_A"zӏvhn:6jکA0TLU}Ɓ$4'G&PD[5a*]|*MhA0Yg6Y-Vd "p-ߚUO}?Vb1zw[&<=}& 4(&Ud%q ɞ94 v;$$^sTn_xZMbCpA! `y;go!0%\0PUAY?skhsg&Z#wYkl/#(kX#SYpm 4_E%۪k/\A)5sሓ`GZ\t:>FurT : ?!KBXffT)HAnRU'41؈PNks7=g;F~E(p'xboV)<)^y aTϲ`t(M@ެ^;@M/6 Λ- Kpm$}){G  +B1IaI,A#I^^^ơD=Ng( *B +.іc(1@1`SEHJ(GR'-ÈbRf"Jzص ؇SR: a!cbZ~=X$Z`^xe>fSx7忽i[?8ujLޫգ_M'z+c_3``' eֻ=kٓWlr[Rfwv2HV]|,ɪJs'-QJI ۆsg,3Z+q:Jk!g:q ,cFhQzasi;Nbn&H%Mm SLpjM\aւjKb8ɋFw>cfsY 4i]ڋ DrYPEZ䙱]ɩ) PccsjYj jc /&+yQڇ T _RMʰ[ۗ化sVj.2q3&ɜBSYڱbmn^b81pfZG s-5%#hK+g9bb-=axF Qdpje93qzhII-QsNxb(K YX!Cw'Uݞ/W ) ]~ v XSM߲RiICES͏-NQ3 ,\;etU 0DZZk0w yiA4PpGjT;߰FQ]yޞN=0"{R;"7+{#Q Hcnudjm8v?PL(r>AcND)-_,Cά5kɂ$'Jvg|O&:TconZ.qf.3f 1pR39AIy-BQ+eUύV9JO-~(qxe"er?8¡C%B#(Hoqs[qPʙ&e&s HLA 8J|!]jI@|T~ۉ<6Tt|Y҇/&&0x/[w_7?gZ ~i񱝷tadhg~}x(wd~._^0&Sd }CO?@nu?7`ds(ͮyܻq ' Ǯ'⢉5KDTBS8ddwD j@f,q&Y!eNZVZ8fKʈh׆q̎u_0w'x64mhQH%&wIC MdBxa6zLn6iHZ2ҊbmƀIb l0l"%7%G"d28^ {utZ3'0O/ۮKivuw@UhcfJ*=iTރoO'D'/8lS'ǃ(}; ~*Btv&Ε'?u j*UDHV0jYQ;%*S`Miqy Ug֟T[j$vd߸zzCOBL(=릈/5z"*1)Cpyokֆ6CK~.WV'9{ZY;2SNG |rBYiu5^Z]-ipZxXdFPQ%lt<@Z:?a9H9'Á`hHh||U<>b{@w]I L4MTI`÷KȩtOn )D1wPy5ci/SZ:#I^*mc8$Ǧ( "sW 8)1E/%i)gpH+DKpN.f5-JKjʼЊi $+ gz w=BOTx!N8ҢE].d%Hxa$4繲:.(Hy)p2cۋxDy-{2=DI!q%}8R4 `ww5x>qCPk8&>.!ɿi p終aaM{aVp60. ~3J\P19:U2Z(kp'E%ϨPP}" o/;Y.6wo/c]F&b814pSKWS˭'H- 1R '{&wߏ&]!=V嵽AYwAv Xopgyfciw cˑ9ڀ^^*jӧ7lj8Iۃ:"3SNt!u.Ɠ~_@L}8[oRt4Uv:JVFx T*&43RɆJw0kFu68>8xRGi,޺ܑOq]?Rz_uEPk=k6ɒEIJش/Ô U.lĥeȬP/K"__Sfu>Pt$ŷMCĆAQ7AY |FU˥&EX~J<&N`,XGo WGo430??QUƻǫ_s#L oz'߀}CO?Ww!]7G)8! 
vdiWo.^[犁4 .[;IZ]נ;Ԟ7QT)Pi /]~>7F` }B g~~;)*|LUp0rwzwį|wjEhN2(Pyn%fGh?WKyM?Ңָ98H?Ǩ-XCxgU2P+Aוݜ'?H;TLl"`ru'.*H{G"C[ @k%u!]rU,(7*}%gl<:3^05OB6R5z0mRNj=_*!}*bۄa9گ๨ poaF@Պ[el^=85P"x+ahw}PͩyCZO@|mjQN"_Igh A:=AddYzʌ zkA)@<kȡ:PNώ)ħý3q>/FV8X-F:ǠweR2'@Tpr!7ih +r5s\#,i紐h)B3Ӕd{@B 8T̈錮nJ@Θ1ŝCiv.4Zt=@5?oar}S*,ZSh0,S 4:dAJ0yƓG]'4Np† 9_RJ0K[G]-,'ߍD1%[R`&X9`<@p{m R HGָ e:\H8~ϓO W}kW +COt$#]-̺\" gvo?GT  Rm*fV,^1=&2lߪkcנb!7ץ|붋5^ԣԳ:zzbtE=Ru&*VEvI)F/o` rhx XLKpث@(W녟ygFțO;Z凣QQi)xQOH ߡm}w?ka1[Tz] DС5vҨ19//1;س~05~:lU:{|1=;گc*pݥVﵧ]4>0S>%6 cP; 荛3'wO/[[(ɓ7Sn?-ϸ#9vH/ %T>4cm {'c\ *^`Ea ,C{2\6umbF\onǟ޽0*Dͣtن+Kg9%+ٟH)];BrBk8}T Bo:8vFԪ\#@ZLE"Ua U*=c% 4 jQ1#:UNJ_;)Nf#E69Xeo't1s&ٗf3ʪ84S{Y-+ʋf)D>U meZ;+EƼJE2BHI'`銼ܔ_ ?A RP; T%[+A+A8_:͔c/g!E \lICVF RK'B%5LK^ͧCnv?f.^У.SPʁn"(^wr[ĵm[up6;/v&|10}//-A=T%*VګI}`a^q@SN0]vLjxuF' '\MXv<\L&BR*YL4b@(ourAO!J{{\ O5ʆ_<2_You%n?]/J9KKK˺r7-z;7T#&NǨ4 Q$.Sh( %.1eLqZ|-ߐ?>V4Y| j&p~Q W+;^/O.ֆ\Q CQP,/HEba )Jt(ɭ]jn\F.$ypgGݏ_كO' #1"˨wm~Y춷x+ SfX`,vr`olĒgrA/mYZ8ZꯪUEvJ)U,s^JܩjA#A -#Ty&e@W1X1>IvA>Z +ˤ&ȵ|Fz&N9LhƤ&k8iAH¨LQjg]C+469h,ykQ N%D[_Aa5UYkЖ} fB $@txǗQ(6& XØ:raKf,9>!Rc=+ 8csw*zhk5N;Pf1C.@q ^]vꆟ|8mi{8bC_"m]KBx=aVtOAs: ǡo-RbdzC6uؕޅTLؐ = ^ f(IY6`/l< 4dP `8ZzkTFAz#o߳+0g "1&uQ?oBSZ-&HtA;x͠WD^<'}h94Urò~Ood89` z;$kKjI}l$G xABzF 2!%Y}c JMb*g\Q$]qk !RQ:am MwkןDKHZ>e1)\^K+XT P{mTeyY'ejcAΫ*G"T*[BUKǿ9" vâ> VUYqŠ mM Mި2x`\Y2RW2 JEʑRF+S! HSd$,K3֧X W1w%nKhcmez, *fwP KQc[ؒ}WF!+ĄV7S܉*ڹ*jﶋܤ]DlzYV(`LJ4.ek9pWJIX'2d2g[9o/ n:KHvVvG 3E )+层X0"-~؜QfHʣ8Ez՗b1Ư!l3 Uz ڋn}Y?p^ ~n`Iu SyCVDI|Poj(h:X70>5)M1}D]>dM"+OC6Jn/Gqp%[P 9y3."T1_I]xeLJuăRl(^d.֎w@/Ґfp"\O@Ma۱yHtHf1Gi!ORθVJ̆M_SjUŗ-Lcf\ cۃ1`,\Z@U6.^vaʤC;Yk@(5cr٘i&2ߔ4(SqlglGE̫{G&Fs0Ay^nq#t 4 UjgR+&qY@cH?MqC.䫝w?ޥZ{&O.ÒMEcf6s2j~}\փ?yW r|N*A[ԖW ~_AbiϽcJ-z %$PEHqcMbyJ-_'M #h:F X) -FUx:v8HY!QZmb- 6拔Nn4ZT7O8iK3Ŗ7pi6Q \֫0,F01_ӥ)C5K*u}3"3DC<g. Z^=%l8"DF>Syh~q.aB??N:3-yJQ7(JA_%ɬq bX }|}p)8g_L; +T-[jzc]|i0fX)VB2芐dyaSP!6[zWAK ͥ«|w5czH V-@7TQ3_Mq>"cIVI+2 :NQ 9Mv|{/K#$AẼ-MgzG;DҖU*I*nq {#q% EIl1ǹN7d>eshD-xa jthԑK/?}wBzqqCbV0at1Yx-',C_3ƈ-QѴ>hQݷ=gcƬ:ɚY)lb1 a@(.()9)&`65S) I&"pteW%i<[^S媯c Om_pcBpU߸%KkH*@9$EI!t\.,̦]X'U!ԦVKqfK`?^cdʮw{t⨐}= i5Ȏf*=_0hJG)*^:m?1Ih-D>MV=6E9A㳁c}5d!S!–UUxF* 1ީ/ds=]D wb+f|+p9JX3ƞ y̦Y% 5p4幞fޱjbӫ%t"c5`\8ƎKkap]mHU#ޗfqՕwbW+&(N—p=. &sg4htzyïEPE s'N x8뜀wwp7@H+,Z !Ͻe\WQN3%y {3l&ŵf9S^D?i;n 2fȳ$G/7P·|k F p@q0\uߣ3oHbQ|uU+ID{ œ~}t.|_+y tn wi =XLR'4$5LhwiއQiR"P e P"G$D`bVP*CFYX2VO%_M*(1e!8Tz"O9|)6J{LZҭ(c+$#31 T*GZL9$ Xxa,84,ga 6:IfBq1?!uH0``}b*IQ=GѠxAHeLdXeS֨%`[ >VM/>(BA}`s|:˕UUu9w?<I7? Bo|Zqt ևd%\ baxͷh4 î`u&.[KT~@bqѨ7I, M*=E6xv4M CNaiJy@CL; "^`-"u|l:+`H!?C 2"P:cEDNm m ( 3|Sc)6Y C%}f5[j7)&2Hrx[k@=tD)eB#²=0 .u ߛ Ę7Zp**۶)dYGe$mAǒ1<G28h+! 
ZZ XV@mr - eiPwPyGh&l9WJMPy8f{Jv mSDŦLPW8A@z}A=ՎoS٧ZrVwu9Jd5 ڟ BO+Up#HlB?q*>R' E6ex =*Vha)=8F]oðfL m CnܤxG:V Uk5\`Y Ѡ\I= A"cD5UX:=wnAkbo1:i*).ө#+B~RbD˖bkOjGAԮ gCɀ6xbgLnݷQTØ#vi}x<87|?JO$ =pfYrσϋl#4J^+)σwX,u?y G!p[ǩ}[!U+DF!|X!RLm:~NZ YUt}C\4\=z0Mm|6_`(MUwr2TQ&}\yWAˬ".:9WJJhNUΗazLɅEfZjgVQ ,#:^kdg,-0<󌂇%yK\9 P.u[rQB1H(!mO%pW\v>{՚w1^ڣ8U84iI]BzmFQtz.8pZn"F;nʛKD۶ Zw>ߖbZk+>q{EF^lL(-(c7dzMمg1cH>P#3ưu]xآiM YC*&5n =ZୖKRi!SR3LY&)6^yR7zϥ6 ygYʗ JjPّ̳y(8C-Mgy:puDj'۷gf>uiw.ĻUٻy-@ӛOҝ5ZN=R 㝃??Ma\S1KAEK/Α7)vM%S2YR49YכUwӺ(ށKq6(g%|}I!]{\T҂cH бNJo~AuTo:mI*ryls iŬj!/̄D-W#jMWrQA8(=->>0~8AƁ%P0@UV kXI4QCeݚ5lg ?y]`Uuuoq]Baa av ~vo'0"QFD#I S0DLxR)c*S?xrԯl6+}}nG]^)aZX’{f"3Y|vl KC1{NIкD|!v&jGCUpFjU4SΈ+ q̳v;1LD/EhvPMݰ;o +y9d4ChTfS1QA)$)hT9GSl8,{S3i v ^ 51UUnTY$r|b+։]4-;6!]A<ě˿ %ʠ'h"2,jU"/l(b7_s@\y0(75J P(ӻ_'7b@#!%wXPbT g.>s}R9yp&u`s:pͺabǕ3zW뮯ujwk$+Up`(nG2  0^3K5AcQ3%lS~WJ&i(7>RH\] tY5*:d-:Aߖ}VmkKnN}U 8׶$\jOc0f nzLt'sc]:_s«zVݭ/``]-糫thZTv*7.~~y (m`'(LW?gRр㘀eF(mnpJE XQJPД/2FKӎDvFdUZlbIwA ףc2hq eσ0' q8^bg;mg{Kě1rn{:锆go{&9M0je#H_x)QQ{=JR}@epTღF,:a_u_XȘ3ȏSaO^ Usd I|]Ђ72<̀`|eqX[rW汲 8m7"_*q+fxYzٚ)gTEVM]±p)vcsfpY0SB#X9-+JWHD'E?.tuyTuJbT>jE"=] miHOWi-]Q P8Ъt}ortic E6Qg1?OL;>NȴˇN5Z9)V̞^)ut퓗PmG% X^08?S/ӧW 7l.]^_D&ֲ >/o*ZX!/9ѷ3a5>Ї'>yY F* <^&*ikzsʋ^*/zU^ U9|Uft!%} N\,:tbMm '4+`KJBN:?iNk 389v- /1Ϫ55>ZUU뾸D1cG)It7]zsu8ʼO Hێ6Y%FB7BJ':;bwWH( NA'kXfw˱K٢CGTl'\fn~ t50a.@|y"%;U )(&bY:d?.t^!fS=_#`YR @66X 0)-{&w OTŤ#邯ѲU[_SPVLbM(RAX xgXmqZ_e<ڿCH p7#-v/vC5A0`|2w2+ht ^e4Ob}>P5O6ĝTS*˒bVUWxe[ހ1YR]6 `,!.dΕP 5ٶ5X)+n^e̓()A3h5AEg:gd2c#KIn%ux'#.]#T ?b5|>7_O31uDex9/Lxsaāqhï8p-reTψYFQ[)m8 ל+VfʘRQ5NYHE qw{+~ C4 [)+oiΰ~.u3zO{w'[{;ںwFAK~5 }ry6?~~75]5uO2 =gP4GgUT*63Zt͇,:;kr͘Zfޱ>RSd#`&-z>qam݇z[b9fjf~1bQ=;A*)% tZ !YUi>ᰮv\^k;",?6ƭkn?*pϧkp Kl&Zo$(=hENf׬`;)Nfń1h  /S 1]Yw#zȒ9BQ)u rL ZG!oBDv "@[~~N?G!+v3gwJae[]D.Zu$ZVm'fTq^^XHR p2&dݱhY&~:jĴmiɸ, 8m觎TevDt \HYCJVy ^:/'(paO|4*͏N5xk ig6^e#F2;価BO/~|Ջ/Շ/-][Lk5mVϫ鏫bϗ[团mO^|.\s٘>~Y֜֌׫TbmTb ޺PbAtjlr_U_\ g :'{lq2NƝ zK@b>fD:~CQsq(%( 8\\zs?jϨ~ؗu%n^[9=q Ji2ý\ûѲ x#ҶsƎq*`uec@Ӕ Aʪ|UrD)s{T\eE1N"9kzX}ŕK8(5P9(g; 5#W ZR^Rq9b? Blt$Ф39306H[̩yt +iG6ls2Mh6BrYHlG})Cǐsec =.H(,@@BW2^NqyK{.]^M]wq3/jzn^4#2/1z`f}@j|Y5 n9>T_a,jWTBxؒا$*\o\3`b~}ۇH>NJ!Gr=bvJU?Q4%z, ns3x, V'pZ0&B d铕;[vN=‹mV?^"1ek>ǚ!#k:̪}Y)wGhNXZ.֭vy#;M#6eLMYLu'ܦX .mMq.9JҶO1xt&', +uql(8 V5\`YgBMQq^h(bԉ12(wN5I4ye'Uajt7F> u׬jL_shV;/!Z&,6} Áj9db즑NTnX<1l42gwGeZ mAF&CFco?6i tkISiV6~hS djvjqUj8x,>9>QZt,?{6^,_);XNyQ*:-$';$Y.b,A4yKSu.Y3zKI~vkV]?Zr%aɮfŬHQµջ$0˼T$1J1J<#<.vߌ;^Y /UD#% wލ(ՉRjN-(+;щF&RaU!.hzگXj y1Xq4)j+rwj*$ýt# p/5:CќjBJ#H[΋b yBI 啄5V%Fc ,9 $VY0sHh 5MSA[2[Β@S, 9ptAYR򡁳S z?SCeN cޚ*oFzcU 7`Hk䦬z%PH5oG^4cL'Z^ puS.MyёoThtԠ41Ë}ᛐ1jk0x 5A\Ȱ1[,]I&00LJBK;w- R+GT'ih&L\B33Qf^F…jys/Ǔ w@k!#7XR,DE'W=B`o3qƬƙFd6w9q+H:؇UH";$HL-V3"P?jby׳g;C^k*q;FNCgOybټٳuLdMFךobM/r;O8o&A֬ Ɲjq^UWuW++!\Z|`>\|8بprDWiތzF/Aj9ɋik1:UT6tAQ8h )8~JWޕY[`[r"B+vcD*) 8Fh;/й\b+1>Q%FӇ-J Y9tI&i$[MH#E&HL<׭yέ`\BuӐ@eJEPSlH:ĸ#yY眦$;heN˜H bp\cQNv q^5]掂 :. Rz4=zC9G~0VY-8fx1xGT@Ba ,xs5~`˪,T.CqCQ1TBJ3#5ڰttX ̛ ; x=-w1l1X#V Ĭ03($P  J`oir(j g>@UhS mc]69QaFGTR:Qhs`EV"e:}v67yӌvR] 5/!A:-s <5 +BA p!װ1Wc')- ֎t0K,J}gx5S<(f} ܇w?\Ȏua  Pe_?_v.sւ~fG [AKiǯrAG&¯47.u=JC|ZB@AOAsKD 8B4]tnbc :H5VAE@XŚA@ujMJ쭕̥[ X 34&:CBb=>at_AX*s o\`F;L1\Xf-:<)^x'M]HHUh*&(R-{SRuRs8 ؁TAcT :g=uhVXsS;VË +suA;DY!- 0_Jc&~9M"b+AyҬE45 8r-7H }??=0q/Bw;r{˿4Vְw^cO Z Cmf䷇q10W<אkٞXs~LmPkh/g4<>z3IĎiyy^cYk_ZYJ3,SB5$o~fD g~~vΘ@62$ww7?>=m>7~~ZtocW^N{KR+:׋գO Cmt6Y;'XFO8Of! 
cA$ lRGW*uiuU1$YJ;Wl՝3p7عD%)G1VsM;չ̀Z^HX#>oJ% w^9wR5#%0R[>5/힉4tB-%iĝ"kͧLc{CsVqC[E戴k"ڸ`j ÍٕbX֐RTv6gj.%>@ɓoG_3Brb QL؟WH:yDJW; {z 8>8,!H/ҢC>|.꺓ޢOT48Ӿ'aS^|+հDsXI1|[V|^x0[gC-AK]apth5\I[2} %wr}qc@W5rЫ `FP3՚w f{SM4^6J=R|򮁌L0- 6+ bHjSpw O<^t}"ZAx ?qpb0O,|7'9oW/˻S]Hf_4}jps7W"M4|L#j'}/9#_w6t7?3isHB.\DTZۖu#D>XĈNgԱn'tX4Im[S#[ pݐ))S!!J(7 (!HQ&E#knh{-%jx!`kɳKA(3`qm R:;3үKh+įR<|Wi`v :JAq-i]XNTk gkZXRŔlahəb4ݙUw= GFf9F%@"/rSs)LA#HЫU{D;ƭngol0׭ }^D5kD^?Gt||(+c&*}vj9^aûϯ zŭ+'qhlu3ewl<LJ!D7?=r]d~ 73m}f^7> SQ+#Nh:?w `mn<@>k``٠E3*}SÑ*{-.cR4_+ raoʿeLԐ^ݭ2Z12fg[78mpR)u% hŃ9܃4'@0\1Ӱ)!]7iiQ3ZmP͹;<(_\8-pj=n0]<6hCTM^^'~^> ^aeGIgց `1g"W=.d-و'Tۈ4vH:kueT7*P_/FqfR*l3"-ځ3gВ;V_8ƽtY& X7r͙ǚ\sII\,$*zD8+%R2OT 4Ra(Bme Iofj,ZOH}3QRo&COL !6†]yC~q02{ͭ7+^RK%Ks#Hx^Im+F}i}I$Haz`P<Vˮ,/)_-UQ$IAE=#CRȭDfQȇkU%|9z2.Gs@2W9fa]7Z h1I9$++p@87RdaER@Rf`@Q8WyOu'#g@@P$gfuf֙iXgu8끱.cLePi3Be; XВ[.%҂eOjD.v֧~(i֧~K(8g5†9Lt[%301u1 9H0o!hH-)Y8!RjͰ0WL\2_h˓ IA/ *NYfccBYF0XBm#4c"?!2)42˜q 0Adm +-cZDX II(4JWf)ˌK*JJĪdXtIXnhXU)l)<%(pF1@4|X`qP Ǝ8V!@1#  _mŤP/['slLCYYịs?/QN>޼<(4X]qsi^]x~\3 ;j~]& 52s>XF[b?Iwon^~ 1A!ŒAxwS f~Mh.*F+Q{es’˥РaKB*y)@\[z8t0A~;8Ap!Bu 7j=TJ' cK \@Ld{ŋT &2-rczdJ:.W8Y.\5gJGI߹SBAhU 8mKF(y@|y/@BAXawTQl1`PLG ,FAQjl򡽄1f)( `S6&+|*SQ3ch쩂+ ~NU'< RBN3 Q~D2B~_'#e:sz ~lKĞ`5U,ͩ,4 !2n4 AY8'0Be8}B#u3k R"F$pX9AT=I &z7葧?W }m І|"$SMWuv'nĈN7hSJЛvKhvkCBrm,SR ,uN+{S^V7E̾itR;1_FIʧr%vJOeTV;¬et;0'[c#d] N4x7 95R,FP'_1/@R=*o~;W*;k#Vx5d|~HGѻM *zaf~QeG9uF`% ׏{1i;ЖDƤ}ۧYzgO$8$9iB kB3Kрbbcb,5?-ev^np]u9d@лzXh(d2ʘP͗Cu eiBQoAE?xW(+1h9^dW49D)5#@!5 0chsE,jݻ얓Vh._ȓ_ȟy4#(SxQK{ Tf~[`F҅> yqR9ܝ ))ƎTB䡜)D7#Wҥ9a*lw8ܵD?Gq?}JE N*>6 g)m/.¾o=U S(cJD:1 :.^#$(VDq/kж 7 7PU)~TrB:#{?skwՑO'/DlƆ!'bh,xDiNV .1.]A=eOAF^f;H{\(+1l&H7G_;H9^+ }AB,ƷQh?!=<왔Ov(J{)_Ҳ}~Lch ĥq 2#/IKΖh)G;{\w^_fo=8`J0O bAx yPjeHp5+ -0\i)&l|ϳѣp*h֍R3ϭ_\̷E1.AJ =~/qa ݇{{w׋}H'~V)B왟_?% g!p>~q܉ޅ%2.nH.c4^py"DHxòd1~x+0ol> a\& (9ZʥGĶhwW4t'UAtUuwܢjdEF[Njp3ʕQWɕZϕQ[G+1XJCz=g1͔ěHMoՕ;Vpj(C!#+i1D"MRbV80` ҂ _313o]Z-B ›$cq|Lc(3@(w@nh 1}WH" C$ɀ*!#=]swu*q I1F|0_ 0 Sf9D@F#mXYPtwlg rBzm2qb0\4f:SV?ތ[l>[|[ydխ/!) f;{=(HFsZI2Zs}bJZY>;ޤ)D==T\ZZ @ *EEԫ2)├V B@<Nehl&3E 0S y 8(A4+vNX4` p7 styA(FLB:i(`T=ĵ콰%}8_i)_KC0;ӬZ V -GVrҸZg7w}vYkOEy!mEY*kX.q{eC=B%)vK p Oy>?e1ecff29m0]c4ҭAJNFX|zf2 !Et, )b+IZFju\*PļO REV6@-{>wagZa_y} m0c>@].Ԡ\  {(y"2y"FLHkġ,i(*Wvcoً[/nYSV(j2HFkoqaJh"~vÊ->TFo>tYla aܝ2N:;yg9BEə=ŹT؍eQ9); 0qXxo{& ?; 4x PrB#R5)kiIeHj7d)kA;Gv~FwFS/:!Bht [n͚x@A4^w"6qXYd@:lD^vpT) $n^vV;Q)w.cD6{s4f:%4W:e%ie ! 0ca1bS ߲! w'dINpXFHUF<:Q p(/:*=jesCSeSc,Zx'3anhb:Z_VF <$ߟ"%B=وL vԫ>!`Wb-^?wXt@r޴:ԼBk@HVe1֠S`UW ^ kZ݉awgPiý+NM( BU[YsAX H!EY݌yq\ۄሄhM; 1O;^ GBc#W  Q6aЄOC]6bП5TA-g#ۉLEѓ9dv!i۽: *": tdd?rZlLpM+ M00ǵX"}"~c=% Rd$pȳ)pv>s< dS?n7; qඵ¶( ۝WȄ:(7]f< H,dec^ܙLڗ!9//̳QO!_#/<;R|17|rqy=t2%OnÞK[FG 5 L.ܸ'+HiґT} \tm h-%+(u8@\_*lekVp永hVdy HիFmAͼ I )|oϊdT`9Y@;!]|9{=_~8O t&`Ǜ=2nIw]͖PNdJfvo jt>ZFJnF23##b@0N{et$ feЉVLZ **vZ3= #U`O4s#Ci;<D/q0N$k7[ESR+:a#Jh6\E v_Ky\me 2fT#V5(($J9 ϙFH8~5hpMBHѴSbi@a<,qLAjttbtOI=A*zӹIX&dsA񂅓w2Lu S 4mSZ>7\Uဥ_'j[ ||||r SqetC 6R[bxo^Go*: LvaA߉z<_;7q O,h}Vؚ=#WrNr}͂r51X XyJokXљ-<]3s:)GF!Gnxku<}y߭2T)ܶSjPGAdЌ$eLnN,Rw[Zc #)C*DjA-ZB(ZUyEDPjCŬRfBlnm#%,J*IωL*[4nV۴ĩV˃i$ [dz#,HK.Q,&YL%cFMy*!ͮZƜMBL՛g 7Nؙ]lNeӱ;9n'ẍeX#>H>Sjp܅jQL5w ECt_7s{zIή|g8͆JUj,H@KRGKV1oݚ*+M\B\0LiU-87VjudB@NJVie\e<_,};Q][9[$;)0i> { 7|[tvk1ʘٗ.BpX% cg/A4Nޝr>vJuX`'WgT*EېIvC[zm(j3yy H eԙK}%V c>h#`[q͘ĈR4owTsaurwl܇fМtByWdvl{Y&TWu1* ÿ1 )!5w4]̽U_M&9CYk㣊^ˑc,\O`X0cth#!C`6H%n<\mK)%Jۻ?&QB}yK}%RW;ZDIebF4Tp:Sj1Na9)5QؓpGAc *C՚p`ek ./}}vvEx .wo·qNlニQ SZd! 
YHaC܄ Ss4+N^R2 (v] :ȼV°EM@L UL7Yv`ӬA˘rgoU`1jОx#Wib1t%ѩM̱1lM1Ð ebQRBné?|աH7\qm&PSHUj12˰t]uYᇎO󚄄RI/$+i(w3qUu3ydg7 =h,/ ':'֋N%`Sa+=ȷʂ5P=)]Oo-hI.Y%wkFD7jH c o /hk\¨° ZC0>P)!b,qfpg;?-LNe8]xĄԴ6kOsmi {G8FZMsUIs<GC{'7)ۛYt)>>[# gXϳ<=KgyF<ψK ~?N-@? [_mR,|bmdOpCr{[VZ9z';.5 PC5;Eֹ{ۊDŽь2-7: _>,dS+b6ތswvK ]aMhs PrP`>HNu+p->D*2F'Tτ_GChU RH=j"PKI=9~$pmI|>j~6 tI;rw?]^u |5H]n/?\I% }:p㩩ڰjZhq2גIp*BX1VD{NU>D&R2[c9خ銎iZ/MP5ih: P+nR =zTm:C}͹Qr0UC0v@'CeɌtJz7$t܇Te,3XZ_VIMhGaXڏ!璴 1Vxm~`> B p|՝}F@3.E RA::VW}Lp9HtҹIu5oS;x֠ 7zju޿uуfʟnh.V6 2UfpGVׅ|4l0ަOozHedlT>,@&T@2?2 tVr^GUQ+*)& L!!]4>S 3^P˕L?d !=Y,'tOi0y)0vm*9o-6@RwܛNgwJ)6xtMu4EZEOsX6~}1:ZFWZ:$WuزJ,ިNr}_SC% 2Sŀ-;|'H& ,^1Z|Etŕ$ ASJ=T:3BkYZ(⥍Hָt.XF3BsK~b oK*m+8& 8ÜIE6)$sIZM qxrU[jr}i1K` 1!$B.%7ij1:i:H &JΩ4(MYQYScoXMp/Ggg;IV2Fڒo6@I;')Dd YLLI**bK `96IqGĥHcB"=epdU`z`gUx"\sϥALc - Ϝ{ $9DFOJTp$%M`Rhaܝ)[@>Wq' MX¡`ik,ggLӋ\;RR-,s2qB`5HeX[ÒQxeevR7g'h'4q,܆v8;Ȯ* 4QDʳR{CM3R $* E]C\pZpM)4Ӻ e|&Ov dE0Ru,dW2xNoM0]5LK&)[LS_~ƪ1]LKjj 3װE?%T 'l5jurV/VOF^s}thB#QI󬵔>, d!  D4Iy8[N)UPPh5l]1M1MygHe/Gc U)˜O 6o=9of~A{y]-L~/!Sstr؅]oz <Ľm5i80ORhtt䆩$>V$Uq8 RR _Nڃ\[vt$N4w}!p滦yY&5i_-^0鄯knrQm&%PNd4e}NfcDȸ(g4އTXzt 0^8o aP "%nVұ|wQկtet:4iJf-\U 7$BMF= չX,.FxTJ81'`ْ`Br!i;0mO,{&?I^sȻ0@V8AiZ"3O^+A0'uQZuR$@$JABjB}x.Å $Ea)hGD}[@БBJ8X9s. :xH0_wKC$@.ñ&n#l"vVJa&(E"2FsAst*flio0$al^lJT;T$0$d%+ϙH)&Wow^C 0t7&Y"0&+4Q{Og-C#jJWVAgYVDސBW&J4p \e/9* wNC\rk980cjVF-A"e ieUr Ɋ%' ^x *("E[Hf _d)3h7%;AyzufwI:z‘$R:";?X}.tq\6PD>BLo/!Na-VhH}{eks0tp B 1D  B9ief._l\ޗ*Fn0,"#\)*p> N:az1Sf //yiwgK" "z g7n` "]d,"0UG'e˺CTN Rs6] og?uN ]atQ,8 6?LB#CjdzRx{v>} TTѻ_Х OC;AC 'ShNpT8)U =y7)v ]\8wSS}qͦ?]oJVROcSbMҶvRXI|y*-JÀaBûR+$6%+.^#RLʚ& W^ TNH*I;<*JE" *y_-pV؍Gzq=qc:8s^X-N")ň͐_WjQʂkМ]НgV~u}f_ 5@@ap]o tz)(% YdYHzUx+lOգK]Kt} \yYw>P\!ZdVUwR Tήu|w.sŵlFeW̬|d W-ɸ]vM[>m õ鶴;ܼ5Ar_Z2M,YWG.זu׊i-Ε+ @([R0g:4Hƍ]eZfZtVCZzhw?ZǨq W\FlAE Z!meC Ŵ=#FW.Zs)H$T Dd%ΗKTT0"f[ꠀjKN%-q"kr*$J[*1n> 8vn"Y1yՓIaϗוpX~/aatcthP[O/Ķnp 28;>m/#Ǥ%Y[Dg l)^vAJ Zd2iAiCߌhu(VWPAw|"8BtՎ?uӇhq:OzF#DLg_ϭ7i &g=Oe6}? "װ-ciشoΉzr]E<ܜm}W|U(ğ{ ۼ;)>cK&ԝ|;a}Ht))lMd۸~Q;.?zg61ިX]ȍ/=?k^|{[ۍ??-QY4~ȓ21}Zy@a_ܜ^yyj6g'y8Fc4 .F{_c/_4߼>h~BU# _OΞyC_w莡6/勗{_5_yټxHgz4kyꟘW7o_~~泏駝?5+Kׇ?HA/ssj~8ͅyx ۴=5$W[w>o]lylttw3ӛtS~XNy0r24^dNz©xC}骎D+?8/65[#Dl/O΅N\9{͢fu|c`.ͳ}'. }i~hy22Uo~W[781fj[?O/0}}|K6A6Mbk/~yu6}K" ǵ3 ^tjumws4OKb{iEGM޵u$ٿՇ kxOĆ`7.iDq9}IQԃGl$+8U]?OG+ytG\I~{3I7?tF>=;9z?7B˳}cԿJ&K0`9 Gɢ^<~ݯS F|2O^=;']O?L >DI4Oʏ 5r@\*lq/>|O}}k8*a?} f|?e O~0h2ǣ%/~Mҧ-ޏ:8A.5inUOa`ջp_|~VE{N?AtSQT~bMVnʯV"6yP;~H{sx?2.ǂи!mgѾ&]Ԯa6)Q6bE~ۮ'Ju6M5TT53N$vBĖsЪY\ѫWVOO4-06n-gxa{ [g#|7%ׯoh}{I{/\n4܅А̢+V.lDs:? O CS6nhnO &ssi(sH)?r4 ˢdIZ?4Ei0I%7J 8:FT2m[ ?ݤUPiMɪJhR("''Q(2׻9n}LKԋ+Ȧp =`ojʘ5֎m`̗(n V b~[e Һ( /HX*Fik XHf)}`Qn)z#/c\xSJA,)U3vBhD ::\倜&!ue'vu<+?=ڛ+~X|y8Џ /{lB=ϖkI`xlKn^|G٭tݠn'plzx*~:c'5V7{{0^e /V]w~׊JׂE[TU|g3|Uy&gy&fwy&<]לgr!mu6gr/gx39m3Y+.y&Wh3,3*]_a5VlC|lugG晘bLvy&Ի\ )(So4JZP1԰m+&grS ~-y&&"햜 7q6p.j_NX[+=} ǜF>%>Y]'/b?+8'a_iyvq,p##]yv]$wɽ818DՀ,6|7f(/M978xɂ$ hs7oͩOI˄;W:yvfnR8wjBMf}8d4^դVJ)Js.buA&Ԇ~d3n!Sj18"4]iZp>,K !'!H{KZK⹈":M*:2|l9m5/Jhcg+Ah #Y)RzmK;ʶ{GpmhgB砖{jnpFfh<8m۪$~0: ݺsi{|Kη-OF+g'KtMO?cOo0ULmE>q<ݟ@QbN? <>/M<7hNv跇p#80R?.R=?|?;qRbQa~;MTӃG8vPdxU^zR[ˣygl_|AOMEc ]}yNMRݼ|AYn~zuxy⧃Dw#_r;ULtE޽kJR({wЀa]ն؟(x ,UgF~ޡ&z^Ӝ m@/e~9ucu'eJE={^T)J6wwu.C6;dC6;dC6fB9w\l隉)5Pl!%cJPrpT>۹u3.Vs|9rB]MV0J'5cJk.,&^_a[ 6|&p+:+6y [L->G(༟&ܠ}ԛ^FgwL`E ~ӫ?~~Ei>c=6!ƹp? 
qMmֵGVb\L شpǠ2ऑ֥vV$wY2d2~h)K |#N`*{/r=jwŚ&TpDM} <|̻#IVE[r޴1 p$2of,'w̥tƥhuE)1Y]ґȁ'K}^r9K>P(19!9MQ3;U̜'bQeESbVX Vlk-.Ke g$3o4Du9 'V+@W&lSLXf V 5Ĺڙ5V9wӊ4B ށֳ.2a@cci ,qQiQ]$_\fOGU3ͳ[AASYauTF1bL.8 @v* +_EI]`2>I~hXuv^ XcڤVX=@-IaYB(yd#HxE UVg!b;S Z[1g؀A# 5o ;`87An'٢o५dYbXgԶklp@I@#Hp0ʅJ5i f؟ދh]%@LB E`uϲ1RqǸPr&֎ "|5r.inX#ժQwChn^s R,EG]x-%fI`dZd#bB+kjl>VEFxԳdM8F|DL- zU$ $@>^BZ3RPJaz_2 Y$AI2(pxJ]TM#}_˖EW<$Dqxx6=0/y m`Q NForUUL+El~JF+Wuecj]^..ya"& +BJzdi`;F^EhV0\+ x/8jqE%//)H. odioEh4(! $v Θd䯷 D.@Ä): Pu*an"ЉNs>'du@XR9݂befK2>/nѯgЉ [nF/ P;l+lzC׳7ͿT!mzap^39Dꯖp1W5[q7{F]P߶yo}_\eWE e!7f@B姷`vT0g@h0"P2 yeK{]*5WA]9a +:fzAMG UA}.s9yv<W&[#1<v?o7/:<ػEx-kɏ[kzbj/U"B11ӵ!m"^6Nh=X"9J|D11Ѹ:0!\c* [c1].𒘗{=3d@K"z[8BUZٛ!iH9d|EZnLOpTL)/7.4zh1X|̬a|!ET7$I#ԉ }uDz@wF'c x)rE;8j ų?ҳ??nϾ6fj2E*w/fUFϏJ&9k|Y&VcrKJsr9OmQ~ hWz?}F|^3 R# Mtg(UUI8ƻ JI0>,/`K X#S.7MΞj;Am91Kq>e`޲ 1-cmc˕YÞ醋ه :aI+ ej3:,aJUw."sˋEIbD{tC&䗟\~'Œ|'F0&_;a/:ʏ7).~^'oͪb%Ilţ yԨQRow^sPvO1]ǨǴnF}4$8 sў\ M4Ph QBVt0|p@H.dxy|"f QƩ)+_<"x^b`M&`iD ;`kų0QRGپq@76iUKkDHB"~`;f* 4 $aD*KCu$X*"#7Qa',i'OJ(Zy iZ/κ2*4~Pg1p/3 .RYȱߙrnKoHosgߔ糟\GhEv7咊q5t0-5Mh Uq1r:E %Q K[4cH`k422Pf\kt$ ` IL4p$أrf *0EȽOT N1H51=WJ ( ģ;P QS㤽P 02*811u!8DY!B%@Y8H5 L/%!c;A(rO {,bC]/&.4 cKDd<.62+= "Xp)(`u/RvPL5G0"fрn1(AFAAL #"zj42fN;d6G%ًe`X@S2,B%)Tj‡Ai/hCRôpU\=e+ypV#"(Aл6lE(?ЪDRЪ6ZtZmZZզpZZVsZ'& :iUmסNL5E[~ZЪvԡ%O jAZV-O joD ZF30_.q! cJ z }8>p0Ar Ti%u ګ4d'q$cJz/ Pޢ;^^ͬO0db cff=F @|wҀhg YͬA) Iǡ֖LpPeT ) t*)qXb䄃Jf%Rhg0 q ^A+,p(aY0a`D(p"15@5 o>A\` (V`V5Q\Ā࠻{v?Ё&N/fprn;ޜÇ|/cwW\OA5ԗ״WRwf4F='9\L)B$S `yudz!#$_=i*vшv\^N'}Y\~uW7xSKyƴkݢLD{8 j (fmS'Hę qh4B󇿪i0%Rlj({rF=Q`c4fL2-:!Pj@P #揅UPs5t@wm u$:Y,bg4go_PF˝Ӡ{Mv[0vcãb8"GV4C0gopIO]*AmcZkچD$!CF!gq#fsTD V "5aB t b]`Ԏ8& op?LGb(=촣mT~Tc )("p_*G\3LwLm]L uhA$I1l\2l[526qUќhޢ"RN-C1n#@yJ#i#`˗N"6Z@Du:쵴QZ%9{fژ$kz(;igvugms֠G̦WֵU xG%MME‡hO=dgڜaYVs(F> 0eC)JMO(GA!O4C"Y7$PE"$&P"vZ1!eb SX)7 KA*ű IP;F :s@t[q/F7ǑEhq `R荻M&2QR".+gQn9WfM `^Qhm9qE~M.eT󔘥H VTH("["<{5=ݠc_R)SY>B9i4 eG.\zj>ּ7HcaHL$Z,F4uZ&2=԰7xY;F0q% ȄJd%zcŃXzIK@K0^|XĤny(1҈ 4I@1t%ze@E5/QO006HE0&<^*(C OBI6 YbiLwf6|JGRhL4N$IrRa\b5'`s|.MkVU5:xRbNsłXb)2=Աo>#}=oQ۵{M)vBƌC`ľ9t !BX֭2z+b d)Iskŗ衎Ń!==qz@mQH6KVp=ݝJ$,?V01փ>X3X`{,Җ ZE79labkEѰ?!(n &ąg`'ˡ?.uFJsނcwb^5:{хI0̂ء %1 ?ݨ;Ą ɼvJPŝ1 ŞS hc5႞KtbG61ʹ q2ZČH%"4sS d˯lWbP?uOi׮-NH fţ ~9l^@R1.Ii,<iڄ"ޏCmIR$kC5 r8f! ;+"&+-C|?ȝ*$O(HxXK/d.EUBLzJHӗ`,li ^1 c{0ՙXO2&,~P_OI1I M*w2u-'rwd"Mc, yř S#s.A |/Ģu^k7Ci 8.34RPr`*6`}TEog{>ȢdFݍOOb8O,1$\sABC`T!X7Ɠ9$%hSdµkEi Kz>žz/_CsnOYvT+ _Y7E F F?.~{/Ah磪1aq)Ϣ,PoZдi!kCo{OE/̜lyw"gb*]A*%ܾ`TԦf<$_iO(V^90 ri RS.6V M"-uszsc?V6w Jqe 眈80&L sN1\4+W4{S/e)!5^sXghp',umQ+6ɓ\38LLƮ: ;Da)fAN Gh0K0.g)nAdIL7 8ݤ[5kMd><ԓP)!F)EScb"Ӱsf%Nb@Lh%)N(H!9`RI ' %5E0j"wNbYIދ3d!(`,2TlsDY=ٰCޫ9SO'_DRE%B".t꼱,M"Z`!@ywYϑְ9s!k2kVWk`,Tykq ʾS\1[7 Ӽ؀8+R+&Nh}(xD҄NI^I*$a\s뢔h@WAXlץIG[OOķ]T͍@C32 W;%;)VZɓ1!s'LP";Fpc {Ku(VJC." fT dmgľs'ER`1Lt1fU(0@X| J͘P(c+9UX{꽒   3)׍@wc&FL-\UXr8-fz}Bd.>T=Yf2@\grXbi4 $BƊdWJ,u{i"CĀVx c$sčI(XZ'ĉGF$ є2#R*2 ]c&. ^39{y&q. 4I `hXx9JSZF~&JI9 ճ*3TAa-T7.EaM!67Z9] N,6B } QiQTJ@6$x!2%HO@x*pG86ئ)ONO 61 &`簬1uXzc47P5+D`2qvxzs88*g6:QmE(ܟʏ=7=̯s\A[pW ~ =Ī;tEIa/3)9;W]SIC BB&Lt=Hɵ' 4kFk"DKC9G&fQl&*;<(7<ӣ7NNdՊh`\tؙiUs:j4<彣<`gٯzw2#m|2`Swܳǰ~^+_GQ<}DuLQ܍ZAW+hx-ۓӗ'?v;gXat^ ~ .ԗ"Av=q,zsۋ:YtzfQi&v4_B0 aذ^,j8:?9EG_ѣxnثDݞ~{xG *ÒdM Q?gCA.O:x=1_G?L7i0~|ˆ?~E|J.7zW^F?3ytw?D_,`0zv98޽W>Ctv>JĴnZsѯ.Xґ,xn]!'|jgTB˛ΤR5V**6 ϺDtV9et؈xCFy㿾-Rl;w4Zeuc7\3#~S/WHfXge`Pֽ;;,a{V㺔 &Kfo9Dud=igp~ dy"1dË;Kx \&m+Eܦ~v{n9rw]M积<\zxGZ u]7O~=]ϻ~p N>&v'Q{'Ϯv`վ@_\q țCp]s;N26kVtbx뫳#V Kș k_Q ܙּk}g Fy{|>;:fu! 
7 {c_>:l{gLx:qv7 4SguχNu:{`\o{0U0+0vs_9iϾ~*@k>][vKX7{'XAq /gFgm{f>+)ʆ"j?Xq7d;ˠ7:c%Gk4X.d.컯;C@ƮW`߅]E*0V7"cŊ0 GY0'g{=?2>%+=Gn}eΝ~܍^L` bl!W+coX-d>xT<%y#ʮ&(Df'~iwǗc+ D>rcAG-*5Ǐ4c)sOb|MzӷoěV֓o=IzB9όT^K]iz]Ru||;w~6s>%籢[zgAl[k>񚗉YW9%oc~ -x\YX6)O֕kNE\%ϧ΋sODI%mn-v^ޙ$Ih I/2!`&0K`"Ca&%YzfypTZ?FK4Izҳ5 R=!͓&j84y6~3en]F&e'`V~P IaF>{t+TE`mY,`f9e ^ltJ5 ƬP; kƟlO'=-sS΄`F[qLW`^s)dI9)(d3̈́𔥞웨2^LIQ)uט[[|ʆԘSj 玢"rq6c7#I{_ ,;X45SD˛T,yj7NKowb2ҒL0[4i(?4hUfTc* S: ! 䉕{,!k\{MtʅHW(`e] ,@36^6`0E1LypqxQ4v}cv7C0?M{U 2`:vlac4ce|l͖/ FE1n|rps6 tr}RԆ SXyeyJ #lkT0vkE3*/W gPW?'iJJVn 1a1h3`Rh +:Q)쌧B#,E+kn#GٞpC3;v{_668e)J&)x:ox/ h|*HdzTs<__/gFI%-pQ`]QE+P.y^"hWMa~'|}F#HOWjkϔ*9r!tO:v=IA퉃NĔV޿0Bi.5bXz$rjio)ӷ{1I['CNrw3yz(}pf1;' =Ҵw&w \V]K9_LR/ko|E bT* yx329}Td7=Jb7@[&)* U $cHq蜯Wkha8]}iY%MFBDLlZ'1൭%}Gw?LxKwЗ+0 lSL& .wnRחDKPZ_(`m!j#7\9OYO&DSv^9QXK?œl(jD(kIP`AU*jxܠ&Tܩa%Fsү1| J@O⽋cpJn,Zs (@2SGYꅱ 1+ϓ.`\&󷕸=<պ| n?l)j-{ F _XC ׻ߺF/FYs[93|vQ;,Rځ:lR7L "E*a@I T,j{J?y]L*_$1ϧhĨb`*}=(oK?WN7m7W?vlKaq%h[BoSӑfZ"t&lr£H &)ǪtzAegvyRq~rgrdhpG1(Q'KMjol]%GI!d7.gk삙u9D! " $zX&7#~P%2p h; I]ɹBRRťq m_RǕֲH`AsrM`!i0|aG&")RWu~Zz8;uHS'k~#HcO RM&NļНέX*5T bXIB â`;5ᘵD_i*%FEƩ2Y\?4f&.u†VFJgE$78 ejlJ@<( @Ed%H6MaޞF)PI{r7`̚hU XvZzz^~㕴#XkD*"Mc7P4%<^QޏE~ E{wƦ&+rnlxbc_Zأ I?-oQxJ xseb:R0Mv>Kje@̩ƪ7By[:6XG^*[<ߗsV> +*1>`^K&Kʦ_u!5D1 PahCߴSQcL4am6;Nl24EpAU?q ,+RXT!"*X ĩ `iNő,jƗ`MtTUQKhbkk`qoVn?EG(8u揜>*?f8 rReT!"M.;%-픋lb15RP9PVKau F^Ov_?8iaU a&sF}9gJJڬxg,K] 훞.- $^'%y|>ޅ2?$W <17^g9RCYCj.7#DVߚ(ppقQ0qU7(M)cGx[?De[x壖^ JV#5hШ! %$`鸱JlF ܘ ila[ kQLx`<vyb2ҔRJ&-ZK}``EzGQ;UJ֞7"sk:tno6G v4鈝oFMN(=Q|."V/a-U>,j4iP>?CUg^2M&O-O>+E顕+![|^S額s"H"fTI5\\93ągLRQ]Yo$7+B?!e=kX,٧1ْ[WoTUe,,13/`D07+I-ڰtb=a)`=_utocL11R # ^ރ( b5_Etoc c/y7ͫ# ayeI_Ⴧd\5j3tZпZfJ&ujw+KJ]o-w4Crq"6 MB;Tڽ#oSm{=8TaԼZ)(#^[8)t"FDz_ l.[>wc~M(xý7=u.w<\jsv`C7%F-B[+bPbn/xr#K|RD;l[)ʨT(OJL,) 2Sŭ҂;K9D[uo: *Ml58orئ#\SB`Ae)=irVd gpD*VqVCn_QXW'%O@=nYROD 5>9R1u2<fH9o&ꍇh+1EUT5'*P7~G.'v,M#1E- :!BpJ"b:~hʒR2/XYj].-'ڬpIzBQh! a* k X! vJ >y= <5TӊP+x"[Q3䒈re8B"܂b؆*g& = J{0f?1P/^Op5h&?C]t>C(&[Ը36d d2-c{2,H]d;?ی櫋O׋Ҷ/)7 ӏ_]ٕ> ^ݿ4~~Cm{n`Nu&?oWL[}g9&q^jy7+IP1?Zt<'0FSvc'i2F)Ѫ$MSJ'x2`*jۛ ?4S_p^ߕr|9dp:Q`d$IRmH{WW:t5&U6%O &_r !`F}@$ۮ<"^`ʈ/v(:LSS8M3(Q˵\1 ^spdS|Wçxwy=@}X9mn81?X b)hࠏ/1`a=:ۙф)]ɵ_`yʿ]߾|t}wt 9ɏ .\?'t ğZ,w} ׇ7>79L7t9C&CqFWy(*amN|lK^%FAzBȗ/ Gi<$ m -r6o[_w/~-X?O5%kKxûEų$kP| vC'%_ 7 20;G84!|ٹx:8 AƷV,QEG\S ,%P,OĈUY|*e] Q*-(pd?q'K KǛ|BL, =qQ[g`vjF'֙Y]%Ʊ`PD\ke%[@$%,`{CC ,'ۭba0Uȱ/~c/i+POL_%Cmzu2Igu7ΑȗL1kKCjh@_Ξm8nutr3CStМj5gvP Jotn3}6xŧnZƘ!):hN鞶-Y}bH}с19iq3<0CqHgiOQddb@ECX-{49=7H{ywƍxAbIq+gufBrƱp 6pVs:Iō!wG"Np8^'v%<8;Bp[}II5K\3/W)a#c'>0LӻrbB#hyVzlh 4+,iEIzI~ͫzcOhoU.uVzyaTa6qWŷB\apgDĩN8L\UdoLJ5QNi)22 JL#WIe"|z0'.2C˘wk5ewwara19Ӗs{Ŕ9n. 
R<fȑ}`sggO;%x#D腿 {._۸:6]Gp`/du~q+)|y ޓl*RIcZQbo19mt؂Cp\), J|96$޿<=ww>>c6g1BPoQ X1xJg a.۬9TY|, ¥*dSLs8YJ+i)HÛÉ/ʢLGCԽoIPgf`ɓKeip:a9yH|NC++OmBĈ#GP2Ռol d*j>r3Uƿxyx*aY}5OnF:mW0nbh5A Ȩ._Y5rMLȽ܅8^6Me7h?F,:v\c"k+d}nLmݥem"続N7N.wo׈)S/d k Ҵm[%n5nf~ftl]Eduv]FC<k>#i˧;iW%g'853]QN?7 Ec?_+MAF`4э`Ժݪ9v>zw|c1wnA9nF KtzEzh&NZ=ٿY*8$S$}4]n00ld;nɸ v=7ër㈯#p7L@-BP -"4f'=(TGtCU5X7n죻n͎k!5gJ u$`HsX1bKp4gCj/)ėR@ j9) oq/Oĩ(9KJTM/yA3zZ|H( ^ػ^*wjF2[O FՐ 1_ػsՇ*Ɵ0 a8Xɳ9LO~rbe YB+Ǵ:SҸ4KhٍRΕ%4|8.֧#Pjj(yFIyFJŗL>T"=GPSA:¤HIvLqI1Ql3wdRA*CqI1L _=\O5`W]}gsL7'\wvqg$l,eReFkLE.ppz~%Œ[̟[&(c=farv<^`bu`MyRuθ oCɳcbWNAI>NF|H q52I$1*YNH' \߽!sP0%)&SUEI-59|> C,}~a/0n.΅)f:tux:GP,HR6 $PE"$a3BbuP<4U"YՊcaD)L\KFt6LMV6Xw#?~場1 ]gwJv` eQfPQ3H1V 35QDx\Xm|ƥH"[mZX¹R+(de&B0w Â"d\ %ݐ^3kUݩ\aU #T `yV8q# {8e#|[.)C}t H5һlI((H"?39疀m |c y;5&]}trtYHsU_vѴ>]n\$=YlMEgTK"kKрPO݋Ëًwl{+1V,#)س`"8 nGE͇>¤-[ ԹHRh#ͥF9Øp^z<抚X <E@ՠ B[ȝ>fU\ ,t ZG%q\RG])\8`[%TVm{ m^A&ub5V+mnonU"mh n5a Cfe‘YvCQܬ=PaL +3@y\v7jGQ*TLRiPuc &ڛN;VF;Ohg\ݯFs]*.tUq3׵T-ʖָ %aZ0Q0=̽z.UC.cP?b>w\+ZQP.n.jDYNHVWܑOH6]|3>{tk]d}g`krn~u*UXЁ¬E/},fQ7&Wx;u%#__HHFzr)Es5Oo3*<v{곃%t6(po(7*VOj &^h&ר8Ȅuoc&]}HsƦ> ShšZ jQV׏qn~68Wr>Byń+AUY q)!#wh^H}MJqbMc\-qI6m?}LZ,-|*4h+m#m2Z|Q|Fi' J!HIǡI`ը3gFOLH~ cS`{G̵IRDDr)hi1 &uɫap0rC*2x:F+#t>GE3q60|)N.O۶OIÅ<2(IUP;V"+FdEDc0345ζn q'<4Wq99!b6̥.*!`j*w%mGeU?iC1k#Kt*^7eTsA@9KۃH EhL9cHg>ZQԈiHp`( V`<^hZr'|qZ;g d|YYmCRENWg>]]n **buvx}p#ݓXF!($+v ɬU|]璢~Mx4BR0I͞wr{55ȀfS"@1k]`z)o0fYD nRvЎ ө1ojdQtk&4Jо#V&'SNk+T*;8r DYQ}0T_C9H˦xmCEO]p[4Ϥs's2MtO[f[}b3O$ѭXwVIP?kle޺wyw(4폢gSt޺-ywAQ`Kي˙g]q9 C.b.(1"!H"םsj)'`q%ע%S:'UW% mJc?6ŽO;&=A CHk Ej`/SԜ"B+IЧ]RA_=U/w /o窃4@hBϷPV/Bڶ&"S R)b)\a#!(A8g`*T #d?kˆRQ% u_=ND}&D$Z0.FFmo L[QDNX'V<)6uh'v6>Ǖ +X[볆/Or3W}=5ӋIǣb Zh`0]a4Nab4*Qant1h__1oUxd 1yZԹ#0'ӽL4e$L&=IdLi P~Nȫku)?xP@nEO09Y)aZ!d~poEy֬]_YwdT΅`J':PD9X 9WrzxxB#\h 48ցQYCpۋOSd8bM8V%,Z,lY|Fm,L[^|V[C\My!)XE_R%ue額3Z\jg#fE%)Ni 6,r9Bm 6/j{;creR$UX1e9=xɴhy=%֔LRu93ⵒȸ`aZ hoFbfs0(`n~4#zh3:CwN(T Y?}3>iBl㜃f8zB,C8nt )$! Mf p _nSsoxgAX4B)һŅ#Ҕ v@ `Rd *.|hW*5o(ݱZn8;nbgB!-:{b{jAҙ҂v!^}L&8~?^egY-Y`ff#r[^E9 ":E>":v擣{ޚ7 >+q2- M8-3L5K.F2DXw9;2* HX-(D(FH:-ހeG<'XhLXy]oF_`QM]O u]Ԃ"}]r1B¬2諽xIu2[f(i9#HxI(<丁:k6RnCۏ<\K֚,ThfhCcuxU ݛ78ta*5o9epl;6+\/jgz@WK6FyY?Ea&m/$ϗ6Y1X3"Io&W߬J9M]2|Z;z>U-*X_E$oCc[Ds˾d]y< |8ՊIgX6].~*IECee~)=\րZ|uR5@(Sh# C+Q&%i8> $9R`Ggo=$镝7E=Mfs}ﵤK].;PJBpN5d?:^0g>:O*Uq9E_gSt޺-y=PQA]'(LԜ6(&,oNL\kU~?e\=uFSɌOOaB]3@F~Ui/.MG]ɔﯓCx"h66=7)ԙѩ2IxE1hLDy? xphLN?xMq|/_, ~Is1=\ÿGiLl~#0k_+'ja??rx0(O(EuA)}Ld y_I8۽o8~/>:<10vSW%0iS)sPuv0L?'a/* '|8pi1z$6bϡ+3,ayeqbQUgNܖO'D_~*ĺ- xOK.ΗjqgG38\D\cL`Vr⋣_o^ʟ~Uxy\ǯ{I)h2.}nwƜy/xӯ=)gq7{E'C~YxoG/~*+?Sq^9f-9?^L@v6I^J"Rc\,5M9e-k4Aq8_>60HҪpL&6bvt1I*_~{E^~09Ш+]sRJO P/2e{6W71l'։-Rۅǣ/2$5J`0\p7W/k $dqqpqQ >Uŧ?y ENj/?է'~~l2=.ם6U?n~1 .DﹶI~gpq3ifˋͧ"rr5Y~{9Y<ܿlj&WO9E"ʟ7{5~X-nY>2]u~ITC__|ۛrrO)#~Kô-nfWǛK8SQ&w/d~Od?;f.}Ao!MN_IxmHK&(,=ay§gdO_9Vs/Wټ^Ln'S;y͇Bnf_-$w߾~Ű|'\\}%,Yo͹dS+3 !&kXjq5_}>q̸ Waן@'WXZ|>Ĭavky-QwK)<,V7eXNCIgk=kb=.Ⱥ g܍KpxU[4 SI &AZ8f "//Fl*4ЭXT~uaS[~ n]vFX S<-hMQ[K/?l`YQq8g7ܨz3~0| whߌyQ|Øo{oho1P,FbBqDtnz?ujBڜ=a8uk5c"huZlzaS.~X=%OV/]$;CaEw;@U@Ժ,ވu ٰw/84EC1) `R Ԡr:]<]:% "Ktx߆'y."[D{k1$@k1֌LIz5۳ Ekgcz<Ɉ*&3Scc da:&R!'ZIQ_J-km<=HũVQI)^JƳI2߱, eV|̛8FHǞFApk$]Ci1FzFabwFSk$? Ta0zuǾן@i*~.CwI)QIvs@Ӵ$G2:\쨹m2&[CGnQ㈾mb\K'S\[ RXl~E?ph#󃿚*qwCMIę!  
^ DZ靖1aˠT+C5aBn2)Fd Ca R2` L)iǹe KK$iKbMƕ?Mժjv3z'>eZҡ˼ 02eQ_r]ge]k]&jqmÇPt|ujziờL]Q VQh_yQ2,/>cWr"|S:\$mZ^ EuA Kmy[2 l`9}ڏ/`$%z+5#!rvXS6Aщ})ڭGsx*yg-zU['"_-FIJuSr` SA!#$ f|)dTZjcV$7m׳*S9{wW4(UT™2ENPmYEERͅI}btဋFÎׅ=iin'pnv Nd61Lh*9^gmY[hCN wc4ά&.yOnPd1ۓŽ.PIS:%2y˕)) s楙,Ԧ>Y&D(N)S)ΚUt@(6JE,49X U8 MxZzm+zUͯ&$O.2j*ui7FvŠ>֣ Tտm=Ѫڭ hm{%ܱ1s9ژ7_^5[OOH%9w2yWZeRܳxnt8(ˊS5F NSe uY(qk2-u&R G Wȍ?GW7Nžys<3\I$Cj"Sb r(74qGK3sÆ#`(>}%]r5}4!bAc5nBmꓮdEwKRǖRE2%p?} , 8) bK,p]Q2~6 IԩT)b}$p%lsI=i})cOQA~3oWmq-pu>,ӲfLp-N8 l a 0Sd8`#IIJͬBkXpy GoC_Gw#"$܆4XK=JQ BYNG3K X ]` ,(80>5`ZhM]2o"8ˇ7|r.9 k('Efnn=.oy[tMo(S< eIAy*н26T9[TJ 쇞Ԅǭ&l_/.&vȧ~|:fzNfglژo.}[dx { \>YY5DwIS8x# #L]a`6TS &8F(U E(ث^Zraϒ0 1{?")5AYؿW4T.$+vU>dx~B)Dg)Se`f% o7h_kgymzuv$2D#p<<1kt} <9On~>SOL)H zOw:ř̘4k #A11d3-a,%ʺ]>YwUŰ,eŕpKFยJ~w{< #"^N"$pcd7V1* /ei@2b31"XyTND B'HkHWGS^hXꍢvzk1#pK8D`!7JSr k KH )U1 l0Ϳ"l'\)j ET_jã~% h>ނ{vF+FaN J; U6IyF4)jk̷cATe-B8vr(w g lIG|MTyq\Ô\{qtU.)^} WIWJ<(cRӛMUÿAۃf!0\6pUt@KХ2A FOmP {e!ZC4qcѫÔsCljѢZ΢J8bH+Љ^1c&Up tifwփX<)U'D!}ÅE:G{ \˦eQP- BIZga(^3/kfV|x[T(7nb4s&!nQittߺ=*84˃_Zr`lc vGA9 *i^膈>x)ҩQ)Fwn&KjZ &b;DC)OiRDݦ5tν==z y(2$?8o1Mdt\<1i4(h<ɨ0``Tk N7Z}V:]Z==L=bLjWp`nkX7|Q6 5 t|~k›k!65/V\""`wƄL8QMUO 2?OktϳV.O4Ggxtάҳϧ$#>UX>޽4X_]賉R%=BHGY\GwctQC衰烛^=*:g$G7tZ5g ;' ?7*:ThZGFVPbTWgGIS{2i:Y]ϻou퓎_ܥ&;~n%.\>> Mfg[mZߙ!v#ƇwcH't*&˭+!V\Y0Tb.lG3u w21 '8+ nS˳ oXsaG0q/G@dBf+D-矞+kFO3.uL̬sR)1Mt7ꋍBK% D_& ̛ 7 W{o_,iy #gPbL֒:y%H*0AkkP6bwf=<筚{e&O6`HfEubB5rE)E%Op@R 5*Sf|k:JL^V26oeC7+(ĸ'Q8S<-P|RFÒQ6w\_F0u P@\ֹuH,l7i>p23+RXC7qإ G"M{t'$Å寉%FEH1H .U*0Gg6 _PJ/Q=Ofn,0Z_|N%&UPbh YiLf97D麦_A{a-eAFmt~!j#RAӢNqAotw9Sk)c^DמZ?v0NU8jFqZ#T,H"XFXZ;A@@ ڑ,Ig!p*g((a)a " /PA`UbF#`ocШeű 8/9' sAE]Hia%#@h-V)P vx$%:$0|?T`ƪTɏm1NO䴜l!\,5]OVCbT$Aw|~^w>ixf9xOw?OэyX.Dz뮧 b|4e~}72Њр[sff!}I3+ wz4Jo5vn,۱)xYv&t zi>K|ȭ70M룊O]%(+YiQ89TSXr5XU"F*=j,ӨЂG Z.2*.|xj&!XAao 6.wg~[!8ܭA)UJ 8UQ+Ӕ$2U\Z)JVjr6IfWa-\CJҺ?,]JɃ769?۸{od{:aQy\Mŕd:<}Z,)KQ̔ qr;_8Fn1og_lIɜ36vDˍѐ߹)"g.2HF pVoz%MP^F{IjwDsMo =M:V*=v $j/9"0eDL!+Av E֟CzYv"Bc >bc' TYiO3r~|Hx~zDI+,rzr,_'MmyJ3 ݌SQ<1vp7$^lCaO|cҐ+:L.] ~L^=" {(8\$\L+)I҇II&b˪ML{:32w*4]f$!51|:ʞ t{w Ë*V +:m]\'ۘpOl(ɥsH0awN z0\RIŨWM<>jv j :zc6(]RDGtЋ^OSK*a.A~5J=EHƘRR07J$RPG`s#ϑ! sbRV3L&dpS2,i"Qq[էѮ.8]!}L(0 yBS|)).C#9& %2*LAs`UG*NۮA''`Iq#|F$h# kDA5SJmr).i\zu<(n[W|>PHC͎$fZ}xٺo])YV FXr '}*rK? 
=fGQ+FС~ 3rSR:x;WA:՗ZqG9n)Xotn@Vn>gZ;W:E Eph<ŮҴXjOnĨp0uu}@7R6b_Ă3|2/pY{xM,DT BP< ,A,, i>"swӿK@>IC#^$tPT2bLY#Ib+ho.4.,VE),x 0< z2J.4P.&尉ko;N[ZwX=,Ƨ/IZ)Ikϋ;׮P_r&a'i]]LJQUDrJkq{u=}p6Ɠ9ַc.B&Ŵ6~ēB#!-N j `ՅG S4.#c,B 5,.$G{.Px67uܸn'\/a-yU_jG%ZMu44㌤u3#_W|{~=ggͯе,OQoArg.Ψa;`GU*J?b39*⸸sŽ4s~Waۭ̝VtNp3fڥTBmND 2bG{ fѲȿY3_n6Ꙡ;?H8|8[CL#홍0 [cD amp&WCwKX7h%i22opS3Mz&,(̔ap0D0:)Q(ƈE7]2pc-rt'H`)[zM$))ȍ+œC:V y$ ^bG0!cPO&RH6ͺ;evz #DM#&JrR/12?Cì.K#=slp*SBC~*S,SH;m8#;ކ?J0q~U,~["fl^O3a˗ ΪgTk|GYwBpuxsr RbO@рƤlQ<鐊Bkx3,oz9]p3ZIL>sz(1y,%xm9=t 7q0##*:aENd;wlHڻ( S_y}u.<w ѿ2晌`)ZolC8M!-5=̞Z=< 困|]m\E]?_,5] zɓss{rW[=a~u|\_/8QUN^C>N7lGC?/U|'1eȥe!|@ٛ"H~c]l'㝅r)Cx(ж4M//K7q:/Vå㋰_̧ۢ{a9ufeO'~nL:'s ?TNdjz6'Ң^>} i k%*m뉩(ݶxE{h6}ӺŽDMj{WA`W"ZIc4cK(j "\#AE!ɠ%rZk_&B+ɠ>rl&un(hR Xa3kL}SԠh ® CN"|Ze9#9oTJei6HQiIw8p e?$*jIސ!JLm *>X*'Frg)B^aihV䐐Rl%&XQlXNPQܓdoHI8@& I%uAg`֞6UHl `X5ЭUZ)2chkQq`@cEZ[G RF*r|NZWlXB%'dhʍfDDSZ=$鵲Qbj \xMiZSsd{D4a/$2_2߽VO/9zk~R Z}2$6tHv>a3Phևە˿X7 `^TC7p9 fj|ӥHKK꽙W7MR<>Roũ#*~g^մu#b08U<@.Wlbzϧ8 ׮:@}M4#Gmq4fT(B s_p,27J#GE ͗b4OY'89%rY|%Œ|OHjh$vl H&ÇEX4Oi\f$|%[= 4mb┾9_E|trOQY+S;Oa FQ`JQ]!h5f!)I 5m߿r*:dg7 ~ͥŻ2;]~f }:ןmאqkZ{Gp浭vyڏhǢ$.NsraVd \)Ò4Wĕ@h5lAL{[kkVB+.~h{~.CArI𺋓D%6|7WWdO'zOSJaS,e $0EMngA;&t \x-ּ&waxJM{뗯wN@K/ ߮C\4$D,g".iM͉ݒn:VL/BGN>>Q!t2f 'L{P+Nyzr[>]hu(EuzЀwOn'siJ"τ9J.&T듄_'WuEͱOb,>VkRco5Ls``IElNI722yG&njB(Մ r=PiGD!1qrT Ni҅Q1P θKHPVc6yfE];M5l堣CQRhzJU*U5T zKr_Wg~`XߺhẌ́[=]O֧,E?<IŰ;Q>xY@Kt]c(wgw6wUfǽ %_w<c<]6FQaz{1.VKt&PƢ!7bԝ0޻dhqD8 W.PŖn^τ/O):j/۝_deGKL3ڌEQE*Z6_uKv=|__āzcXy֞hS Ҽoe$ 4^hFY^M}G:ˊ:6OOnҧ1R\,z?|6iPBsj4 w']*AvKF_Uˆ/6ݾ0orpEj^?߅]{CiNa]]ʇ6mPelOa?#Ӻ&^ǜG&^k1:!JWKw89d~E~)_ ~8%l6LJ{yL! NlШ )#j2*eui!vEi[K1,2z^goZdK :CwJe΄d}MUf6F. YٴM$G8x}*WK":h +p.rIH, 5Z G^KLF4Z!h &6s *nR:Q!!ED_:k$\MB}5n̞()ZA靼Z fql !$ͦUaJݖ۟>~|sr}y5oZ#?yޕ+ە?O\9.̡ ! wpRQE }`Zs$D4 R N  4o\iEA#94{5h߄;o tQy\$JO5ͥX;7,P[˴7I[enGq֮OĤF}U Dl"4ݴL7l rVqJ-߻S7,Aj1(N-m>iխ6C^9Ek8%YǹnVRgTZ JS[FuqA6ELݪZnC0SS~{Oc9&I3d+$>&e(< n 8utk \#IKZ+}o.r4\QklZ <4Ih$<\@Hޙ(LQzlZs։j1(זehZL{>phrVqJů쫛nͧgTZ JS[FuoeljtVRu!U=uy"ԭ 2:ͶLU=)Z)J]}ui13Ġ,tƦz™imCg0SS,~q_4_ĺ[-%ԩ-ͷ>nh WN*N!.>`OdDVA ujnw46mݫ4U7T݆`+h 8W7)[_A ujnE[56m%][ݪZnC0SSV->o_Ѽ3[-%ԩ-I6m'I[ݪZnC0SS黯nVE䴨Ġ(a@erZ-؄! -i1C^9E8;t' 2vj1(n,SDl<ݪZ2C^9E8v'M)=J 6*g:i0)f -3!Ur' MqX̴ 2:vj)N[=)Z)#y_RAцTeL;]N8nfZ=)Z)g,Wu;s[%Eԩ,͗en -V)Zũ{f6S7|,3[-%ԩ-͙.luCKmrwmmz`w_ !Ap^}xBph[X~4G6nqf#A 5SU_uuݺ1JM6!Z;':g,eu?yovǯ{]Ԟݼzsm.O8yw\nڗnW~.$.6,ﵴPjx!q) A+LdwU`!X@!kcS״7 'AeCr ڑY聄0{;~ љRbёe%4WxZY<$Dr${@EA!+O<"aspR.Lg+Q]w.kDfPzE#J3-i"80wW9. /Ydp3 zY=- f )8=^qr\\e6QOii!̑Y聤kbAňt 2F(pjzqsS-ǤNm]ۛ8@#n#<".yto&ג.f8 +A*Mx(íuzIK=7 &VIhf6bH퍜W(!7 ~=KWy}}(^z@!ZV{`e+yO*fA ,~Ÿ1@12i[Yefy\,@"ɖex_nxBaIϻ`+덭4ېm;MB錞: # )@B'L d+W@PŔvU+*4ԭΡ`HQ SgHWH+kF=,lwkj;uf^ ɃXYcSsR1&,7b96=hN*ԺTZ?38p~OLXdaqoz2U=S^hI xviS&ȝMye2<%Zhvљ聥l[.M!V*`]Qd TjrSTeog]M,1*J" 1VeEJ{xrq96۠0li<4|)Gth+ ^q>6wp,<ݛ*xJb kgz9Ã"zPNis9D{N_]<.=bb<*cFR^Yf3[kWyZ+ha7>.u ҃9D 6,0T]L%{=P<U Z`ari*|Q7yQt)P@#*dNeE1Mp C8Rd Y5Z9" # eȹ%Ge7#l+ɬXRLJRn<a&+gR͍G#Dz*JtI&\%$gcQ_}6.3" 4{(QKnQɭ%#it%6s}* xD{iqHFEɥ_w6x|Q_wqGHs-T9VVX)tN2(icN`6asK;4A ԙ> 4^hAf:LȂ ~УuF c xdVfDA5\Qt'c Hf;2ѯ@Z{)#gqA<\tF2+ "*PX 1w!f\ &0/ l%a=p|-arl z?"ME ,9RAA Rҟ,#ƣY0ټ4S'D$' K FdF6_B\$`+wwRY2>4=ਠ6LvY*U0H`T-YlgAKV?k-Mܿ-D)aGf4I]"4 dlYH=?/&+3vquw=]Y$fb&aYM2EW6ޜP ~_U>$?&AQiXN޼:ח'b]xƝ]=`z%m?s|C97B=/@ ie~[Wazb'OXJz!t8 W iM<+U)$*KlUaOxck !=a<ɏ'vN~^O2<'ÏSs|i2Lα&xwbd:`=!` 2r! h$(8`CF{rKxG1ScQ F%1PBeHpVȸ2 lQL*"#QT:e#l7j@W# )_| /&ԛw^Hvǝj#>ỻ>/o~l AK!Ξ*(r>:d|[ES߃Q,~(>$++SfgaӚ6sO :S oOf Al! zJHD2d0#* *JgR_ K^!m6ˋeRՄ~ )H7ekSE{Л&]z6< {iEv;َTS?8l ~ պ3In]Y&^\Mn2wh u~~R/]bQ'\瓟ߓo{X8I=[|~rg=yYJ)씞?y=3sbL&ElLFc0$t! 
.7'A"w,]/"$k$8jI'Ŵ(ÈV@5 3d,^$$XTA 5F`RA[KU=i~a#G^vwf(7@wU~:}K\}M,.xMJ2E]|teK*׸4ldT ge<ՂauzσAR@Mݙ&b!œ,T fg{s7#-/6Nui>!5LȒ15tT)s,YVC#*\m+T&P4Eclb+}` \f8IXN$T0}t*uy լFY`.ATjS)N=^ًEܬb-M镘7}\#LڪK㨦}R6R' ?V\R"Xl8:lbsIPjZT F׼hC1&{-JfUՀLZ)uUlUQi@VCJ#0\$i(ZrUO"Cl\Q fՂ5JJ3FYԨhƀy2Z TFxi,իmV!xebP)L݁XN݇ gs==j>HUyWu! t}Au:W@"w 6LkTF5]ϴy8簈Bwg.`^LOuiȯU~]T_?WZ}tL/gcsg-z$r~RgGggvKw/^F%X}ُ|<~WʓO{GmWSW.gXexrv.pS[Yvh>8EU?x2sJSeKBAQ-_nζ~WOΖ1JfS,h,h! -GAL՞[o8.vQYe&WL3x%bTˤB54N>@V]u| Tk%c5.%94eD'Fأ XE=V\.TGzETmj֣:oU 6W=FҀktMûְc-ّԝR')IjGk5 $USk.=7!6S4L-gΒb$^SѪS-N WD7jYhlQr!kS+HHRshlRY$Zv-*@E?jx,BCl3 >ɣTʼnh^@ Q."9g{ߣjDE&h>^xl+%@Q9Ƚ2ޡOUlB%ڠXL؍Tg̠@Ԡ,86ʅf=oI#B'#}u^LvVF>| pK iԌ$?NݪlbjMM{pLh59xᘫ:ud6+#ঝb\IcH hʒ<]Vբhcգ,&|^o lbrPM%qξUfoBfS%^6 _{WjfgПf?8].;vLY ؝ ;dp\.^ 9Jhz^*?;Upa4;}|v ~YtEBُ;w|}O?c~MkWƏZ9]'ˮ8v wD#7˿&oPbg}੸tCY:h'FH9A9gGEMސqlЫY#19*xk1hV9Q=é޻Aꃦ~ 0ZڥRy(4 ݢY0( i[Z` BGMdoQNJ7-J#JYNKOQNX[>cG>]oD#J`|`9:K)hCAT2VV=jBr#!fZ-1Rm#{RNDU%RZ@@5 ȇln']9[dÔR\ j-0K ţکРuzby؊l_#}L!f[lE}~ZcQI% L~<ս=.9e*k={L?*_9 7Εj"ca1:+e ݓdJ$AB|ˍEGVկ[@)0.pggRK]P_}ڟ?{NlouEl>>_T%0WK1w9/)keX_Ƴa$دJBa=PdTfz6?;\^l)lȇ)B՚O//Hh|.|"^{"dEy)G'W+E*XY iͲ#XӞ_7N/O0}}?|WV FUGf:~{߽Wg˱"2|`JXM8'2Obrd!߰!"r.|:h|ρ$;  :q4HDL|bA>6$ ­b#L+ ? e<$<hD qC}4#U{,X  8M!Jׁ)0wQSg{-'dž\:ZAV*cmH!Gѝf:c(&pe3b 7jBE~i.veЗjk~j~Ո_x4ԅ8 g$CJVۤ[{؝5RbX,~Ž3F! c'eRM\֦gflڇK?1k3v2H9%!}Fx1_ lL4\_!<tt ЙIwٵ B]r=iLLW42;6z]~;Hž_Y̾ݞџ}VWz0OFh#nfڶ\m۟?ɮuF[g]k~zp56qHOtQoiʔqnn"[KW2oىv[h$F]{ǃ3h/0A˫:<،S}fA~\` O-o lY9p_KD.npȶsxlm{(r3Ŕmvf ed,aJ0I.S%aXɄڢ-h]j)uAhtTmMeX9oߨo 쾸S~ymL6Wgn>j|2(DNP홻AA#1„uef hA= H&`4s+T"C3`#^RgeQ4{!}гf ڲW*k^{I?Ked.ӿ:}Xg3{>EmGz98?Sm1Kua?V)&i!$l_F" gV/__61uxb1B)B.Xt ̂ wK؛~~JI-UJ})OJC?4t 2slw =BZlYW  `b֞\u,>?r m c :to&9Ԏkn@ % 0얦:CZFE>Nj0ݔMH.Cy:ǻBїf&(@GJ;%n/5BfP#uO H*5R1I͐p6["CnJ@_V>qf݋A1Ŝ]ZRHīn+\2},h ~;fOx~p¾<ŏLz̢Lm(ͮߟ c`{L> +>_7*s֎,䕛hMJ0l -ӉFw/:un/<[M4ƦsQuwcNQ͈-ӉFwqatv+ٻŷn]X+769Xz  0X}[p-3JM-|zeUq]3i>{YG}UT^WV-.*\b _V%Νi̿Ȳ8~Yd&wU@3hp[G1JàA ,xr@|k${˪ `IZ wZ-]7 йF  [[%'fM[>nw?1UlHxox˳j}Z&էm-W1yVDSC!a+! d?M}+'l"zk)${ԌN)o2 ?X|y:օr)]&-ӉFw/:eÝn]X+7(hwN»bc:nc"8r0=P_օr) HP';RZߨSckbXJ:N JaOI#ix&1EI}]Z`FOS(ds"9WzǴCR_ץRجzԞa$ƴR깙+f􈭔y/3udGmY)*G8"gJOTJ1RlSc4 +J M4}ms'lVJ-V|*as/iquRq2QLuFdGmY)Ŋ$4@jƤ$h^ZڽsՏ,fzt|3fXȡ y ka-a+JgڲKsY8:i}0,[.^!v4X^G/(D5ȅ'bac ǣCrili\ H1V\0`mGHZ$S`VNva@L{|C B0õUο]Rzxڒּnk^>=袭yFGT1:?F 0: >h8@Dfm6X<;\ƀ̸Coym1oz{ytA7WVhlո~=.Og S k'(uNmA S\?OQՓ6| 1$h,p @ 3b6/~ZO[Bm OF׉?[9S1kvS#j}r;2 t*{*h63j TfI<͕1Pbn;e FJ5R%b@Hs%K"j`$PHfӔ!%-:T0D6Pam9 En%>,^1aqTYtd!Dcl ;ATC]Ɯ#zX |L'!mp[|wB^FǓ8w$l-/ӉFwq!y"|_ցrmmSf[Jr EŞ,rqx$S_S'36##2#0$p%q6tA @I6x*tACDŲj4hgIò^e9뀞1q["x*uX*9L6\]P)1hP QKWjRS)] Cخfػ`_]X,=F_M\  bBS c  071iBrj}^"OL6d_wj^<8j,ƣƿKM{qbr F^Zцb < !}}>8scdb|WhH~(+OkBF0\BBsCN/}na#X'W|lVĂqrJ$8yF 0ͥʘL!e ":4Hk2-nyhlVt/ %aĊƙ9nI^vj?̺5;1=O=IVX4:,X@ܒE&2?&@h)u!`0S:q^6K\bnEE괨|Z*^*kuT窠p*+ LATa%(rH@(-3EEDcyhQ*:G|T^o3w0k.ALR#KH|3,gEYɩQLRD`yC=S@nH'^+ARBC H1r=IJh>a o*Q]~.ֈc\iTB;q'tTuS >t b%=1h)` daJ|0 {f>1t)J_JBh+_ c򤼟vuK7Z硘13jJԏm7TRT+N0`jL ư9U.xi\B8cD x 8ki5=䃩WWnbtBeQJ:/j8+ g?%xAh:i|٫ц6FM!B=DsD ۧ?zc=NH2ݎP%]I=Aky8d.iI|ZjߜkOd,%QrK{9R`w$[Rs)03 &1LMަz8f' r`C~߶wSj:N3bۈ G`-=,лuWL "#-&8fB6*Myb@B^9Dc 9x⁋偋U2Uўx偋6)WOɛ7PܙY]UUnOpSìoF{g,3n*s%T^ƭx\dd0s.eO9:SՆsWW~{Uqhq|;[*i++TU}w+cp&ٽsyg.ooo8 eW/>?L釲g֙?^oٷqCp_G_ށ%GBm$G%+{i_ koiYqQZ6NTri*5eb './ B+0j!>(LzmdeK}%l 6D)_rTtݚ ,5Pk;lMXER J끸g=~|Zk"qU+[: x| 3 G_HHI|fr7(aC6"r2o.XZICV͛\=;4' SV'_C^T8 ??|YxURgRh㢶,4Ux̥!l VTr=˙8TA&Q! q6Q~{l4Y]ְ櫃ݬXɏLPRj Mj]~MPcYؘJj@3Z3F崮Ȫoɭ ݕFQkpxLq=t!7d]ؒ#9et: ׽J! 
A^S20bU^aJ|0FH*Q ֬[(J Ǿ~8Z%~=9|#ƻ}y̻gKz.!)&qn{7A'Až $>#ʻBGž^R:@+hSPۅm= ~;, L"L*0zJXu2=J@!/R+@~BQ* AΣj<)P:GRR*I0U9NLIoo`(=$ŖԚJgRNPʉCR_4VTJgRa(E쭩R@!/R'2p$8JjN(5J C)UJLhJNt(RU\:a/R+v/9JyϫR$Pg|9EJqE餦Ra(]Ii?g74h'_Wb qa/R+"Oy4$_WH9 2Ci%fpF)0Bu9JAZsN(5JvjW_ƌZ1~sF~C2uJvK}єz90RYDJ Ci%b?o2RV35 2JjEƟ7J9 C)Rj(4 +5VOF)bJ:{"Oa(]IiƟ1J!!uKʹd{85џ/ѷ-.Kͯg}ntֺ_0J /mEsy6o߯Z1Q9 ^ DyPN b=K^z ɧݏ?dShaZ˱5({aI$ I&j t9Qۼr}[ʳQAOg4֎v0$ Lcl音"qΓ;Qo}O6sȫ?n2ϋw[)n|G'2]o&1wUWs͈VO@P==Z&&i Dc渳ػ|wzX86q_ArQ%? ͽ71dQiPȅq5W6] >-_ɾ˗\AoI8Jޡ%˽pu `/r0di7u2Swf/3l' nOQ%?ns|r ]񺱍z3\%M8{3& ]J7 ‡J!lZC\"( NjB72$ dn4 L+#@㞞J*TJf kKQ,|iPIɄ͑Tgh)Ѩ VD[2#3BiNҁlK[Xg.Sw5|x^o{&=EV"w/)\~T[Ⱦ{(oG܍p"u?ܠxnO? 7rsnaSܒ?uE ds^3n^56l+;k^m]Eׄ0A!#ޔ!$ =q 9GzۦN0gU?o97G48Mf|K"ޗզoŗ뻏[[Ή2%n\Nla%ei ^d )JK-F!wJ^,A;'ײdXdh9W."@ SR(Rp {}J.L)P)fpj4$PJْpnQR1->ZWC)oC^4%hcADI*[STAXGAm?oI`G32{̊_r2 )3X.~Ȭt?asb3m_Vam:!;,z6KLzv ߪ؎^>|]h1D[wq8 gzώR "$#t43RyO7*XkqɻFYBHғ%N!v~tmi]qT8 !B[1!\"Rf! %RJ0ȸ,RȕP1,1X4*%:ǤG}#'Nɰf^%X*3Kd)Dc-%SL H6“U!k3\}RJ?f;t㷫Jsޜ>wZr4WO_-liIV ??kxHR!CEB+I37"FXHKэ'+r8P$LҼ0J|C\*Aem]$|R"js  S²tT%):Ӛ@L2AT yQ( b$'DPiIyv6>޾J=ۧo^ؿ?d*sb}YZ/ҪJ÷eKu1O_}~+ቋ}uww9#L,[ԣ?w|?>^]ewI>@rٕ?X* u?.kZ{lB~޺.dL3-W!k-b% ^9_O)3$Bj%dZ>lx/EAr,miYCQAN4څUEN) DnM/`/wۓeb .;?MFsw7WŊsO:Ms3??{2g^7ōfMgST9**R|fnw2!d,:`j\mwW$L9LteD>#ƻ8̻'z.!) 8Lj-U!I}FwsYy-=:лuW(X"0ĉT5S6)@WgW-ҙNCWGԦ1Ғ"*PX (CDu!wITd=R[ָ.}6*r.ٻHrW^Q)q='$z=zM?~T{ \M=d|w.{$N;V[W}l۫[_Kr\}yݛ7>{vYq߯7?Wէ+:kS=<}}Dos;Ww}^wl4Q_Óny_#!ny+3mi>}XD^3oPM_X o{/i x.?H]wxcbB ^?}FLH2e=JP+?o$w< NYQ IǵѲߦon76~x{Xz05c!ގl`t'TϩKDcaU\"FKh,4C2Q-N+*޶֬_ 7hm<֖kFTEӆy2% @D93Trbzr_wg֌5J4DK;2?ۙS_vXts8ݵ gNmvw&f.Ʀ*bবl )UX*-Xo Df\ׅD@Y=R6_EAܫl0ȁP;{{]\UU"hMEGPr%JP4lꍆY,y Pi˲Tl-UkjŖZ˪u5ʶ*:,&A,feE)FAj E<D6.P@HѴEJ&j[hUa45<9 \[#|~6x Vb`- hdEFo񢋡W G+:پ]9q,điyvnV'g \_Mw=Ey+PB(l?2+%D+e4;5 apN>'m{6)[iP5a &.1` xc> ])Yc_?f y(O ?zК9x"敤lH/adeq )Nv=\j=1nԨ{K5%b)Dm_Av:|I@4rͱܥz`[.1)|AЩ7C6rͲ)G;AA~F6gA`y-!rwB^fؔdf(ыK/ˁ\D/oe^UavxI(.Z!;!lpTfPK p{rN-58]eT>t:*)*n HUTҵ)U .Q4b=8]\;%R8Iy93r}:'JQ>ㅨK΋=/Y-^{+3/Oz;E<1ōh{Ka{lJ/+$D;I`rZcj SknEU7ͦpJ華&W9[-ԡdX6{!:?20tZYb'eBb]h*Elz Km]qV5CT-ֵKmtS RD#X? 4} g!{_x۫Scqu_ntX~w[bRŴeh"A),ҊU2xOAۂSpiʲh\kZms$`\{oIwt3ڕa y+ekIm[2ՀBvk0Ŗe~.F@;~2?PȖN@bY<䀘 Eiı /"vG+d0PűSxWc@d{o|Bl `h)xbs$LHw5W; &䌞+4ѹ3F/td~}Ə6l|ϣ)iOY:jFPԜypϨ8 Zpo*2m3a%c1-:Nt9vZt-ӿ/X$=Ŵ*[a>̴-!:΅?^F];K|twZ/(}Ie ;,샞N'NӟTv*5 7Ok ȽuiPd{,8vdi".5&`5E^kѧ 8IHUdlInʠ/yd愸sB0'$,>4j / i 1'H bBl _ ~V'cݐOڈ)KHWPƒXq|PZ+?o\w:zɷ}onAR]䢵oX)}Z!R#'mmmDP4-cqepWGۍ 軫?ކ#bmW8H`!l(`uq !g$%~~ M7G?}f7]ܓ{~ڏmEnpS/i? w 9XuYc;`|[gW3rJEO~H*Gm#($muv89:Ò}gh[4mkª ktk-iR\XnrqQ/͖Vlakgڭ`VzLmCJJ/uJW6Yi'.VzVJ*JI+5̋cRlKJJ[낕£Zqo-k+=k+R`}֞97R;.qy[8+]\DR-qVZz+=G+5gxVzLm:8s+HO;sN+H%{:g+7R.VzV#TZr=WN{}W"ku7pQ$8Hi"x *׹TrlIJe _:~J JV8<ОS.?bs08aRf#F.14\nkC-\kmlc */yjX3JR +=ߠOCߒR5堞MQ:<UE\DݠBa>YA`9^cu?b/=홇}s=_I,1I_;%h'IwbL@4)Ƭ^lr5E("mktEY6ڪ0`UBkJL6;{)?r0>' wGF-.lc2PU 2FaS+jhh"a4+Q(/F]4T+ϗ73.uA~&KOqݶeJM%+o2#EHF1څ7Ԓ }njЮ{~!pʺ> MTV)ԺQҴP90/la`O1k?:{q8[q8^-X5D0ݺq8(VT ;g5n;wU(Q,<Gb0Srf+בq)iG É%g80נ N8 'v16+{30_4.gclN OS_D)1oգq"c) I#6?HVRF6eǏG6Ihp]8hP$+'8WCIj"/E/ ;OLА >@L V.RjRTA9IRk[aRgBY-fGjۛx{s!aX1 1Qnb. 
)]ÿrU7+}u嶾!^|c\ {VzȻ~Z򰠷]wٟ:9%Ia_u7}IJm;A{.!$8cMgD: ߭GDcX+7cs:;P4?T8hqi NEdzta3\ys?#^ĩ}Q~j/B^)|5zwb2|>O2|f[j3}$Rꈗu}N+%ҵb/VzVj[V]у/al3RlKep+=G+ QDMxfz30{FқmIsPQTg2uN]XQdX%s*%} {T}jJz]|d%H,vX5)Ug2|Qixrߖ5!GןOSƲk(@yTյP\\ڐ>k~ {N1,VZM9=e~g^ŁdK^herR_׆00R X ~e;#7 X/jD; *0Nk:aEuYAVkTVWBYnHt /t|v#S&BpR BGpV%R,*k1,uKb m$v *[h+`[Rq5)*aYSLQMMjeʲR,.CB۹˒ sF!PVH/9l ܚtiǺQr]@ Ib[ЦaWETP= a, gMA= ӸgݦAX5L[]?bT;+0hQܳ_ըP{?&s&+6}6)cl 6=e f[j/Hg]#3DݥԄ4Q)n,IN%y[r2ؑďL4&Ҟylj4qG#:2]1NG7{$q@N5, PFo!h89=i',C8W"Ch"Gm&c'Qgca Ks^*k(ip&upZH@`A ;Hg̸yy&;G،a!Dlʪ{MP/=][o#+¼d!Y DY,pv'`мt֖3IߗlvKdjٝɌb*dL 3ш=xAЩسRٳ[7nY6u3h[.)&혫E{nJޭ y&zM k.F>>R#%c>i6R2qڌ8mǡSuW( (iCeO{ļm!*+etV*Y +Q֊VzzVJhD&|tgRc:hh ҬAu·cZ>R#xF+=E+fPAX)4+FP|ғRiV1Z)ðRiVZIJOJIsk [dzR7\3i[iT| 4)>7*i7Pͽ$-%PKSQ(-=S0W7٫Hw~q୆>|LeS#p6y-o?ܦO7/u9{25#_zJ%eCLsdao&OTW.p]̛Mp?Ssr W{խ&G-۷wXw} qFvs'ERM祷 ?]xo#]LBePݔiAbb~Y?߃f$1kP ƒe~DR5Xz(}tX)û\%md}㤍.ik}h-:PΘd2:k=|3!(iBdv}sy5~6X^uiR_޽7:r?KcuRЇtcwqNSNG8ZByOщlw^92/x-~/Dѹbi`9R\B*Jn4r$l?Oj䈥ְ$ xФqipeq_]UPd[:/=6:XgFؚj{-ӖzҲOԎnbx7 -uqdSo.Ďљ"LanbNӚ2{rSr CVAԜJ_k@y+BrKx Q RKBKz)L3~.K@z} ET5^"B%g FlpyPC`"i&a L 5lmϷwO*P/ 0(ͧvq ժ7#<kfbDZD"Hzx'QL<%fCR?KHsj[w~ږO3Q9ObBHJ Md6ISH|$z3;CɭzX3 $݃W$`IFvr^]ӖSv:ى-ӻ' JּL/9?m6֜$b'db'䌃PLUȄU^Nv8`8GF0 r(TE:/u b \Yo a 'H/1o=#9KJ 7ۦ4^]3eJd.jc]2D],a m4doB? TQ)+(KBn-~\C 2BGU`_QzᘝC/%bdVk?&_/.*`!g(`@:ƱVZ$a&BPYhZI>0AP'?<]b-vY"3&JEQמ#h j. &o, 5a L5Q*ki&z#z' 6Mc GblL naQGF Fu+/k,'|CwQb"OCEiڥzyۧPgP|˻(Ʊo%` YhURzEa.JSW!`2͟tRwұI=D+`Mhك+]3QNJ *{AaoJi[)$Z)Dh\^@26o~o K5ʣ"r˜>Fz>0R;$=zw*!mלɌpGv B:=pST6z c _/b:WhJ[9N1fj/lntIR5Hԫ*/Ƥ_lJRԎwuQ.&1֍^-UBtхz.\䛣jU_p?7dk-f_ꆃqKt”d}P\RH~sbV;J:" #TQj,jdX DFeQ@JOÇ9C R Pz)*[?bN#l4Pk~>K'NK\I4SBpBj)%`4X( f p;cEas` Σ{{۹{̙_P%~&aRZ?u,/V%|Ut_}w)†qO39!Iļ6`ҌcMmZHauw "8U J )"=#$ KS^A-M=!Fgt, O)8/+c A"|b 5a* |V^sveK~yd€K"w\ lUˏ6 3 E%~>|W)qW_t~͇w09%lZ?&?1L wFhf0 ro s$}7(߭՞BbT83"TnW`puڊ^:r0Ŷ'3uO>»QM"sH\ILV) / ՘I&i!hhɄ*~{ 0t#5;hoB$F/@t}1!u@p{K`z. `XJvF`Gڎm h5ѽqa#}+J i0uפDOOȿfwûȿ^/~7TH~gIp_>ݻ {#iEp_`l<׻7DjNUmq0|Q~ـmv ֭36'qkT $Z֞*ם*A )ym7ĐWa 1 fxϒV~p[6 z ֶ}FxԜpM7VKCbaxNuUd'E. }HC\"^fɵ4)o/'I#wX+s{c"Ӈ\/8g[ּ9%*Hq_NSkrT,6]QNEB%*JXM~XMRzsm@VEMC,䍛hM*$ u~(GnN;x#F_ {nhޭ y&eS\>|#GnN;xc"¼{nwH[Mtæ('eTLzWS15CDbEj;C`I{ Ϳr0O 51mI8ʈ 陲 cFq,0 bx֭wkM)~C@5Gow!L7q .ePe뗯 dJ܋bQ0~4C_;Bͤh}ۊB9轺$mOZ)NRE*HI_d~}jȋ mmlKC 6ۛJTo޽Eyovn ~8'4 е},EbČIVsyriWk(4o=8\ N-=VXfj A4RArb uՋn5G|l͒Fʎ %}NKRюwv[rv'6gޢ :ňVrޖwBBcߐylƼŹVRt3HB;ANi;uR6-mDàJ-i!Kqt1,3"}PQx E^‡'4eBQ)*Jx XZ83*J3aj$ B1)rcQ}əhnFxԽ=sSX5څsJGnĸaX_Wj s 2>׃Uz |'3 !DlqEH)\~4c3STa(߭؞L:f{|yMè^Uh+vl[{QT3y{bI* mRaN0D05|K}/V[g@nЄu'e. 㼰̣ṙɵ o~5@2+MvR' IYu,zJU]b,?{Wƍ K/f&/CLn2v%qr*.xn_I;@/lφs<,I/D2#C( mö r8a$l M>P0/HjxABFT=^ s#Y{A>b9{0H9JXo<)%%U)-tb%}$rvقwND <Uhe/@dHZBw]wxpA`>Gf !T QvZƧ NԪ^FD2b.$)Q$I$J3wf U("m'*;R=ZGX rpWZ#O)>pO^gTWkrR+v>HSWbyBDB6 {(UPT=}-iAҔ2߮ſwW]U(ϏY4?~ Ajlm!8:9@@p_Ȁ,oJaZq@LZ˛],ƜmvMY[,Ұ ٜuUucd͎Z+ngTchrl^3LRQo̷77% >Pm#<ؚ o6-۩e$e,HρA('!{+?zR"vW޺-eqdcS*lnҁ,+7QNv_wC>x7_ؘ;\[ɍҩy7-|&cS'̻ S+}wpt*pkͿ+w a!_ֶ)F };:>x=mB޺A +Z/]hB@8DՁ^:DO?!z-c})C4Bh5&\ 8a-:m 9p?)c׆Žk`Bv"n?Nn/ TUm5Gm )&?&ֵP ?%m{ZljIk?)%mqI&$5*ƻ(YuOX`  `U}pD!9w!Yy!~#I8V& 7c*2ލ@SA~jiTuXu+#]wTtNI{eA M˦8}n\>x7_ؘ;\[n8.[n`-|&eSgqz7jty*0z)wp&n}mshm"@I/67,ֳxL/zߨ8FEQ/hz37իonMn!$0vǘy?TeJXCSh-A` _,C.eހ%:7C9"r¡S(7`ϽChwr&]EDRڿQ Gǻ35!8zPh%=U}-ĤMgb+1xGEnpV GwUf5Qm! G뛼uh9u-'#PᨃF_ٵJ#@uv!9!gڳyPi1C˻+-~;@fiWC7"w5z>TK_l6.yۯ>ՌH6 +5aQL3+;H$&",D qدB(YJT8"= KP)`L.;Sy3b`ݻzn=ޭZ^X1w^)R=N—~j+=G+EJJwgu[1﷩\rqE^A/sI +=o+%JI^X)vVZP- $+=k+J(|GħJK%/=k+ŷ`T^8[Tc "hg;Vj.VzVjz: +[=UT /mY)JcToS-3qRr "d/vS}M5?o+Jaq".酕BbI5b ZVCApVZڭj*zYe $Sq )N&LL22,#$q$1$*c,zP{hDSэEyWMcYQ)zz0Rf;#i6,/? 
Feb 03 06:46:21 crc kubenswrapper[4998]: Trace[2070395433]: ---"Objects listed" error: 10905ms (06:46:21.537)
Feb 03 06:46:21 crc kubenswrapper[4998]: Trace[2070395433]: [10.905378113s] [10.905378113s] END
Feb 03 06:46:21 crc kubenswrapper[4998]: I0203 06:46:21.537116 4998 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Feb 03 06:46:21 crc kubenswrapper[4998]: I0203 06:46:21.538753 4998 trace.go:236] Trace[85935600]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Feb-2026 06:46:06.561) (total time: 14977ms):
Feb 03 06:46:21 crc kubenswrapper[4998]: Trace[85935600]: ---"Objects listed" error: 14977ms (06:46:21.538)
Feb 03 06:46:21 crc kubenswrapper[4998]: Trace[85935600]: [14.977651626s] [14.977651626s] END
Feb 03 06:46:21 crc kubenswrapper[4998]: I0203 06:46:21.538812 4998 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Feb 03 06:46:21 crc kubenswrapper[4998]: I0203 06:46:21.540986 4998 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Feb 03 06:46:21 crc kubenswrapper[4998]: E0203 06:46:21.543428 4998 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Feb 03 06:46:21 crc kubenswrapper[4998]: I0203 06:46:21.543886 4998 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Feb 03 06:46:21 crc kubenswrapper[4998]: I0203 06:46:21.544547 4998 trace.go:236] Trace[545716416]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Feb-2026 06:46:09.055) (total time: 12488ms):
Feb 03 06:46:21 crc kubenswrapper[4998]: Trace[545716416]: ---"Objects listed" error: 12488ms (06:46:21.543)
Feb 03 06:46:21 crc kubenswrapper[4998]: Trace[545716416]: [12.488797217s] [12.488797217s] END
Feb 03 06:46:21 crc kubenswrapper[4998]: I0203 06:46:21.544576 4998 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Feb 03 06:46:21 crc kubenswrapper[4998]: I0203 06:46:21.547181 4998 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146
Feb 03 06:46:21 crc kubenswrapper[4998]: I0203 06:46:21.560771 4998 csr.go:261] certificate signing request csr-t2s2t is approved, waiting to be issued
Feb 03 06:46:21 crc kubenswrapper[4998]: I0203 06:46:21.571826 4998 csr.go:257] certificate signing request csr-t2s2t is issued
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.229167 4998 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials"
Feb 03 06:46:22 crc kubenswrapper[4998]: W0203 06:46:22.229352 4998 reflector.go:484] k8s.io/client-go/informers/factory.go:160: watch of *v1.Service ended with: very short watch: k8s.io/client-go/informers/factory.go:160: Unexpected watch close - watch lasted less than a second and no items received
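
A note on the entries above: the "Reflector ListAndWatch" traces and the "Caches populated" lines come from client-go's informer machinery. Each reflector performs one initial LIST (the "Objects listed" step, here taking 10-15 seconds while the API server is still settling) and then switches to a WATCH. A minimal Go sketch of how a client-go consumer waits for that same populated state follows; the kubeconfig path and the standalone program around it are assumptions for illustration, not the kubelet's actual wiring.

package main

import (
	"fmt"
	"time"

	"k8s.io/client-go/informers"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/cache"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Assumed kubeconfig location; the kubelet uses its own client config.
	cfg, err := clientcmd.BuildConfigFromFlags("", "/var/lib/kubelet/kubeconfig")
	if err != nil {
		panic(err)
	}
	client := kubernetes.NewForConfigOrDie(cfg)

	// Shared informer factory -- the source the log names as
	// k8s.io/client-go/informers/factory.go:160.
	factory := informers.NewSharedInformerFactory(client, 10*time.Minute)
	nodes := factory.Core().V1().Nodes().Informer()

	stop := make(chan struct{})
	defer close(stop)
	factory.Start(stop)

	// Blocks until the initial LIST has been reflected into the local
	// cache -- the moment logged as "Caches populated for *v1.Node".
	if !cache.WaitForCacheSync(stop, nodes.HasSynced) {
		fmt.Println("timed out waiting for the node cache")
		return
	}
	fmt.Println("node cache populated")
}
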
Feb 03 06:46:22 crc kubenswrapper[4998]: W0203 06:46:22.229379 4998 reflector.go:484] k8s.io/client-go/informers/factory.go:160: watch of *v1.Node ended with: very short watch: k8s.io/client-go/informers/factory.go:160: Unexpected watch close - watch lasted less than a second and no items received
Feb 03 06:46:22 crc kubenswrapper[4998]: E0203 06:46:22.229434 4998 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/events\": read tcp 38.102.83.129:58946->38.102.83.129:6443: use of closed network connection" event="&Event{ObjectMeta:{kube-rbac-proxy-crio-crc.1890a9a119657d45 openshift-machine-config-operator 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-machine-config-operator,Name:kube-rbac-proxy-crio-crc,UID:d1b160f5dda77d281dd8e69ec8d817f9,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{setup},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-03 06:46:02.937515333 +0000 UTC m=+1.224209179,LastTimestamp:2026-02-03 06:46:02.937515333 +0000 UTC m=+1.224209179,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Feb 03 06:46:22 crc kubenswrapper[4998]: W0203 06:46:22.229579 4998 reflector.go:484] k8s.io/client-go/informers/factory.go:160: watch of *v1.CSIDriver ended with: very short watch: k8s.io/client-go/informers/factory.go:160: Unexpected watch close - watch lasted less than a second and no items received
Feb 03 06:46:22 crc kubenswrapper[4998]: W0203 06:46:22.229668 4998 reflector.go:484] k8s.io/client-go/informers/factory.go:160: watch of *v1.RuntimeClass ended with: very short watch: k8s.io/client-go/informers/factory.go:160: Unexpected watch close - watch lasted less than a second and no items received
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.359282 4998 apiserver.go:52] "Watching apiserver"
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.366955 4998 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.367374 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-kube-apiserver/kube-apiserver-crc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h"]
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.367856 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.368080 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.368291 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
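
The E0203 event.go:368 entry above shows the kubelet failing to POST an Event because its API connections were torn down by the certificate rotation logged just before it; as the message says, client-go's event broadcaster retries after sleeping. A minimal sketch of recording an event through that machinery, with the namespace, pod name, component, and reason taken from the logged event and the surrounding program assumed for illustration:

package main

import (
	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/kubernetes/scheme"
	typedcorev1 "k8s.io/client-go/kubernetes/typed/core/v1"
	"k8s.io/client-go/tools/clientcmd"
	"k8s.io/client-go/tools/record"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/var/lib/kubelet/kubeconfig") // assumed path
	if err != nil {
		panic(err)
	}
	client := kubernetes.NewForConfigOrDie(cfg)

	// The broadcaster owns the sink and the retry loop behind
	// "Unable to write event (may retry after sleeping)".
	broadcaster := record.NewBroadcaster()
	defer broadcaster.Shutdown()
	broadcaster.StartRecordingToSink(&typedcorev1.EventSinkImpl{
		Interface: client.CoreV1().Events(""),
	})
	recorder := broadcaster.NewRecorder(scheme.Scheme,
		corev1.EventSource{Component: "kubelet", Host: "crc"})

	// The object reference matches the pod named in the logged event.
	pod := &corev1.Pod{ObjectMeta: metav1.ObjectMeta{
		Name:      "kube-rbac-proxy-crio-crc",
		Namespace: "openshift-machine-config-operator",
	}}
	recorder.Event(pod, corev1.EventTypeNormal, "Pulled",
		"Container image already present on machine")
}
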
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.368493 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Feb 03 06:46:22 crc kubenswrapper[4998]: E0203 06:46:22.368477 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 03 06:46:22 crc kubenswrapper[4998]: E0203 06:46:22.368538 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.368846 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.368960 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 03 06:46:22 crc kubenswrapper[4998]: E0203 06:46:22.369117 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.371052 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.371117 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.371299 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.371414 4998 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world"
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.371443 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.371540 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.371640 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.371772 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.373477 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.373765 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.384757 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-09 14:13:44.384931992 +0000 UTC
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.436581 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc"
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.449330 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.449459 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.449506 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.449532 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.449556 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.449579 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.449602 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.449628 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.449651 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.449674 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.449697 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.449718 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.449742 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.449764 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.449803 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.449825 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.449877 4998 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.449902 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.449927 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.449949 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.449973 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.449994 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450015 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450036 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450056 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450182 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450241 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450267 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450291 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450314 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450334 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450339 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450355 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450458 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450491 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450516 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450540 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450564 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450584 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450610 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450633 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450650 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450657 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450733 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450759 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450796 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450815 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450840 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450859 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450877 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450895 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450378 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450912 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450425 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450931 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450611 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450763 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450949 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450813 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450859 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450969 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450988 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451009 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451026 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451043 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451058 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451074 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451091 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451107 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451126 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod 
\"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451142 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451158 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451173 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451189 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451208 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451224 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451241 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451258 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451275 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451290 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451307 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451323 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451337 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451352 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451368 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451387 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451405 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451421 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451438 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451454 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: 
\"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451472 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451491 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451507 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451525 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451541 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451557 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451574 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451591 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451606 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451623 4998 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451642 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451671 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451688 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451705 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451722 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451738 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451754 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451771 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451811 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451832 4998 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451850 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451865 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451881 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451897 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451912 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451929 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451946 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451989 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452007 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452024 4998 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452041 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452058 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452075 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452093 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452111 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452130 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452147 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452165 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452182 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: 
\"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452199 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452215 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452235 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452252 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452269 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452287 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452303 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452322 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452339 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452354 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod 
\"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452372 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452390 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452409 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452425 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452441 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452458 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452476 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452493 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452512 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452529 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: 
\"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452547 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452563 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452580 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452829 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452857 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452875 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452891 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452908 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452924 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452944 4998 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452964 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452987 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453007 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453028 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453067 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453093 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453204 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453237 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453262 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") 
" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453288 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453311 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453327 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453343 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453362 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453379 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453432 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453449 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453469 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453488 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " 
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453516 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453539 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453558 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453575 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453594 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453614 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453630 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453646 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453663 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453680 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod 
\"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453697 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453714 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453733 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453751 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453769 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453803 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453821 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453842 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453859 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453877 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod 
\"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453899 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453919 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453937 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453956 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453974 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453994 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.454013 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.454057 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.454087 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.454116 4998 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.454145 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.454179 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.454217 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.454240 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.454262 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.454285 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.454307 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.454330 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: 
\"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.454348 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.454370 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.454388 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.454441 4998 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.454454 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.454464 4998 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.454475 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.454486 4998 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.454496 4998 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.454505 4998 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450868 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450894 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.450999 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451074 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451146 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451279 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451310 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451326 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451464 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451463 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451504 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451659 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451687 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.451913 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452071 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452076 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452128 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452258 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452298 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452443 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452502 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452565 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452587 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452721 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452751 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452767 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452943 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452986 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453160 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453267 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.452943 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). 
InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453488 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453497 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453519 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.453797 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.454124 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.454186 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.454331 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.454382 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.454506 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.454613 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.455085 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.455190 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.455219 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.455231 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.455328 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.455439 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.455447 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.455571 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.455583 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.455981 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.456186 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.456363 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.456404 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.457705 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.458006 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.458154 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.458171 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.458186 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.458262 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.458524 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.458598 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.458638 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.458668 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.458747 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.458861 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.458904 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.459002 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.459067 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.459305 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.459320 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.459320 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.459347 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.462687 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.462796 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.462947 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.463000 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.463349 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.463380 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.463399 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.463628 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.463848 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.463925 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.464028 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.464094 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.464302 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.464308 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.464381 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.464251 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.464493 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.464617 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.464704 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.464725 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.464846 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.464995 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.465187 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.465112 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.465529 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.465549 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.465800 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.466040 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.466354 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.466446 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.466886 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: E0203 06:46:22.467538 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:46:22.967519741 +0000 UTC m=+21.254213547 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.467749 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.468017 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.468191 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.468227 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.468345 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.468887 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.469153 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.469737 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.469226 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: E0203 06:46:22.469932 4998 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 03 06:46:22 crc kubenswrapper[4998]: E0203 06:46:22.470054 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-03 06:46:22.970034975 +0000 UTC m=+21.256728781 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.470160 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.470408 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.471125 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.471145 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.470343 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.471168 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.471126 4998 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.471391 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.471582 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.471622 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.471647 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.471691 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.471720 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.471858 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.471997 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.472273 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.472289 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.471709 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.472600 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.472665 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.472846 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.473584 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 03 06:46:22 crc kubenswrapper[4998]: E0203 06:46:22.473673 4998 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 03 06:46:22 crc kubenswrapper[4998]: E0203 06:46:22.473763 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-03 06:46:22.973735304 +0000 UTC m=+21.260429110 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.476754 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.477306 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.477571 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.478186 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.482551 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.483249 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.483490 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.484962 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.485135 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 03 06:46:22 crc kubenswrapper[4998]: E0203 06:46:22.485448 4998 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 03 06:46:22 crc kubenswrapper[4998]: E0203 06:46:22.485464 4998 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 03 06:46:22 crc kubenswrapper[4998]: E0203 06:46:22.485478 4998 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 06:46:22 crc kubenswrapper[4998]: E0203 06:46:22.485528 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-03 06:46:22.985509021 +0000 UTC m=+21.272202827 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 06:46:22 crc kubenswrapper[4998]: E0203 06:46:22.486066 4998 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 03 06:46:22 crc kubenswrapper[4998]: E0203 06:46:22.486101 4998 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 03 06:46:22 crc kubenswrapper[4998]: E0203 06:46:22.486114 4998 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 06:46:22 crc kubenswrapper[4998]: E0203 06:46:22.486170 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-03 06:46:22.98615226 +0000 UTC m=+21.272846066 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.486465 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.487920 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.488017 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.488073 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.487967 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.488143 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.488377 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.488567 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.489098 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.489131 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.489862 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.489930 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.490540 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.490577 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.490605 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.490695 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.490887 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.490998 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.491062 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.491195 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.491452 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.491503 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.491597 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.491878 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.491953 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.492075 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.492152 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.492904 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.493007 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.493163 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.493257 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.493415 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.493519 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.493711 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.494023 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.494091 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.494136 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.494402 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.494498 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.494829 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.495282 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.495380 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.495614 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.495622 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f202cc16-8c9d-4e03-bd1d-0716f02a8ee7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://413ae0620494cf4c5807c78ecdfda671fd63b7aaefa96b63564212f57b34f9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4741ef3fd48469b5eef0e17e68ef7b20a3929017ff3939ee975b8a3751cfe965\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b741c846b0b5468ec9b52e16778e89632de68f37c0bd5b1fc375cc019a8c959c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f5dcbfe7ff012e1060f79ff8f126f7f86d92d4cd113f390c41f3bdbb7e9baf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4248f5a0e550f2e4e6aa126875469ca31b11f40cf885b3d636c4d97925cb7fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731c
a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.496521 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.496811 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.497103 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.497552 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.498405 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.500049 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.508431 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c8c93fe-ba86-4899-a018-d24fb324de5c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\
":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35ab5e2fc89d8e658f04c9a5ed234dad845dcf70d39e89bd52fed8f84b6c8c68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T06:46:16Z\\\",\\\"message\\\":\\\"W0203 06:46:05.531717 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 06:46:05.532068 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770101165 cert, and key in /tmp/serving-cert-4006352559/serving-signer.crt, /tmp/serving-cert-4006352559/serving-signer.key\\\\nI0203 06:46:05.809216 1 observer_polling.go:159] Starting file observer\\\\nW0203 06:46:05.812530 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 06:46:05.812748 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 06:46:05.815997 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4006352559/tls.crt::/tmp/serving-cert-4006352559/tls.key\\\\\\\"\\\\nF0203 06:46:16.112630 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.509680 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.519252 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.520138 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.522314 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.529139 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.531093 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.540990 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.550063 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 06:46:22 crc kubenswrapper[4998]: E0203 06:46:22.552379 4998 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-apiserver-crc\" already exists" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.555744 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.555896 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.555952 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.555912 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.556198 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on 
node \"crc\" DevicePath \"\"" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.556276 4998 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.556335 4998 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.556386 4998 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.556442 4998 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.556498 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.556560 4998 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.556620 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.556675 4998 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.556726 4998 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.556796 4998 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.556850 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.556900 4998 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.556957 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 06:46:22 crc 
kubenswrapper[4998]: I0203 06:46:22.557008 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.557063 4998 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.557114 4998 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.557168 4998 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.557218 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.557268 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.557325 4998 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.557412 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.557468 4998 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.557522 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.557578 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.557628 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.557676 4998 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Feb 03 06:46:22 crc 
kubenswrapper[4998]: I0203 06:46:22.557738 4998 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.557808 4998 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.557862 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.557912 4998 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.557964 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.558024 4998 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.558075 4998 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.558131 4998 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.558184 4998 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.558238 4998 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.558293 4998 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.558349 4998 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.558424 4998 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.558776 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.558879 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.558935 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.558986 4998 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.559050 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.559104 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.559154 4998 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.559207 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.559264 4998 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.559317 4998 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.559371 4998 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.559428 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.559485 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.559539 4998 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.559594 4998 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.559653 4998 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.559706 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.559757 4998 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.559831 4998 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.559885 4998 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.559937 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.559993 4998 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.560043 4998 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.560092 4998 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.560146 4998 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.560199 4998 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.560259 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.560321 4998 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.560375 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.560427 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.560481 4998 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.560533 4998 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.560660 4998 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.560755 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.561172 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.561242 4998 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.561296 4998 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.561345 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.561475 4998 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.561531 4998 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.561866 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.561922 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.561972 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.562033 4998 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.562118 4998 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.562170 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.562224 4998 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.562289 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.562339 4998 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.562398 4998 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.562451 4998 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.559700 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.562506 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.562643 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.562665 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.562681 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.562694 4998 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.562711 4998 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.562723 4998 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.562736 4998 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.562748 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.562760 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.562772 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.562800 4998 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.562816 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.562827 4998 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.562839 4998 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.562868 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.562882 4998 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.562895 4998 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.562907 4998 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.562918 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.562930 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.562941 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.562954 4998 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.562968 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.562984 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.562996 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563005 4998 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563015 4998 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563024 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563034 4998 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563046 4998 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563058 4998 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563071 4998 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563081 4998 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563090 4998 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563101 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563114 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563126 4998 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563138 4998 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563150 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563158 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563167 4998 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563176 4998 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563185 4998 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563194 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563203 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563212 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563222 4998 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563233 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563244 4998 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563252 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563262 4998 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563271 4998 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563282 4998 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563291 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563303 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563314 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563326 4998 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563337 4998 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563349 4998 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563360 4998 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563372 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563384 4998 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563395 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563405 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563413 4998 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563421 4998 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563463 4998 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563472 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563483 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563493 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563502 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563510 4998 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563519 4998 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563532 4998 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563540 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563548 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563557 4998 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563565 4998 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563573 4998 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563582 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563590 4998 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563597 4998 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563606 4998 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563614 4998 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563622 4998 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563631 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563639 4998 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563650 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563661 4998 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563673 4998 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563683 4998 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563691 4998 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563699 4998 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563708 4998 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.563716 4998 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\""
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.575593 4998 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2027-02-03 06:41:21 +0000 UTC, rotation deadline is 2026-11-08 04:29:10.938026465 +0000 UTC
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.575658 4998 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 6669h42m48.362371648s for next certificate rotation
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.575870 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.593201 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f202cc16-8c9d-4e03-bd1d-0716f02a8ee7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://413ae0620494cf4c5807c78ecdfda671fd63b7aaefa96b63564212f57b34f9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4741ef3fd48469b5eef0e17e68ef7b20a3929017ff3939ee975b8a3751cfe965\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b741c846b0b5468ec9b52e16778e89632de68f37c0bd5b1fc375cc019a8c959c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f5dcbfe7ff012e1060f79ff8f126f7f86d92d4cd113f390c41f3bdbb7e9baf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4248f5a0e550f2e4e6aa126875469ca31b11f40cf885b3d636c4d97925cb7fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.604280 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c8c93fe-ba86-4899-a018-d24fb324de5c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35ab5e2fc89d8e658f04c9a5ed234dad845dcf70d39e89bd52fed8f84b6c8c68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T06:46:16Z\\\",\\\"message\\\":\\\"W0203 06:46:05.531717 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 06:46:05.532068 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770101165 cert, and key in /tmp/serving-cert-4006352559/serving-signer.crt, /tmp/serving-cert-4006352559/serving-signer.key\\\\nI0203 06:46:05.809216 1 observer_polling.go:159] Starting file observer\\\\nW0203 06:46:05.812530 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 06:46:05.812748 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 06:46:05.815997 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4006352559/tls.crt::/tmp/serving-cert-4006352559/tls.key\\\\\\\"\\\\nF0203 06:46:16.112630 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.612857 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.623568 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.634294 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.647446 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-plgnf"] Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.647904 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-plgnf" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.648732 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.650609 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.650892 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-crsvv"] Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.651246 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-crsvv" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.651337 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.651832 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.663795 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.663882 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.663810 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.664592 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.680545 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with 
unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.691600 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.697582 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.704345 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 03 06:46:22 crc kubenswrapper[4998]: W0203 06:46:22.710824 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-341533099ef1d4d3c0ebba4229c15e5f8133fb632ae1fd117b27f361988a904e WatchSource:0}: Error finding container 341533099ef1d4d3c0ebba4229c15e5f8133fb632ae1fd117b27f361988a904e: Status 404 returned error can't find the container with id 341533099ef1d4d3c0ebba4229c15e5f8133fb632ae1fd117b27f361988a904e Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.712082 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.724939 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.754184 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.766119 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1-hosts-file\") pod \"node-resolver-plgnf\" (UID: \"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\") " pod="openshift-dns/node-resolver-plgnf" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.766171 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfq5m\" (UniqueName: \"kubernetes.io/projected/16316a01-0118-4b01-81cb-13e869b62484-kube-api-access-lfq5m\") pod \"node-ca-crsvv\" (UID: \"16316a01-0118-4b01-81cb-13e869b62484\") " pod="openshift-image-registry/node-ca-crsvv" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.766189 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/16316a01-0118-4b01-81cb-13e869b62484-serviceca\") pod \"node-ca-crsvv\" (UID: \"16316a01-0118-4b01-81cb-13e869b62484\") " pod="openshift-image-registry/node-ca-crsvv" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.766222 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5z4jf\" (UniqueName: \"kubernetes.io/projected/e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1-kube-api-access-5z4jf\") pod \"node-resolver-plgnf\" (UID: \"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\") " pod="openshift-dns/node-resolver-plgnf" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.766237 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/16316a01-0118-4b01-81cb-13e869b62484-host\") pod \"node-ca-crsvv\" (UID: \"16316a01-0118-4b01-81cb-13e869b62484\") " pod="openshift-image-registry/node-ca-crsvv" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.782961 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.817617 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.851564 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.866928 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/16316a01-0118-4b01-81cb-13e869b62484-serviceca\") pod \"node-ca-crsvv\" (UID: \"16316a01-0118-4b01-81cb-13e869b62484\") " pod="openshift-image-registry/node-ca-crsvv" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.866983 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5z4jf\" (UniqueName: \"kubernetes.io/projected/e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1-kube-api-access-5z4jf\") pod \"node-resolver-plgnf\" (UID: \"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\") " pod="openshift-dns/node-resolver-plgnf" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.867004 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/16316a01-0118-4b01-81cb-13e869b62484-host\") pod \"node-ca-crsvv\" (UID: \"16316a01-0118-4b01-81cb-13e869b62484\") " pod="openshift-image-registry/node-ca-crsvv" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.867104 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1-hosts-file\") pod \"node-resolver-plgnf\" (UID: \"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\") " pod="openshift-dns/node-resolver-plgnf" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.867033 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1-hosts-file\") pod \"node-resolver-plgnf\" (UID: \"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\") " pod="openshift-dns/node-resolver-plgnf" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.867188 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/16316a01-0118-4b01-81cb-13e869b62484-host\") pod \"node-ca-crsvv\" (UID: \"16316a01-0118-4b01-81cb-13e869b62484\") " pod="openshift-image-registry/node-ca-crsvv" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.867220 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfq5m\" (UniqueName: \"kubernetes.io/projected/16316a01-0118-4b01-81cb-13e869b62484-kube-api-access-lfq5m\") pod \"node-ca-crsvv\" (UID: \"16316a01-0118-4b01-81cb-13e869b62484\") " 
pod="openshift-image-registry/node-ca-crsvv" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.868373 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/16316a01-0118-4b01-81cb-13e869b62484-serviceca\") pod \"node-ca-crsvv\" (UID: \"16316a01-0118-4b01-81cb-13e869b62484\") " pod="openshift-image-registry/node-ca-crsvv" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.897441 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5z4jf\" (UniqueName: \"kubernetes.io/projected/e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1-kube-api-access-5z4jf\") pod \"node-resolver-plgnf\" (UID: \"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\") " pod="openshift-dns/node-resolver-plgnf" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.902989 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfq5m\" (UniqueName: \"kubernetes.io/projected/16316a01-0118-4b01-81cb-13e869b62484-kube-api-access-lfq5m\") pod \"node-ca-crsvv\" (UID: \"16316a01-0118-4b01-81cb-13e869b62484\") " pod="openshift-image-registry/node-ca-crsvv" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.945895 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f202cc16-8c9d-4e03-bd1d-0716f02a8ee7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://413ae0620494cf4c5807c78ecdfda671fd63b7aaefa96b63564212f57b34f9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4741ef3fd48469b5eef0e17e68ef7b20a3929017ff3939ee975b8a3751cfe965\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4
a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b741c846b0b5468ec9b52e16778e89632de68f37c0bd5b1fc375cc019a8c959c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f5dcbfe7ff012e1060f79ff8f126f7f86d92d4cd113f390c41f3bdbb7e9baf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4248f5a0e550f2e4e6aa126875469ca31b11f40cf885b3d636c4d97925cb7fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\"
,\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.964144 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c8c93fe-ba86-4899-a018-d24fb324de5c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://35ab5e2fc89d8e658f04c9a5ed234dad845dcf70d39e89bd52fed8f84b6c8c68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T06:46:16Z\\\",\\\"message\\\":\\\"W0203 06:46:05.531717 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 06:46:05.532068 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770101165 cert, and key in /tmp/serving-cert-4006352559/serving-signer.crt, /tmp/serving-cert-4006352559/serving-signer.key\\\\nI0203 06:46:05.809216 1 observer_polling.go:159] Starting file observer\\\\nW0203 06:46:05.812530 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 06:46:05.812748 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 06:46:05.815997 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4006352559/tls.crt::/tmp/serving-cert-4006352559/tls.key\\\\\\\"\\\\nF0203 06:46:16.112630 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPa
th\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.965143 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-plgnf" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.967886 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:46:22 crc kubenswrapper[4998]: E0203 06:46:22.968040 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:46:23.968012936 +0000 UTC m=+22.254706742 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.973445 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-crsvv" Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.984898 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 06:46:22 crc kubenswrapper[4998]: W0203 06:46:22.989877 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod16316a01_0118_4b01_81cb_13e869b62484.slice/crio-59805788575c32996677efbf8a59016147bbf33ed3fd40c7114a50e98513592a WatchSource:0}: Error finding container 59805788575c32996677efbf8a59016147bbf33ed3fd40c7114a50e98513592a: Status 404 returned error can't find the container with id 59805788575c32996677efbf8a59016147bbf33ed3fd40c7114a50e98513592a Feb 03 06:46:22 crc kubenswrapper[4998]: I0203 06:46:22.998024 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-plgnf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5z4jf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-plgnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.007432 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-crsvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16316a01-0118-4b01-81cb-13e869b62484\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfq5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-crsvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.046344 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-v9x5x"] Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.046820 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.050581 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.050632 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.050591 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.050865 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.051010 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.059916 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.068809 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.068844 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.068867 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.068887 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 06:46:23 crc kubenswrapper[4998]: E0203 06:46:23.068931 4998 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 03 06:46:23 crc kubenswrapper[4998]: E0203 06:46:23.068981 4998 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 03 06:46:23 crc kubenswrapper[4998]: E0203 06:46:23.068994 4998 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 03 06:46:23 crc kubenswrapper[4998]: E0203 06:46:23.069006 4998 projected.go:194] 
Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 06:46:23 crc kubenswrapper[4998]: E0203 06:46:23.068981 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-03 06:46:24.068966319 +0000 UTC m=+22.355660115 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 03 06:46:23 crc kubenswrapper[4998]: E0203 06:46:23.069046 4998 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 03 06:46:23 crc kubenswrapper[4998]: E0203 06:46:23.069041 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-03 06:46:24.069033301 +0000 UTC m=+22.355727107 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 06:46:23 crc kubenswrapper[4998]: E0203 06:46:23.069073 4998 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 03 06:46:23 crc kubenswrapper[4998]: E0203 06:46:23.069087 4998 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 06:46:23 crc kubenswrapper[4998]: E0203 06:46:23.069138 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-03 06:46:24.069128514 +0000 UTC m=+22.355822320 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 06:46:23 crc kubenswrapper[4998]: E0203 06:46:23.069136 4998 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 03 06:46:23 crc kubenswrapper[4998]: E0203 06:46:23.069218 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-03 06:46:24.069197786 +0000 UTC m=+22.355891752 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.072184 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-plgnf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5z4jf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-plgnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.082984 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-crsvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16316a01-0118-4b01-81cb-13e869b62484\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfq5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-crsvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.099547 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f202cc16-8c9d-4e03-bd1d-0716f02a8ee7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://413ae0620494cf4c5807c78ecdfda671fd63b7aaefa96b63564212f57b34f9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4741ef3fd48469b5eef0e17e68ef7b20a3929017ff3939ee975b8a3751cfe965\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b741c846b0b5468ec9b52e16778e89632de68f37c0bd5b1fc375cc019a8c959c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f5dcbfe7ff012e1060f79ff8f126f7f86d92d4cd113f390c41f3bdbb7e9baf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4248f5a0e550f2e4e6aa126875469ca31b11f40cf885b3d636c4d97925cb7fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b5
4b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.111165 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.126984 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.141341 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.158549 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c8c93fe-ba86-4899-a018-d24fb324de5c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35ab5e2fc89d8e658f04c9a5ed234dad845dcf70d39e89bd52fed8f84b6c8c68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T06:46:16Z\\\",\\\"message\\\":\\\"W0203 06:46:05.531717 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 
06:46:05.532068 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770101165 cert, and key in /tmp/serving-cert-4006352559/serving-signer.crt, /tmp/serving-cert-4006352559/serving-signer.key\\\\nI0203 06:46:05.809216 1 observer_polling.go:159] Starting file observer\\\\nW0203 06:46:05.812530 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 06:46:05.812748 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 06:46:05.815997 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4006352559/tls.crt::/tmp/serving-cert-4006352559/tls.key\\\\\\\"\\\\nF0203 06:46:16.112630 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.169728 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/da1f1740-2fdd-4e7d-a740-039b8d39cfcd-proxy-tls\") pod \"machine-config-daemon-v9x5x\" (UID: \"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\") " pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.169766 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/da1f1740-2fdd-4e7d-a740-039b8d39cfcd-mcd-auth-proxy-config\") pod \"machine-config-daemon-v9x5x\" (UID: \"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\") " pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.169804 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2hj2\" (UniqueName: \"kubernetes.io/projected/da1f1740-2fdd-4e7d-a740-039b8d39cfcd-kube-api-access-x2hj2\") pod \"machine-config-daemon-v9x5x\" (UID: \"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\") " pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.169838 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/da1f1740-2fdd-4e7d-a740-039b8d39cfcd-rootfs\") pod \"machine-config-daemon-v9x5x\" (UID: \"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\") " pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.173689 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.184185 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.193586 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v9x5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.270943 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/da1f1740-2fdd-4e7d-a740-039b8d39cfcd-proxy-tls\") pod \"machine-config-daemon-v9x5x\" (UID: \"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\") " pod="openshift-machine-config-operator/machine-config-daemon-v9x5x"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.270996 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/da1f1740-2fdd-4e7d-a740-039b8d39cfcd-mcd-auth-proxy-config\") pod \"machine-config-daemon-v9x5x\" (UID: \"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\") " pod="openshift-machine-config-operator/machine-config-daemon-v9x5x"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.271020 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/da1f1740-2fdd-4e7d-a740-039b8d39cfcd-rootfs\") pod \"machine-config-daemon-v9x5x\" (UID: \"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\") " pod="openshift-machine-config-operator/machine-config-daemon-v9x5x"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.271039 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2hj2\" (UniqueName: \"kubernetes.io/projected/da1f1740-2fdd-4e7d-a740-039b8d39cfcd-kube-api-access-x2hj2\") pod \"machine-config-daemon-v9x5x\" (UID: \"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\") " pod="openshift-machine-config-operator/machine-config-daemon-v9x5x"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.271156 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/da1f1740-2fdd-4e7d-a740-039b8d39cfcd-rootfs\") pod \"machine-config-daemon-v9x5x\" (UID: \"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\") " pod="openshift-machine-config-operator/machine-config-daemon-v9x5x"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.271770 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/da1f1740-2fdd-4e7d-a740-039b8d39cfcd-mcd-auth-proxy-config\") pod \"machine-config-daemon-v9x5x\" (UID: \"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\") " pod="openshift-machine-config-operator/machine-config-daemon-v9x5x"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.276408 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/da1f1740-2fdd-4e7d-a740-039b8d39cfcd-proxy-tls\") pod \"machine-config-daemon-v9x5x\" (UID: \"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\") " pod="openshift-machine-config-operator/machine-config-daemon-v9x5x"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.290954 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2hj2\" (UniqueName: \"kubernetes.io/projected/da1f1740-2fdd-4e7d-a740-039b8d39cfcd-kube-api-access-x2hj2\") pod \"machine-config-daemon-v9x5x\" (UID: \"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\") " pod="openshift-machine-config-operator/machine-config-daemon-v9x5x"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.374109 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x"
Feb 03 06:46:23 crc kubenswrapper[4998]: W0203 06:46:23.383385 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podda1f1740_2fdd_4e7d_a740_039b8d39cfcd.slice/crio-2dad21f5c30337311a8252d9827906bfb8ece9b1a2b4c283865160543663631e WatchSource:0}: Error finding container 2dad21f5c30337311a8252d9827906bfb8ece9b1a2b4c283865160543663631e: Status 404 returned error can't find the container with id 2dad21f5c30337311a8252d9827906bfb8ece9b1a2b4c283865160543663631e
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.385192 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 18:38:42.076612341 +0000 UTC
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.405402 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-dm4vz"]
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.405731 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-dm4vz"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.406073 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-5qjsn"]
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.406739 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-5qjsn"
Feb 03 06:46:23 crc kubenswrapper[4998]: W0203 06:46:23.408538 4998 reflector.go:561] object-"openshift-multus"/"cni-copy-resources": failed to list *v1.ConfigMap: configmaps "cni-copy-resources" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object
Feb 03 06:46:23 crc kubenswrapper[4998]: E0203 06:46:23.408596 4998 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"cni-copy-resources\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"cni-copy-resources\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Feb 03 06:46:23 crc kubenswrapper[4998]: W0203 06:46:23.408538 4998 reflector.go:561] object-"openshift-multus"/"default-cni-sysctl-allowlist": failed to list *v1.ConfigMap: configmaps "default-cni-sysctl-allowlist" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object
Feb 03 06:46:23 crc kubenswrapper[4998]: E0203 06:46:23.408635 4998 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"default-cni-sysctl-allowlist\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"default-cni-sysctl-allowlist\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Feb 03 06:46:23 crc kubenswrapper[4998]: W0203 06:46:23.410056 4998 reflector.go:561] object-"openshift-multus"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object
Feb 03 06:46:23 crc kubenswrapper[4998]: E0203 06:46:23.410096 4998 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.410350 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz"
Feb 03 06:46:23 crc kubenswrapper[4998]: W0203 06:46:23.410600 4998 reflector.go:561] object-"openshift-multus"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object
Feb 03 06:46:23 crc kubenswrapper[4998]: E0203 06:46:23.410628 4998 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Feb 03 06:46:23 crc kubenswrapper[4998]: W0203 06:46:23.413516 4998 reflector.go:561] object-"openshift-multus"/"multus-daemon-config": failed to list *v1.ConfigMap: configmaps "multus-daemon-config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object
Feb 03 06:46:23 crc kubenswrapper[4998]: E0203 06:46:23.413570 4998 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"multus-daemon-config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"multus-daemon-config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Feb 03 06:46:23 crc kubenswrapper[4998]: W0203 06:46:23.413633 4998 reflector.go:561] object-"openshift-multus"/"default-dockercfg-2q5b6": failed to list *v1.Secret: secrets "default-dockercfg-2q5b6" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object
Feb 03 06:46:23 crc kubenswrapper[4998]: E0203 06:46:23.413647 4998 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"default-dockercfg-2q5b6\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"default-dockercfg-2q5b6\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.420977 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c8c93fe-ba86-4899-a018-d24fb324de5c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35ab5e2fc89d8e658f04c9a5ed234dad845dcf70d39e89bd52fed8f84b6c8c68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T06:46:16Z\\\",\\\"message\\\":\\\"W0203 06:46:05.531717 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 06:46:05.532068 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770101165 cert, and key in /tmp/serving-cert-4006352559/serving-signer.crt, /tmp/serving-cert-4006352559/serving-signer.key\\\\nI0203 06:46:05.809216 1 observer_polling.go:159] Starting file observer\\\\nW0203 06:46:05.812530 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 06:46:05.812748 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 06:46:05.815997 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4006352559/tls.crt::/tmp/serving-cert-4006352559/tls.key\\\\\\\"\\\\nF0203 06:46:16.112630 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.428772 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.437769 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.444892 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v9x5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.454947 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-plgnf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5z4jf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-plgnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.469766 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-crsvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16316a01-0118-4b01-81cb-13e869b62484\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfq5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-crsvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.499602 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f202cc16-8c9d-4e03-bd1d-0716f02a8ee7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://413ae0620494cf4c5807c78ecdfda671fd63b7aaefa96b63564212f57b34f9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4741ef3fd48469b5eef0e17e68ef7b20a3929017ff3939ee975b8a3751cfe965\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b741c846b0b5468ec9b52e16778e89632de68f37c0bd5b1fc375cc019a8c959c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f5dcbfe7ff012e1060f79ff8f126f7f86d92d4cd113f390c41f3bdbb7e9baf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4248f5a0e550f2e4e6aa126875469ca31b11f40cf885b3d636c4d97925cb7fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.511220 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.519433 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.526695 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.541052 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.548161 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-plgnf" event={"ID":"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1","Type":"ContainerStarted","Data":"0226ecd8c9fa202b4c46cf30a396c7bd55814fe45ec536e4fff2c0f09d6aa371"}
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.548300 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-plgnf" event={"ID":"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1","Type":"ContainerStarted","Data":"bdbdaa234751be1d7c2f65fa7a842acdaf2d5461c1a09246511404b6590e5afe"}
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.549528 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"ee532c209b30e3e2e21dc079ea01aa8bde392a9f42452a24ad5a0c34ca81632d"}
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.549554 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"341533099ef1d4d3c0ebba4229c15e5f8133fb632ae1fd117b27f361988a904e"}
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.550811 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerStarted","Data":"2dad21f5c30337311a8252d9827906bfb8ece9b1a2b4c283865160543663631e"}
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.551746 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dm4vz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t8ch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dm4vz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.552356 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-crsvv" event={"ID":"16316a01-0118-4b01-81cb-13e869b62484","Type":"ContainerStarted","Data":"6f9d449e60417a58eed750cbff1f8c1c97a740e68bbd349f3e8b98f64e749d12"}
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.552406 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-crsvv" event={"ID":"16316a01-0118-4b01-81cb-13e869b62484","Type":"ContainerStarted","Data":"59805788575c32996677efbf8a59016147bbf33ed3fd40c7114a50e98513592a"}
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.554150 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"d22be91149878dccbc8e538045eaf67301beefef286fd92c7e167337930f99b3"}
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.554177 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"e0c3a3dd3295e8c1865539192e21489cd44235205b1b770db8a099907473d290"}
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.554186 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"49a610d205c4a695c1d711fbf97d41bfc8405996ead4ffa68affd35a336298fb"}
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.555278 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"ea37f086acb61eff1ab587d2987a53e139fd64fff96ef020b3c4251f50cfd6cd"}
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.564101 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:23Z is after 2025-08-24T17:21:41Z"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.573758 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-multus-conf-dir\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.573822 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-etc-kubernetes\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.573845 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6t8ch\" (UniqueName: \"kubernetes.io/projected/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-kube-api-access-6t8ch\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.573869 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e25d5a00-1315-4327-aadd-fd81e45fb023-system-cni-dir\") pod \"multus-additional-cni-plugins-5qjsn\" (UID: \"e25d5a00-1315-4327-aadd-fd81e45fb023\") " pod="openshift-multus/multus-additional-cni-plugins-5qjsn"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.573889 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmgl6\" (UniqueName: \"kubernetes.io/projected/e25d5a00-1315-4327-aadd-fd81e45fb023-kube-api-access-wmgl6\") pod \"multus-additional-cni-plugins-5qjsn\" (UID: \"e25d5a00-1315-4327-aadd-fd81e45fb023\") " pod="openshift-multus/multus-additional-cni-plugins-5qjsn"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.573914 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-host-var-lib-cni-multus\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.573937 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e25d5a00-1315-4327-aadd-fd81e45fb023-tuning-conf-dir\") pod \"multus-additional-cni-plugins-5qjsn\" (UID: \"e25d5a00-1315-4327-aadd-fd81e45fb023\") " pod="openshift-multus/multus-additional-cni-plugins-5qjsn"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.573957 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/e25d5a00-1315-4327-aadd-fd81e45fb023-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-5qjsn\" (UID: \"e25d5a00-1315-4327-aadd-fd81e45fb023\") " pod="openshift-multus/multus-additional-cni-plugins-5qjsn"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.573981 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-host-var-lib-kubelet\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.574002 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-os-release\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.574021 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-hostroot\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.574051 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-multus-socket-dir-parent\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.574072 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-host-run-netns\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.574095 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-multus-cni-dir\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz"
Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.574118 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-host-run-multus-certs\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz"
Feb 03 06:46:23 crc
kubenswrapper[4998]: I0203 06:46:23.574137 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-system-cni-dir\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.574158 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-multus-daemon-config\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.574181 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e25d5a00-1315-4327-aadd-fd81e45fb023-os-release\") pod \"multus-additional-cni-plugins-5qjsn\" (UID: \"e25d5a00-1315-4327-aadd-fd81e45fb023\") " pod="openshift-multus/multus-additional-cni-plugins-5qjsn" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.574218 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e25d5a00-1315-4327-aadd-fd81e45fb023-cnibin\") pod \"multus-additional-cni-plugins-5qjsn\" (UID: \"e25d5a00-1315-4327-aadd-fd81e45fb023\") " pod="openshift-multus/multus-additional-cni-plugins-5qjsn" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.574239 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e25d5a00-1315-4327-aadd-fd81e45fb023-cni-binary-copy\") pod \"multus-additional-cni-plugins-5qjsn\" (UID: \"e25d5a00-1315-4327-aadd-fd81e45fb023\") " pod="openshift-multus/multus-additional-cni-plugins-5qjsn" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.574259 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-cni-binary-copy\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.574288 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-cnibin\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.574307 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-host-run-k8s-cni-cncf-io\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.574337 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-host-var-lib-cni-bin\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " 
pod="openshift-multus/multus-dm4vz" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.578737 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dm4vz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t8ch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dm4vz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:23Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.592228 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e25d5a00-1315-4327-aadd-fd81e45fb023\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5qjsn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:23Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.605173 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c8c93fe-ba86-4899-a018-d24fb324de5c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35ab5e2fc89d8e658f04c9a5ed234dad845dcf70d39e89bd52fed8f84b6c8c68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T06:46:16Z\\\",\\\"message\\\":\\\"W0203 06:46:05.531717 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 06:46:05.532068 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770101165 cert, and key in /tmp/serving-cert-4006352559/serving-signer.crt, /tmp/serving-cert-4006352559/serving-signer.key\\\\nI0203 06:46:05.809216 1 observer_polling.go:159] Starting file observer\\\\nW0203 06:46:05.812530 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 06:46:05.812748 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 06:46:05.815997 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4006352559/tls.crt::/tmp/serving-cert-4006352559/tls.key\\\\\\\"\\\\nF0203 06:46:16.112630 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:23Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.616908 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:23Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.627769 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:23Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.637581 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v9x5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:23Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.649959 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The 
container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:23Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.673682 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The 
container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:23Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.674932 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-cni-binary-copy\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.675023 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-cnibin\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.675053 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-host-run-k8s-cni-cncf-io\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.675086 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-host-var-lib-cni-bin\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.675156 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-host-run-k8s-cni-cncf-io\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.675194 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-multus-conf-dir\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.675163 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: 
\"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-multus-conf-dir\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.675231 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-etc-kubernetes\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.675249 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6t8ch\" (UniqueName: \"kubernetes.io/projected/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-kube-api-access-6t8ch\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.675268 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e25d5a00-1315-4327-aadd-fd81e45fb023-system-cni-dir\") pod \"multus-additional-cni-plugins-5qjsn\" (UID: \"e25d5a00-1315-4327-aadd-fd81e45fb023\") " pod="openshift-multus/multus-additional-cni-plugins-5qjsn" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.675284 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmgl6\" (UniqueName: \"kubernetes.io/projected/e25d5a00-1315-4327-aadd-fd81e45fb023-kube-api-access-wmgl6\") pod \"multus-additional-cni-plugins-5qjsn\" (UID: \"e25d5a00-1315-4327-aadd-fd81e45fb023\") " pod="openshift-multus/multus-additional-cni-plugins-5qjsn" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.675277 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-host-var-lib-cni-bin\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.675314 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-host-var-lib-cni-multus\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.675338 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-host-var-lib-cni-multus\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.675359 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-etc-kubernetes\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.675364 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e25d5a00-1315-4327-aadd-fd81e45fb023-tuning-conf-dir\") pod \"multus-additional-cni-plugins-5qjsn\" (UID: 
\"e25d5a00-1315-4327-aadd-fd81e45fb023\") " pod="openshift-multus/multus-additional-cni-plugins-5qjsn" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.675391 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/e25d5a00-1315-4327-aadd-fd81e45fb023-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-5qjsn\" (UID: \"e25d5a00-1315-4327-aadd-fd81e45fb023\") " pod="openshift-multus/multus-additional-cni-plugins-5qjsn" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.675430 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-host-var-lib-kubelet\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.675471 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-os-release\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.675490 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-hostroot\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.675547 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-multus-socket-dir-parent\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.675570 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-host-run-netns\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.675595 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-multus-cni-dir\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.675597 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e25d5a00-1315-4327-aadd-fd81e45fb023-system-cni-dir\") pod \"multus-additional-cni-plugins-5qjsn\" (UID: \"e25d5a00-1315-4327-aadd-fd81e45fb023\") " pod="openshift-multus/multus-additional-cni-plugins-5qjsn" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.675634 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-host-run-multus-certs\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 
06:46:23.675655 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-system-cni-dir\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.675673 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-multus-daemon-config\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.675691 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e25d5a00-1315-4327-aadd-fd81e45fb023-os-release\") pod \"multus-additional-cni-plugins-5qjsn\" (UID: \"e25d5a00-1315-4327-aadd-fd81e45fb023\") " pod="openshift-multus/multus-additional-cni-plugins-5qjsn" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.675714 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e25d5a00-1315-4327-aadd-fd81e45fb023-cnibin\") pod \"multus-additional-cni-plugins-5qjsn\" (UID: \"e25d5a00-1315-4327-aadd-fd81e45fb023\") " pod="openshift-multus/multus-additional-cni-plugins-5qjsn" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.675733 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e25d5a00-1315-4327-aadd-fd81e45fb023-cni-binary-copy\") pod \"multus-additional-cni-plugins-5qjsn\" (UID: \"e25d5a00-1315-4327-aadd-fd81e45fb023\") " pod="openshift-multus/multus-additional-cni-plugins-5qjsn" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.675828 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-multus-socket-dir-parent\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.675871 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-host-run-netns\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.675922 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-hostroot\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.675972 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-host-var-lib-kubelet\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.675994 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: 
\"kubernetes.io/host-path/e25d5a00-1315-4327-aadd-fd81e45fb023-cnibin\") pod \"multus-additional-cni-plugins-5qjsn\" (UID: \"e25d5a00-1315-4327-aadd-fd81e45fb023\") " pod="openshift-multus/multus-additional-cni-plugins-5qjsn" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.676023 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-host-run-multus-certs\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.676082 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-system-cni-dir\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.676139 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-os-release\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.676171 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e25d5a00-1315-4327-aadd-fd81e45fb023-os-release\") pod \"multus-additional-cni-plugins-5qjsn\" (UID: \"e25d5a00-1315-4327-aadd-fd81e45fb023\") " pod="openshift-multus/multus-additional-cni-plugins-5qjsn" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.676186 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-cnibin\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.676234 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e25d5a00-1315-4327-aadd-fd81e45fb023-tuning-conf-dir\") pod \"multus-additional-cni-plugins-5qjsn\" (UID: \"e25d5a00-1315-4327-aadd-fd81e45fb023\") " pod="openshift-multus/multus-additional-cni-plugins-5qjsn" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.676551 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-multus-cni-dir\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.699448 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-plgnf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0226ecd8c9fa202b4c46cf30a396c7bd55814fe45ec536e4fff2c0f09d6aa371\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5z4jf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-plgnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:23Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.713217 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-crsvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16316a01-0118-4b01-81cb-13e869b62484\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfq5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-crsvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:23Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.757302 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f202cc16-8c9d-4e03-bd1d-0716f02a8ee7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://413ae0620494cf4c5807c78ecdfda671fd63b7aaefa96b63564212f57b34f9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4741ef3fd48469b5eef0e17e68ef7b20a3929017ff3939ee975b8a3751cfe965\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b741c846b0b5468ec9b52e16778e89632de68f37c0bd5b1fc375cc019a8c959c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f5dcbfe7ff012e1060f79ff8f126f7f86d92d4
cd113f390c41f3bdbb7e9baf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4248f5a0e550f2e4e6aa126875469ca31b11f40cf885b3d636c4d97925cb7fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:23Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.784969 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-p7b8d"] Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.785885 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.790183 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee532c209b30e3e2e21dc079ea01aa8bde392a9f42452a24ad5a0c34ca81632d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:23Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.801532 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.821060 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.841144 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.861399 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.882194 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.901939 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 
06:46:23.922326 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.978761 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:46:23 crc kubenswrapper[4998]: E0203 06:46:23.978991 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:46:25.978948708 +0000 UTC m=+24.265642564 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.979044 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-slash\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.979169 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-var-lib-openvswitch\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.979289 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-cni-netd\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.979389 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-cni-bin\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.979445 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-run-openvswitch\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.979491 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" 
(UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-run-ovn\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.979568 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f7418b1d-9f7d-48cd-aac4-6a1b85967841-ovn-node-metrics-cert\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.979615 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-run-netns\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.979663 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.979717 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-systemd-units\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.979762 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-etc-openvswitch\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.979854 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f7418b1d-9f7d-48cd-aac4-6a1b85967841-ovnkube-config\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.979903 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9zmz2\" (UniqueName: \"kubernetes.io/projected/f7418b1d-9f7d-48cd-aac4-6a1b85967841-kube-api-access-9zmz2\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.979950 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-run-systemd\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:23 crc kubenswrapper[4998]: 
I0203 06:46:23.979996 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-log-socket\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.980048 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-run-ovn-kubernetes\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.980103 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f7418b1d-9f7d-48cd-aac4-6a1b85967841-env-overrides\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.980149 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f7418b1d-9f7d-48cd-aac4-6a1b85967841-ovnkube-script-lib\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.980231 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-node-log\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.980306 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-kubelet\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:23 crc kubenswrapper[4998]: I0203 06:46:23.987104 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7418b1d-9f7d-48cd-aac4-6a1b85967841\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node 
kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\
\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\
",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7b8d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:23Z is after 
2025-08-24T17:21:41Z" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.015001 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c8c93fe-ba86-4899-a018-d24fb324de5c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"
started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35ab5e2fc89d8e658f04c9a5ed234dad845dcf70d39e89bd52fed8f84b6c8c68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T06:46:16Z\\\",\\\"message\\\":\\\"W0203 06:46:05.531717 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 06:46:05.532068 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770101165 cert, and key in /tmp/serving-cert-4006352559/serving-signer.crt, /tmp/serving-cert-4006352559/serving-signer.key\\\\nI0203 06:46:05.809216 1 observer_polling.go:159] Starting file observer\\\\nW0203 06:46:05.812530 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 06:46:05.812748 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 06:46:05.815997 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4006352559/tls.crt::/tmp/serving-cert-4006352559/tls.key\\\\\\\"\\\\nF0203 06:46:16.112630 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:24Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.050901 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:24Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.081696 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-cni-netd\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.081737 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.081767 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-cni-bin\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.081816 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-run-openvswitch\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.081837 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-run-ovn\") pod \"ovnkube-node-p7b8d\" (UID: 
\"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.081861 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f7418b1d-9f7d-48cd-aac4-6a1b85967841-ovn-node-metrics-cert\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.081871 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-cni-bin\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: E0203 06:46:24.081887 4998 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 03 06:46:24 crc kubenswrapper[4998]: E0203 06:46:24.081909 4998 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 03 06:46:24 crc kubenswrapper[4998]: E0203 06:46:24.081920 4998 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 06:46:24 crc kubenswrapper[4998]: E0203 06:46:24.081944 4998 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 03 06:46:24 crc kubenswrapper[4998]: E0203 06:46:24.081959 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-03 06:46:26.081947212 +0000 UTC m=+24.368641008 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.081884 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:46:24 crc kubenswrapper[4998]: E0203 06:46:24.081986 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2026-02-03 06:46:26.081972663 +0000 UTC m=+24.368666479 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.081979 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-run-ovn\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.082018 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-cni-netd\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.082034 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-run-netns\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.082008 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-run-netns\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.082091 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.082128 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-systemd-units\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.082160 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-etc-openvswitch\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.082183 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f7418b1d-9f7d-48cd-aac4-6a1b85967841-ovnkube-config\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: 
I0203 06:46:24.082202 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9zmz2\" (UniqueName: \"kubernetes.io/projected/f7418b1d-9f7d-48cd-aac4-6a1b85967841-kube-api-access-9zmz2\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.082209 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-systemd-units\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.082224 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-log-socket\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.082244 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-run-systemd\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.082246 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-etc-openvswitch\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.082260 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-run-ovn-kubernetes\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.082279 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f7418b1d-9f7d-48cd-aac4-6a1b85967841-env-overrides\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.082296 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f7418b1d-9f7d-48cd-aac4-6a1b85967841-ovnkube-script-lib\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.082299 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-run-ovn-kubernetes\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.082324 4998 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-run-systemd\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.082279 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-log-socket\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.082164 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.082348 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-node-log\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.082379 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-kubelet\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.082401 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.082417 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-slash\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.082431 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-var-lib-openvswitch\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.082453 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:46:24 crc kubenswrapper[4998]: E0203 06:46:24.082580 4998 projected.go:288] Couldn't get 
configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 03 06:46:24 crc kubenswrapper[4998]: E0203 06:46:24.082597 4998 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 03 06:46:24 crc kubenswrapper[4998]: E0203 06:46:24.082609 4998 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 06:46:24 crc kubenswrapper[4998]: E0203 06:46:24.082649 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-03 06:46:26.082636453 +0000 UTC m=+24.369330259 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.082680 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-slash\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.082706 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-var-lib-openvswitch\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.082730 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-node-log\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.082752 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-kubelet\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.082948 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f7418b1d-9f7d-48cd-aac4-6a1b85967841-env-overrides\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.082988 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f7418b1d-9f7d-48cd-aac4-6a1b85967841-ovnkube-script-lib\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.083042 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-run-openvswitch\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: E0203 06:46:24.083059 4998 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 03 06:46:24 crc kubenswrapper[4998]: E0203 06:46:24.083107 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-03 06:46:26.083096876 +0000 UTC m=+24.369790682 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.083450 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f7418b1d-9f7d-48cd-aac4-6a1b85967841-ovnkube-config\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.086274 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f7418b1d-9f7d-48cd-aac4-6a1b85967841-ovn-node-metrics-cert\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.092887 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e25d5a00-1315-4327-aadd-fd81e45fb023\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plu
gin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5qjsn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-02-03T06:46:24Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.122845 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9zmz2\" (UniqueName: \"kubernetes.io/projected/f7418b1d-9f7d-48cd-aac4-6a1b85967841-kube-api-access-9zmz2\") pod \"ovnkube-node-p7b8d\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.152630 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:24Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.191992 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v9x5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:24Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.231798 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container 
could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:24Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.268280 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-plgnf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0226ecd8c9fa202b4c46cf30a396c7bd55814fe45ec536e4fff2c0f09d6aa371\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5z4jf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-plgnf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:24Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.309144 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-crsvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16316a01-0118-4b01-81cb-13e869b62484\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f9d449e60417a58eed750cbff1f8c1c97a740e68bbd349f3e8b98f64e749d12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfq5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-crsvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:24Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.321394 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.327832 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/e25d5a00-1315-4327-aadd-fd81e45fb023-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-5qjsn\" (UID: \"e25d5a00-1315-4327-aadd-fd81e45fb023\") " 
pod="openshift-multus/multus-additional-cni-plugins-5qjsn" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.377833 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f202cc16-8c9d-4e03-bd1d-0716f02a8ee7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://413ae0620494cf4c5807c78ecdfda671fd63b7aaefa96b63564212f57b34f9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4741ef3fd48469b5eef0e17e68ef7b20a3929017ff3939ee975b8a3751cfe965\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b741c846b0b5468ec9b52e16778e89632de68f37c0bd5b1fc375cc019a8c959c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\
\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f5dcbfe7ff012e1060f79ff8f126f7f86d92d4cd113f390c41f3bdbb7e9baf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4248f5a0e550f2e4e6aa126875469ca31b11f40cf885b3d636c4d97925cb7fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"exitC
ode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:24Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.381695 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.385394 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 06:37:20.734330586 +0000 UTC Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.387079 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-multus-daemon-config\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.395960 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:24 crc kubenswrapper[4998]: W0203 06:46:24.413348 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf7418b1d_9f7d_48cd_aac4_6a1b85967841.slice/crio-bbd0fe9ccb5d93b1e7493e04e18e8934312198958fb7d96c5c2e225dd0a7acab WatchSource:0}: Error finding container bbd0fe9ccb5d93b1e7493e04e18e8934312198958fb7d96c5c2e225dd0a7acab: Status 404 returned error can't find the container with id bbd0fe9ccb5d93b1e7493e04e18e8934312198958fb7d96c5c2e225dd0a7acab Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.427864 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.427912 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 06:46:24 crc kubenswrapper[4998]: E0203 06:46:24.428034 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.428058 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:46:24 crc kubenswrapper[4998]: E0203 06:46:24.428212 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 06:46:24 crc kubenswrapper[4998]: E0203 06:46:24.428316 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.435143 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee532c209b30e3e2e21dc079ea01aa8bde392a9f42452a24ad5a0c34ca81632d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:24Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.438114 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.438836 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.439547 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.440195 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.441767 4998 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.442366 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.442986 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.444031 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.445066 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.445959 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.446520 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.447727 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.448410 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.449323 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.449872 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.450716 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.451647 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.452036 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.453105 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.453713 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.454187 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.455155 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.455710 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.456795 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.457187 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.458166 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.458763 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.459252 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.460361 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.461055 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.462286 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.462449 4998 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.462559 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" 
path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.464428 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.465370 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.465991 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.467365 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.468683 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.469453 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.471028 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.471738 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.473219 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.473868 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.474893 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.475987 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.476439 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.477408 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" 
path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.477945 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.479002 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.479581 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.480064 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.481225 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.481772 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.482833 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.483297 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.497746 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d22be91149878dccbc8e538045eaf67301beefef286fd92c7e167337930f99b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0c3a3dd3295e8c1865539192e21489cd44235205b1b770db8a099907473d290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:24Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.501937 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.506548 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-cni-binary-copy\") pod \"multus-dm4vz\" (UID: 
\"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.508169 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e25d5a00-1315-4327-aadd-fd81e45fb023-cni-binary-copy\") pod \"multus-additional-cni-plugins-5qjsn\" (UID: \"e25d5a00-1315-4327-aadd-fd81e45fb023\") " pod="openshift-multus/multus-additional-cni-plugins-5qjsn" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.550844 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:24Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.559650 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerStarted","Data":"981f8ad138cf566afc1dc984e1549e0dabb353e61469585343b399ef799f2b71"} Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.559932 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerStarted","Data":"8ab581feac753b27611d84de9a135dada6c09b508339e915247f806c6d69db1e"} Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.561036 4998 generic.go:334] "Generic (PLEG): container finished" podID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerID="fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c" exitCode=0 Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.561086 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" event={"ID":"f7418b1d-9f7d-48cd-aac4-6a1b85967841","Type":"ContainerDied","Data":"fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c"} Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.561113 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" event={"ID":"f7418b1d-9f7d-48cd-aac4-6a1b85967841","Type":"ContainerStarted","Data":"bbd0fe9ccb5d93b1e7493e04e18e8934312198958fb7d96c5c2e225dd0a7acab"} Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.581437 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.590319 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6t8ch\" (UniqueName: \"kubernetes.io/projected/2cba0dd3-b238-4ad4-9517-e2bf7d30b635-kube-api-access-6t8ch\") pod \"multus-dm4vz\" (UID: \"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\") " pod="openshift-multus/multus-dm4vz" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.596362 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmgl6\" (UniqueName: \"kubernetes.io/projected/e25d5a00-1315-4327-aadd-fd81e45fb023-kube-api-access-wmgl6\") pod \"multus-additional-cni-plugins-5qjsn\" (UID: \"e25d5a00-1315-4327-aadd-fd81e45fb023\") " 
pod="openshift-multus/multus-additional-cni-plugins-5qjsn" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.612698 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dm4vz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t8ch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\
\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dm4vz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:24Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.649118 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d22be91149878dccbc8e538045eaf67301beefef286fd92c7e167337930f99b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0c3a3dd3295e8c1865539192e21489cd44235205b1b770db8a099907473d290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:24Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.691542 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:24Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.730878 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-plgnf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0226ecd8c9fa202b4c46cf30a396c7bd55814fe45ec536e4fff2c0f09d6aa371\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5z4jf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-plgnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:24Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.744538 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" Feb 03 06:46:24 crc kubenswrapper[4998]: W0203 06:46:24.757629 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode25d5a00_1315_4327_aadd_fd81e45fb023.slice/crio-ebdd9387f57000a938d6802404a352e4babd14c20e8b958c12cbb57de60bd9c4 WatchSource:0}: Error finding container ebdd9387f57000a938d6802404a352e4babd14c20e8b958c12cbb57de60bd9c4: Status 404 returned error can't find the container with id ebdd9387f57000a938d6802404a352e4babd14c20e8b958c12cbb57de60bd9c4 Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.768900 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-crsvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16316a01-0118-4b01-81cb-13e869b62484\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f9d449e60417a58eed750cbff1f8c1c97a740e68bbd349f3e8b98f64e749d12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfq5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-crsvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:24Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.817817 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f202cc16-8c9d-4e03-bd1d-0716f02a8ee7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://413ae0620494cf4c5807c78ecdfda671fd63b7aaefa96b63564212f57b34f9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4741ef3fd48469b5eef0e17e68ef7b20a3929017ff3939ee975b8a3751cfe965\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b741c846b0b5468ec9b52e16778e89632de68f37c0bd5b1fc375cc019a8c959c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f5dcbfe7ff012e1060f79ff8f126f7f86d92d4
cd113f390c41f3bdbb7e9baf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4248f5a0e550f2e4e6aa126875469ca31b11f40cf885b3d636c4d97925cb7fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:24Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.851868 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee532c209b30e3e2e21dc079ea01aa8bde392a9f42452a24ad5a0c34ca81632d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:24Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.881878 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.882032 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-dm4vz" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.910632 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:24Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.950284 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dm4vz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t8ch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dm4vz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:24Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:24 crc kubenswrapper[4998]: I0203 06:46:24.992428 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e25d5a00-1315-4327-aadd-fd81e45fb023\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5qjsn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:24Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:25 crc kubenswrapper[4998]: I0203 06:46:25.037065 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7418b1d-9f7d-48cd-aac4-6a1b85967841\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7b8d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:25Z 
is after 2025-08-24T17:21:41Z" Feb 03 06:46:25 crc kubenswrapper[4998]: I0203 06:46:25.077932 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c8c93fe-ba86-4899-a018-d24fb324de5c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35ab5e2fc89d8e658f04c9a5ed234dad845dcf70d39e89bd52fed8f84b6c8c68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T06:46:16Z\\\",\\\"message\\\":\\\"W0203 06:46:05.531717 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 06:46:05.532068 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770101165 cert, and key in /tmp/serving-cert-4006352559/serving-signer.crt, /tmp/serving-cert-4006352559/serving-signer.key\\\\nI0203 06:46:05.809216 1 observer_polling.go:159] Starting file observer\\\\nW0203 06:46:05.812530 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 06:46:05.812748 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 06:46:05.815997 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4006352559/tls.crt::/tmp/serving-cert-4006352559/tls.key\\\\\\\"\\\\nF0203 06:46:16.112630 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:25Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:25 crc kubenswrapper[4998]: I0203 06:46:25.113395 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:25Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:25 crc kubenswrapper[4998]: I0203 06:46:25.149167 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:25Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:25 crc kubenswrapper[4998]: I0203 06:46:25.189284 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981f8ad138cf566afc1dc984e1549e0dabb353e61469585343b399ef799f2b71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ab581feac753b27611d84de9a135dada6c09b508339e915247f806c6d69db1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v9x5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:25Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:25 crc kubenswrapper[4998]: I0203 06:46:25.385827 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 15:21:34.310755777 +0000 UTC Feb 03 06:46:25 crc kubenswrapper[4998]: I0203 06:46:25.515811 4998 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Feb 03 06:46:25 crc kubenswrapper[4998]: I0203 06:46:25.569120 4998 generic.go:334] "Generic (PLEG): container finished" podID="e25d5a00-1315-4327-aadd-fd81e45fb023" containerID="0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9" exitCode=0 Feb 03 06:46:25 crc kubenswrapper[4998]: I0203 06:46:25.569210 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" event={"ID":"e25d5a00-1315-4327-aadd-fd81e45fb023","Type":"ContainerDied","Data":"0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9"} Feb 03 06:46:25 crc kubenswrapper[4998]: I0203 06:46:25.569270 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" event={"ID":"e25d5a00-1315-4327-aadd-fd81e45fb023","Type":"ContainerStarted","Data":"ebdd9387f57000a938d6802404a352e4babd14c20e8b958c12cbb57de60bd9c4"} Feb 03 06:46:25 crc kubenswrapper[4998]: I0203 06:46:25.573252 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"d754bb9fdbc001ab9d9a1ce51b99180c387ebda84e0a5b4d1fe9af76c6c258fa"} Feb 03 06:46:25 crc kubenswrapper[4998]: I0203 06:46:25.575660 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-dm4vz" event={"ID":"2cba0dd3-b238-4ad4-9517-e2bf7d30b635","Type":"ContainerStarted","Data":"91ddb23bc0f787c0874522cb2f92e61f895a03595a116248d27fbc2030a1cf92"} Feb 03 06:46:25 crc kubenswrapper[4998]: I0203 06:46:25.575703 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-dm4vz" event={"ID":"2cba0dd3-b238-4ad4-9517-e2bf7d30b635","Type":"ContainerStarted","Data":"0c99b49aaefccd13fbb90d5ca75e6299681a0745763fe9719eb2ac4ac662bd63"} Feb 03 06:46:25 crc kubenswrapper[4998]: I0203 06:46:25.580420 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" 
event={"ID":"f7418b1d-9f7d-48cd-aac4-6a1b85967841","Type":"ContainerStarted","Data":"2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb"} Feb 03 06:46:25 crc kubenswrapper[4998]: I0203 06:46:25.580706 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" event={"ID":"f7418b1d-9f7d-48cd-aac4-6a1b85967841","Type":"ContainerStarted","Data":"0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f"} Feb 03 06:46:25 crc kubenswrapper[4998]: I0203 06:46:25.580816 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" event={"ID":"f7418b1d-9f7d-48cd-aac4-6a1b85967841","Type":"ContainerStarted","Data":"9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c"} Feb 03 06:46:25 crc kubenswrapper[4998]: I0203 06:46:25.580901 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" event={"ID":"f7418b1d-9f7d-48cd-aac4-6a1b85967841","Type":"ContainerStarted","Data":"3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254"} Feb 03 06:46:25 crc kubenswrapper[4998]: I0203 06:46:25.586718 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:25Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:25 crc kubenswrapper[4998]: I0203 06:46:25.612949 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dm4vz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t8ch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dm4vz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:25Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:25 crc kubenswrapper[4998]: I0203 06:46:25.641226 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c8c93fe-ba86-4899-a018-d24fb324de5c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://35ab5e2fc89d8e658f04c9a5ed234dad845dcf70d39e89bd52fed8f84b6c8c68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T06:46:16Z\\\",\\\"message\\\":\\\"W0203 06:46:05.531717 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 06:46:05.532068 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770101165 cert, and key in /tmp/serving-cert-4006352559/serving-signer.crt, /tmp/serving-cert-4006352559/serving-signer.key\\\\nI0203 06:46:05.809216 1 observer_polling.go:159] Starting file observer\\\\nW0203 06:46:05.812530 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 06:46:05.812748 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 06:46:05.815997 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4006352559/tls.crt::/tmp/serving-cert-4006352559/tls.key\\\\\\\"\\\\nF0203 06:46:16.112630 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPa
th\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:25Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:25 crc kubenswrapper[4998]: I0203 06:46:25.655679 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:25Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:25 crc kubenswrapper[4998]: I0203 06:46:25.673883 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e25d5a00-1315-4327-aadd-fd81e45fb023\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5qjsn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:25Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:25 crc 
kubenswrapper[4998]: I0203 06:46:25.693856 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7418b1d-9f7d-48cd-aac4-6a1b85967841\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"Po
dInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\
\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"image\\\":\\\"quay.io/openshift-re
lease-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7b8d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:25Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:25 crc kubenswrapper[4998]: I0203 06:46:25.709825 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:25Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:25 crc kubenswrapper[4998]: I0203 06:46:25.723152 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981f8ad138cf566afc1dc984e1549e0dabb353e61469585343b399ef799f2b71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ab581feac753b27611d84de9a135dada6c09b508339e915247f806c6d69db1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v9x5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:25Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:25 crc kubenswrapper[4998]: I0203 06:46:25.737324 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 03 06:46:25 crc kubenswrapper[4998]: I0203 06:46:25.740743 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 03 06:46:25 crc kubenswrapper[4998]: I0203 06:46:25.750192 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Feb 03 06:46:25 crc kubenswrapper[4998]: I0203 06:46:25.751293 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f202cc16-8c9d-4e03-bd1d-0716f02a8ee7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://413ae0620494cf4c5807c78ecdfda671fd63b7aaefa96b63564212f57b34f9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4741ef3fd48469b5eef0e17e68ef7b20a3929017ff3939ee975b8a3751cfe965\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b741c846b0b5468ec9b52e16778e89632de68f37c0bd5b1fc375cc019a8c959c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f5dcbfe7ff012e1060f79ff8f126f7f86d92d4
cd113f390c41f3bdbb7e9baf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4248f5a0e550f2e4e6aa126875469ca31b11f40cf885b3d636c4d97925cb7fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:25Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:25 crc kubenswrapper[4998]: I0203 06:46:25.765115 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee532c209b30e3e2e21dc079ea01aa8bde392a9f42452a24ad5a0c34ca81632d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:25Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:25 crc kubenswrapper[4998]: I0203 06:46:25.777643 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d22be91149878dccbc8e538045eaf67301beefef286fd92c7e167337930f99b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0c3a3dd3295e8c1865539192e21489cd44235205b1b770db8a099907473d290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-02-03T06:46:25Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:25 crc kubenswrapper[4998]: I0203 06:46:25.789465 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:25Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:25 crc kubenswrapper[4998]: I0203 06:46:25.799705 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-plgnf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0226ecd8c9fa202b4c46cf30a396c7bd55814fe45ec536e4fff2c0f09d6aa371\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5z4jf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-plgnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:25Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:25 crc kubenswrapper[4998]: I0203 06:46:25.811873 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-crsvv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16316a01-0118-4b01-81cb-13e869b62484\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f9d449e60417a58eed750cbff1f8c1c97a740e68bbd349f3e8b98f64e749d12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfq5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-crsvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:25Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:25 crc kubenswrapper[4998]: I0203 06:46:25.831393 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:25Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:25 crc kubenswrapper[4998]: I0203 06:46:25.872390 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e25d5a00-1315-4327-aadd-fd81e45fb023\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5qjsn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:25Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:25 crc 
kubenswrapper[4998]: I0203 06:46:25.915174 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7418b1d-9f7d-48cd-aac4-6a1b85967841\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"Po
dInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\
\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"image\\\":\\\"quay.io/openshift-re
lease-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7b8d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:25Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:25 crc kubenswrapper[4998]: I0203 06:46:25.952418 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c8c93fe-ba86-4899-a018-d24fb324de5c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35ab5e2fc89d8e658f04c9a5ed234dad845dcf70d39e89bd52fed8f84b6c8c68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T06:46:16Z\\\",\\\"message\\\":\\\"W0203 06:46:05.531717 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 
06:46:05.532068 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770101165 cert, and key in /tmp/serving-cert-4006352559/serving-signer.crt, /tmp/serving-cert-4006352559/serving-signer.key\\\\nI0203 06:46:05.809216 1 observer_polling.go:159] Starting file observer\\\\nW0203 06:46:05.812530 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 06:46:05.812748 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 06:46:05.815997 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4006352559/tls.crt::/tmp/serving-cert-4006352559/tls.key\\\\\\\"\\\\nF0203 06:46:16.112630 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:25Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:25 crc kubenswrapper[4998]: I0203 06:46:25.989696 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:25Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.002723 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:46:26 crc kubenswrapper[4998]: E0203 06:46:26.002968 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:46:30.002939246 +0000 UTC m=+28.289633092 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.032244 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981f8ad138cf566afc1dc984e1549e0dabb353e61469585343b399ef799f2b71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ab581feac753b27611d84de9a135dada6c09b508339e915247f806c6d69db1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\
\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v9x5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:26Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.076004 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee532c209b30e3e2e21dc079ea01aa8bde392a9f42452a24ad5a0c34ca81632d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:26Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.104460 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.104521 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: 
\"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.104544 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.104563 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 06:46:26 crc kubenswrapper[4998]: E0203 06:46:26.104640 4998 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 03 06:46:26 crc kubenswrapper[4998]: E0203 06:46:26.104675 4998 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 03 06:46:26 crc kubenswrapper[4998]: E0203 06:46:26.104683 4998 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 03 06:46:26 crc kubenswrapper[4998]: E0203 06:46:26.104740 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-03 06:46:30.104723515 +0000 UTC m=+28.391417321 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 03 06:46:26 crc kubenswrapper[4998]: E0203 06:46:26.104746 4998 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 03 06:46:26 crc kubenswrapper[4998]: E0203 06:46:26.104754 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-03 06:46:30.104748075 +0000 UTC m=+28.391441881 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 03 06:46:26 crc kubenswrapper[4998]: E0203 06:46:26.104759 4998 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 06:46:26 crc kubenswrapper[4998]: E0203 06:46:26.104815 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-03 06:46:30.104804087 +0000 UTC m=+28.391497903 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 06:46:26 crc kubenswrapper[4998]: E0203 06:46:26.104847 4998 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 03 06:46:26 crc kubenswrapper[4998]: E0203 06:46:26.104858 4998 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 03 06:46:26 crc kubenswrapper[4998]: E0203 06:46:26.104867 4998 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 06:46:26 crc kubenswrapper[4998]: E0203 06:46:26.104888 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-03 06:46:30.104881789 +0000 UTC m=+28.391575795 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.112261 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d22be91149878dccbc8e538045eaf67301beefef286fd92c7e167337930f99b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0c3a3dd3295e8c1865539192e21489cd44235205b1b770db8a099907473d290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:26Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.150388 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d754bb9fdbc001ab9d9a1ce51b99180c387ebda84e0a5b4d1fe9af76c6c258fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:26Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.192711 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-plgnf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0226ecd8c9fa202b4c46cf30a396c7bd55814fe45ec536e4fff2c0f09d6aa371\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5z4jf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-plgnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:26Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.231195 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-crsvv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16316a01-0118-4b01-81cb-13e869b62484\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f9d449e60417a58eed750cbff1f8c1c97a740e68bbd349f3e8b98f64e749d12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfq5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-crsvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:26Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.274348 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f202cc16-8c9d-4e03-bd1d-0716f02a8ee7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://413ae0620494cf4c5807c78ecdfda671fd63b7aaefa96b63564212f57b34f9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4741ef3fd48469b5eef0e17e68ef7b20a3929017ff3939ee975b8a3751cfe965\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b741c846b0b5468ec9b52e16778e89632de68f37c0bd5b1fc375cc019a8c959c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f5dcbfe7ff012e1060f79ff8f126f7f86d92d4
cd113f390c41f3bdbb7e9baf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4248f5a0e550f2e4e6aa126875469ca31b11f40cf885b3d636c4d97925cb7fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:26Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.310471 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dm4vz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91ddb23bc0f787c0874522cb2f92e61f895a03595a116248d27fbc2030a1cf92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/mul
tus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t8ch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dm4vz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:26Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.355653 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48b7853e-6cfe-4818-8635-2b12f2be8097\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://66250fee433e0a4da7f8a56ca4c5299eed2c156035640d6ef9e401c81a60abfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3631ce704525fba5e76870d8f108ed0e5d21e9b061d2ffc99b84c1c4755aa0e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d261e399c0e0ab21edf85442c34085baa35a4d89963fe29dfc815ec96172d14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4be2f3e13f93718e31ed1d5f95cc05eb5e2996825afabed9e742a839cf2949a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:26Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.386863 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-03 02:17:53.364212789 +0000 UTC Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.397000 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:26Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.426868 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.426953 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 06:46:26 crc kubenswrapper[4998]: E0203 06:46:26.427012 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 06:46:26 crc kubenswrapper[4998]: E0203 06:46:26.427088 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.426881 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:46:26 crc kubenswrapper[4998]: E0203 06:46:26.427767 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.586887 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" event={"ID":"f7418b1d-9f7d-48cd-aac4-6a1b85967841","Type":"ContainerStarted","Data":"0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef"} Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.586941 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" event={"ID":"f7418b1d-9f7d-48cd-aac4-6a1b85967841","Type":"ContainerStarted","Data":"200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc"} Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.589413 4998 generic.go:334] "Generic (PLEG): container finished" podID="e25d5a00-1315-4327-aadd-fd81e45fb023" containerID="ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7" exitCode=0 Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.589533 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" event={"ID":"e25d5a00-1315-4327-aadd-fd81e45fb023","Type":"ContainerDied","Data":"ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7"} Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.604865 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee532c209b30e3e2e21dc079ea01aa8bde392a9f42452a24ad5a0c34ca81632d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2026-02-03T06:46:26Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.620958 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d22be91149878dccbc8e538045eaf67301beefef286fd92c7e167337930f99b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0c3a3dd3295e8c1865539192e21489cd44235205b1b770db8a099907473d290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:26Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.635515 4998 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d754bb9fdbc001ab9d9a1ce51b99180c387ebda84e0a5b4d1fe9af76c6c258fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:26Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.647275 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-plgnf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0226ecd8c9fa202b4c46cf30a396c7bd55814fe45ec536e4fff2c0f09d6aa371\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5z4jf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-plgnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:26Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.659886 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-crsvv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16316a01-0118-4b01-81cb-13e869b62484\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f9d449e60417a58eed750cbff1f8c1c97a740e68bbd349f3e8b98f64e749d12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfq5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-crsvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:26Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.682623 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f202cc16-8c9d-4e03-bd1d-0716f02a8ee7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://413ae0620494cf4c5807c78ecdfda671fd63b7aaefa96b63564212f57b34f9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4741ef3fd48469b5eef0e17e68ef7b20a3929017ff3939ee975b8a3751cfe965\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b741c846b0b5468ec9b52e16778e89632de68f37c0bd5b1fc375cc019a8c959c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f5dcbfe7ff012e1060f79ff8f126f7f86d92d4
cd113f390c41f3bdbb7e9baf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4248f5a0e550f2e4e6aa126875469ca31b11f40cf885b3d636c4d97925cb7fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:26Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.700040 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dm4vz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91ddb23bc0f787c0874522cb2f92e61f895a03595a116248d27fbc2030a1cf92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/mul
tus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t8ch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dm4vz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:26Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.716335 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48b7853e-6cfe-4818-8635-2b12f2be8097\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://66250fee433e0a4da7f8a56ca4c5299eed2c156035640d6ef9e401c81a60abfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3631ce704525fba5e76870d8f108ed0e5d21e9b061d2ffc99b84c1c4755aa0e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d261e399c0e0ab21edf85442c34085baa35a4d89963fe29dfc815ec96172d14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4be2f3e13f93718e31ed1d5f95cc05eb5e2996825afabed9e742a839cf2949a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:26Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.750199 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:26Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.789866 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:26Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.829745 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e25d5a00-1315-4327-aadd-fd81e45fb023\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-
03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5qjsn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:26Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.874258 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7418b1d-9f7d-48cd-aac4-6a1b85967841\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7b8d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:26Z 
is after 2025-08-24T17:21:41Z" Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.909135 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c8c93fe-ba86-4899-a018-d24fb324de5c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35ab5e2fc89d8e658f04c9a5ed234dad845dcf70d39e89bd52fed8f84b6c8c68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T06:46:16Z\\\",\\\"message\\\":\\\"W0203 06:46:05.531717 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 06:46:05.532068 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770101165 cert, and key in /tmp/serving-cert-4006352559/serving-signer.crt, /tmp/serving-cert-4006352559/serving-signer.key\\\\nI0203 06:46:05.809216 1 observer_polling.go:159] Starting file observer\\\\nW0203 06:46:05.812530 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 06:46:05.812748 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 06:46:05.815997 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4006352559/tls.crt::/tmp/serving-cert-4006352559/tls.key\\\\\\\"\\\\nF0203 06:46:16.112630 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:26Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.949446 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:26Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:26 crc kubenswrapper[4998]: I0203 06:46:26.990916 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981f8ad138cf566afc1dc984e1549e0dabb353e61469585343b399ef799f2b71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ab581feac753b27611d84de9a135dada6c09b508339e915247f806c6d69db1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v9x5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:26Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.387651 4998 certificate_manager.go:356] 
kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-14 10:52:40.449970785 +0000 UTC Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.595376 4998 generic.go:334] "Generic (PLEG): container finished" podID="e25d5a00-1315-4327-aadd-fd81e45fb023" containerID="5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a" exitCode=0 Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.595430 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" event={"ID":"e25d5a00-1315-4327-aadd-fd81e45fb023","Type":"ContainerDied","Data":"5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a"} Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.612462 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e25d5a00-1315-4327-aadd-fd81e45fb023\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5qjsn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:27Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.632600 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7418b1d-9f7d-48cd-aac4-6a1b85967841\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7b8d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:27Z 
is after 2025-08-24T17:21:41Z" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.657920 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c8c93fe-ba86-4899-a018-d24fb324de5c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35ab5e2fc89d8e658f04c9a5ed234dad845dcf70d39e89bd52fed8f84b6c8c68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T06:46:16Z\\\",\\\"message\\\":\\\"W0203 06:46:05.531717 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 06:46:05.532068 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770101165 cert, and key in /tmp/serving-cert-4006352559/serving-signer.crt, /tmp/serving-cert-4006352559/serving-signer.key\\\\nI0203 06:46:05.809216 1 observer_polling.go:159] Starting file observer\\\\nW0203 06:46:05.812530 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 06:46:05.812748 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 06:46:05.815997 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4006352559/tls.crt::/tmp/serving-cert-4006352559/tls.key\\\\\\\"\\\\nF0203 06:46:16.112630 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:27Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.671848 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:27Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.685361 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:27Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.697319 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981f8ad138cf566afc1dc984e1549e0dabb353e61469585343b399ef799f2b71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ab581feac753b27611d84de9a135dada6c09b508339e915247f806c6d69db1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v9x5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:27Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.710186 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d22be91149878dccbc8e538045eaf67301beefef286fd92c7e167337930f99b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0c3a3dd3295e8c1865539192e21489cd44235205b1b770db8a099907473d290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\
\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:27Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.723657 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d754bb9fdbc001ab9d9a1ce51b99180c387ebda84e0a5b4d1fe9af76c6c258fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:27Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.735552 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-plgnf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0226ecd8c9fa202b4c46cf30a396c7bd55814fe45ec536e4fff2c0f09d6aa371\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5z4jf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-plgnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:27Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.747942 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-crsvv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16316a01-0118-4b01-81cb-13e869b62484\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f9d449e60417a58eed750cbff1f8c1c97a740e68bbd349f3e8b98f64e749d12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfq5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-crsvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:27Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.770055 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f202cc16-8c9d-4e03-bd1d-0716f02a8ee7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://413ae0620494cf4c5807c78ecdfda671fd63b7aaefa96b63564212f57b34f9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4741ef3fd48469b5eef0e17e68ef7b20a3929017ff3939ee975b8a3751cfe965\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b741c846b0b5468ec9b52e16778e89632de68f37c0bd5b1fc375cc019a8c959c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f5dcbfe7ff012e1060f79ff8f126f7f86d92d4
cd113f390c41f3bdbb7e9baf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4248f5a0e550f2e4e6aa126875469ca31b11f40cf885b3d636c4d97925cb7fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:27Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.787263 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee532c209b30e3e2e21dc079ea01aa8bde392a9f42452a24ad5a0c34ca81632d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:27Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.799629 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48b7853e-6cfe-4818-8635-2b12f2be8097\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://66250fee433e0a4da7f8a56ca4c5299eed2c156035640d6ef9e401c81a60abfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3631ce704525fba5e76870d8f108ed0e5d21e9b061d2ffc99b84c1c4755aa0e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d261e399c0e0ab21edf85442c34085baa35a4d89963fe29dfc815ec96172d14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4be2f3e13f93718e31ed1d5f95cc05eb5e2996825afabed9e742a839cf2949a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:27Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.811124 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:27Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.821935 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dm4vz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91ddb23bc0f787c0874522cb2f92e61f895a03595a116248d27fbc2030a1cf92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t8ch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dm4vz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:27Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.944038 4998 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.945927 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.945964 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.945974 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.946097 4998 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.953596 4998 kubelet_node_status.go:115] "Node was previously registered" node="crc" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.953914 4998 kubelet_node_status.go:79] "Successfully registered node" node="crc" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.954918 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.954952 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.954964 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.954980 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.954995 4998 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:27Z","lastTransitionTime":"2026-02-03T06:46:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:27 crc kubenswrapper[4998]: E0203 06:46:27.968709 4998 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29c0cf50-7633-4e19-9ce0-3cf9f0cae181\\\",\\\"systemUUID\\\":\\\"2c417530-aeae-4e8e-ac67-d425307cb93c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:27Z is after 
2025-08-24T17:21:41Z" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.971950 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.971986 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.971998 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.972013 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.972024 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:27Z","lastTransitionTime":"2026-02-03T06:46:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:27 crc kubenswrapper[4998]: E0203 06:46:27.981880 4998 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29c0cf50-7633-4e19-9ce0-3cf9f0cae181\\\",\\\"systemUUID\\\":\\\"2c417530-aeae-4e8e-ac67-d425307cb93c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:27Z is after 
2025-08-24T17:21:41Z" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.985011 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.985042 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.985053 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.985067 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.985080 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:27Z","lastTransitionTime":"2026-02-03T06:46:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:27 crc kubenswrapper[4998]: E0203 06:46:27.994592 4998 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29c0cf50-7633-4e19-9ce0-3cf9f0cae181\\\",\\\"systemUUID\\\":\\\"2c417530-aeae-4e8e-ac67-d425307cb93c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:27Z is after 
2025-08-24T17:21:41Z" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.997330 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.997357 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.997365 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.997377 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:27 crc kubenswrapper[4998]: I0203 06:46:27.997387 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:27Z","lastTransitionTime":"2026-02-03T06:46:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:28 crc kubenswrapper[4998]: E0203 06:46:28.009465 4998 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29c0cf50-7633-4e19-9ce0-3cf9f0cae181\\\",\\\"systemUUID\\\":\\\"2c417530-aeae-4e8e-ac67-d425307cb93c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:28Z is after 
2025-08-24T17:21:41Z" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.012847 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.012877 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.012887 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.012902 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.012915 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:28Z","lastTransitionTime":"2026-02-03T06:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:28 crc kubenswrapper[4998]: E0203 06:46:28.026213 4998 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29c0cf50-7633-4e19-9ce0-3cf9f0cae181\\\",\\\"systemUUID\\\":\\\"2c417530-aeae-4e8e-ac67-d425307cb93c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:28Z is after 
2025-08-24T17:21:41Z" Feb 03 06:46:28 crc kubenswrapper[4998]: E0203 06:46:28.026318 4998 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.027542 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.027566 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.027574 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.027585 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.027593 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:28Z","lastTransitionTime":"2026-02-03T06:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.129508 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.129544 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.129554 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.129568 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.129578 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:28Z","lastTransitionTime":"2026-02-03T06:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.232447 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.232514 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.232536 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.232564 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.232587 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:28Z","lastTransitionTime":"2026-02-03T06:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.335468 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.335522 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.335535 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.335552 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.335568 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:28Z","lastTransitionTime":"2026-02-03T06:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.423245 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-29 23:10:09.994449714 +0000 UTC Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.426955 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.427043 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.427042 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 06:46:28 crc kubenswrapper[4998]: E0203 06:46:28.427177 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 06:46:28 crc kubenswrapper[4998]: E0203 06:46:28.427234 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 06:46:28 crc kubenswrapper[4998]: E0203 06:46:28.427279 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.437367 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.437411 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.437425 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.437443 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.437457 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:28Z","lastTransitionTime":"2026-02-03T06:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.541022 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.541333 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.541349 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.541371 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.541383 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:28Z","lastTransitionTime":"2026-02-03T06:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.601529 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" event={"ID":"f7418b1d-9f7d-48cd-aac4-6a1b85967841","Type":"ContainerStarted","Data":"943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e"} Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.603870 4998 generic.go:334] "Generic (PLEG): container finished" podID="e25d5a00-1315-4327-aadd-fd81e45fb023" containerID="457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546" exitCode=0 Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.603931 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" event={"ID":"e25d5a00-1315-4327-aadd-fd81e45fb023","Type":"ContainerDied","Data":"457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546"} Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.657037 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.657077 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.657089 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.657109 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.657121 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:28Z","lastTransitionTime":"2026-02-03T06:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.657913 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e25d5a00-1315-4327-aadd-fd81e45fb023\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:
46:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5qjsn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:28Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.696754 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7418b1d-9f7d-48cd-aac4-6a1b85967841\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7b8d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:28Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.711918 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c8c93fe-ba86-4899-a018-d24fb324de5c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35ab5e2fc89d8e658f04c9a5ed234dad845dcf70d39e89bd52fed8f84b6c8c68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T06:46:16Z\\\",\\\"message\\\":\\\"W0203 06:46:05.531717 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 
06:46:05.532068 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770101165 cert, and key in /tmp/serving-cert-4006352559/serving-signer.crt, /tmp/serving-cert-4006352559/serving-signer.key\\\\nI0203 06:46:05.809216 1 observer_polling.go:159] Starting file observer\\\\nW0203 06:46:05.812530 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 06:46:05.812748 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 06:46:05.815997 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4006352559/tls.crt::/tmp/serving-cert-4006352559/tls.key\\\\\\\"\\\\nF0203 06:46:16.112630 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:28Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.724110 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:28Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.736415 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:28Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.749705 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981f8ad138cf566afc1dc984e1549e0dabb353e61469585343b399ef799f2b71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ab581feac753b27611d84de9a135dada6c09b508339e915247f806c6d69db1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v9x5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:28Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.758781 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.758832 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.758843 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.758859 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.758870 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:28Z","lastTransitionTime":"2026-02-03T06:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.760674 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d22be91149878dccbc8e538045eaf67301beefef286fd92c7e167337930f99b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0c3a3dd3295e8c1865539192e21489cd44235205b1b770db8a099907473d290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:28Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.769662 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d754bb9fdbc001ab9d9a1ce51b99180c387ebda84e0a5b4d1fe9af76c6c258fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:28Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.779504 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-plgnf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0226ecd8c9fa202b4c46cf30a396c7bd55814fe45ec536e4fff2c0f09d6aa371\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5z4jf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-plgnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:28Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.790256 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-crsvv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16316a01-0118-4b01-81cb-13e869b62484\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f9d449e60417a58eed750cbff1f8c1c97a740e68bbd349f3e8b98f64e749d12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfq5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-crsvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:28Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.810921 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f202cc16-8c9d-4e03-bd1d-0716f02a8ee7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://413ae0620494cf4c5807c78ecdfda671fd63b7aaefa96b63564212f57b34f9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4741ef3fd48469b5eef0e17e68ef7b20a3929017ff3939ee975b8a3751cfe965\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b741c846b0b5468ec9b52e16778e89632de68f37c0bd5b1fc375cc019a8c959c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f5dcbfe7ff012e1060f79ff8f126f7f86d92d4
cd113f390c41f3bdbb7e9baf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4248f5a0e550f2e4e6aa126875469ca31b11f40cf885b3d636c4d97925cb7fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:28Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.823021 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee532c209b30e3e2e21dc079ea01aa8bde392a9f42452a24ad5a0c34ca81632d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:28Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.834180 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48b7853e-6cfe-4818-8635-2b12f2be8097\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://66250fee433e0a4da7f8a56ca4c5299eed2c156035640d6ef9e401c81a60abfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3631ce704525fba5e76870d8f108ed0e5d21e9b061d2ffc99b84c1c4755aa0e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d261e399c0e0ab21edf85442c34085baa35a4d89963fe29dfc815ec96172d14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4be2f3e13f93718e31ed1d5f95cc05eb5e2996825afabed9e742a839cf2949a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:28Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.844874 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:28Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.856347 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dm4vz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91ddb23bc0f787c0874522cb2f92e61f895a03595a116248d27fbc2030a1cf92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t8ch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dm4vz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:28Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.861039 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.861074 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.861082 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.861097 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.861106 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:28Z","lastTransitionTime":"2026-02-03T06:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.963473 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.963518 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.963530 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.963546 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:28 crc kubenswrapper[4998]: I0203 06:46:28.963557 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:28Z","lastTransitionTime":"2026-02-03T06:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.069604 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.069648 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.069660 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.069685 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.069697 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:29Z","lastTransitionTime":"2026-02-03T06:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.172606 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.172688 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.172715 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.172744 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.172768 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:29Z","lastTransitionTime":"2026-02-03T06:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.275396 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.275471 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.275495 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.275528 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.275552 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:29Z","lastTransitionTime":"2026-02-03T06:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.378457 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.378512 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.378524 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.378564 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.378578 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:29Z","lastTransitionTime":"2026-02-03T06:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.423970 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 23:42:41.300070745 +0000 UTC Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.481446 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.481503 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.481523 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.481553 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.481575 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:29Z","lastTransitionTime":"2026-02-03T06:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.585233 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.585303 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.585329 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.585360 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.585381 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:29Z","lastTransitionTime":"2026-02-03T06:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.613249 4998 generic.go:334] "Generic (PLEG): container finished" podID="e25d5a00-1315-4327-aadd-fd81e45fb023" containerID="8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f" exitCode=0 Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.613325 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" event={"ID":"e25d5a00-1315-4327-aadd-fd81e45fb023","Type":"ContainerDied","Data":"8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f"} Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.636226 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48b7853e-6cfe-4818-8635-2b12f2be8097\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://66250fee433e0a4da7f8a56ca4c5299eed2c156035640d6ef9e401c81a60abfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3631ce704525fba5e76870d8f108ed0e5d21e9b061d2ffc99b84c1c4755aa0e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d261e399c0e0ab21edf85442c34085baa35a4d89963fe29dfc815ec96172d14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a57
8bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4be2f3e13f93718e31ed1d5f95cc05eb5e2996825afabed9e742a839cf2949a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:29Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.659867 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:29Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.681939 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dm4vz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91ddb23bc0f787c0874522cb2f92e61f895a03595a116248d27fbc2030a1cf92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t8ch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dm4vz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:29Z is after 2025-08-24T17:21:41Z"
Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.688468 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.688499 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.688510 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.688525 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.688537 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:29Z","lastTransitionTime":"2026-02-03T06:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.698754 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c8c93fe-ba86-4899-a018-d24fb324de5c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35ab5e2fc89d8e658f04c9a5ed234dad845dcf70d39e89bd52fed8f84b6c8c68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T06:46:16Z\\\",\\\"message\\\":\\\"W0203 06:46:05.531717 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 06:46:05.532068 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770101165 cert, and key in /tmp/serving-cert-4006352559/serving-signer.crt, /tmp/serving-cert-4006352559/serving-signer.key\\\\nI0203 06:46:05.809216 1 observer_polling.go:159] Starting file observer\\\\nW0203 06:46:05.812530 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 06:46:05.812748 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 06:46:05.815997 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4006352559/tls.crt::/tmp/serving-cert-4006352559/tls.key\\\\\\\"\\\\nF0203 06:46:16.112630 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:29Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.710127 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:29Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.733078 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e25d5a00-1315-4327-aadd-fd81e45fb023\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5qjsn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:29Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.759262 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7418b1d-9f7d-48cd-aac4-6a1b85967841\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7b8d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:29Z 
is after 2025-08-24T17:21:41Z" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.773309 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:29Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.783532 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981f8ad138cf566afc1dc984e1549e0dabb353e61469585343b399ef799f2b71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ab581feac753b27611d84de9a135dada6c09b508339e915247f806c6d69db1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v9x5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:29Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.790566 4998 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.790597 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.790608 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.790625 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.790637 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:29Z","lastTransitionTime":"2026-02-03T06:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.801242 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f202cc16-8c9d-4e03-bd1d-0716f02a8ee7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://413ae0620494cf4c5807c78ecdfda671fd63b7aaefa96b63564212f57b34f9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4741ef3fd48469b5eef0e17e68ef7b20a3929017ff3939ee975b8a3751cfe965\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\
"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b741c846b0b5468ec9b52e16778e89632de68f37c0bd5b1fc375cc019a8c959c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f5dcbfe7ff012e1060f79ff8f126f7f86d92d4cd113f390c41f3bdbb7e9baf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4248f5a0e550f2e4e6aa126875469ca31b11f40cf885b3d636c4d97925cb7fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state
\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:29Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.813726 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee532c209b30e3e2e21dc079ea01aa8bde392a9f42452a24ad5a0c34ca81632d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:29Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.824885 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d22be91149878dccbc8e538045eaf67301beefef286fd92c7e167337930f99b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0c3a3dd3295e8c1865539192e21489cd44235205b1b770db8a099907473d290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:29Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.836435 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d754bb9fdbc001ab9d9a1ce51b99180c387ebda84e0a5b4d1fe9af76c6c258fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:29Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.846534 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-plgnf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0226ecd8c9fa202b4c46cf30a396c7bd55814fe45ec536e4fff2c0f09d6aa371\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5z4jf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-plgnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:29Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.855692 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-crsvv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16316a01-0118-4b01-81cb-13e869b62484\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f9d449e60417a58eed750cbff1f8c1c97a740e68bbd349f3e8b98f64e749d12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfq5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-crsvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:29Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.894363 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.894405 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.894417 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.894435 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.894448 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:29Z","lastTransitionTime":"2026-02-03T06:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.997348 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.997386 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.997394 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.997408 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:29 crc kubenswrapper[4998]: I0203 06:46:29.997417 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:29Z","lastTransitionTime":"2026-02-03T06:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.054104 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:46:30 crc kubenswrapper[4998]: E0203 06:46:30.054336 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:46:38.054289192 +0000 UTC m=+36.340982998 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.099921 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.099965 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.099974 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.099988 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.099998 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:30Z","lastTransitionTime":"2026-02-03T06:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.155050 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.155107 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.155148 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.155183 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:46:30 crc kubenswrapper[4998]: E0203 06:46:30.155281 4998 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object 
"openshift-network-console"/"networking-console-plugin" not registered Feb 03 06:46:30 crc kubenswrapper[4998]: E0203 06:46:30.155315 4998 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 03 06:46:30 crc kubenswrapper[4998]: E0203 06:46:30.155339 4998 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 03 06:46:30 crc kubenswrapper[4998]: E0203 06:46:30.155356 4998 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 06:46:30 crc kubenswrapper[4998]: E0203 06:46:30.155379 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-03 06:46:38.15535704 +0000 UTC m=+36.442050856 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 03 06:46:30 crc kubenswrapper[4998]: E0203 06:46:30.155402 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-03 06:46:38.155390011 +0000 UTC m=+36.442083827 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 06:46:30 crc kubenswrapper[4998]: E0203 06:46:30.155420 4998 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 03 06:46:30 crc kubenswrapper[4998]: E0203 06:46:30.155481 4998 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 03 06:46:30 crc kubenswrapper[4998]: E0203 06:46:30.155569 4998 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 03 06:46:30 crc kubenswrapper[4998]: E0203 06:46:30.155583 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-03 06:46:38.155546415 +0000 UTC m=+36.442240361 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 03 06:46:30 crc kubenswrapper[4998]: E0203 06:46:30.155644 4998 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 06:46:30 crc kubenswrapper[4998]: E0203 06:46:30.155842 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-03 06:46:38.155759042 +0000 UTC m=+36.442453008 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.202582 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.202629 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.202648 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.202670 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.202685 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:30Z","lastTransitionTime":"2026-02-03T06:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.305521 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.305554 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.305564 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.305577 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.305588 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:30Z","lastTransitionTime":"2026-02-03T06:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.408588 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.408656 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.408676 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.408703 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.408723 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:30Z","lastTransitionTime":"2026-02-03T06:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.425000 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 22:43:40.074609165 +0000 UTC Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.427369 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.427423 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:46:30 crc kubenswrapper[4998]: E0203 06:46:30.427532 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.427595 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 06:46:30 crc kubenswrapper[4998]: E0203 06:46:30.427737 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 06:46:30 crc kubenswrapper[4998]: E0203 06:46:30.428111 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.510846 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.510892 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.510902 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.510920 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.510931 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:30Z","lastTransitionTime":"2026-02-03T06:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.614433 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.614489 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.614510 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.614534 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.614551 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:30Z","lastTransitionTime":"2026-02-03T06:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.620012 4998 generic.go:334] "Generic (PLEG): container finished" podID="e25d5a00-1315-4327-aadd-fd81e45fb023" containerID="1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63" exitCode=0 Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.620084 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" event={"ID":"e25d5a00-1315-4327-aadd-fd81e45fb023","Type":"ContainerDied","Data":"1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63"} Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.625445 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" event={"ID":"f7418b1d-9f7d-48cd-aac4-6a1b85967841","Type":"ContainerStarted","Data":"f4a5a155685f32631be1e7448aede399af32fd65f4c99ada08f1e72ae829a09d"} Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.625766 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.636200 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48b7853e-6cfe-4818-8635-2b12f2be8097\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://66250fee433e0a4da7f8a56ca4c5299eed2c156035640d6ef9e401c81a60abfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3631ce704525fba5e76870d8f108ed0e5d21e9b061d2ffc99b84c1c4755aa0e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d261e399c0e0ab21edf85442c34085baa35a4d89963fe29dfc815ec96172d14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4be2f3e13f93718e31ed1d5f95cc05eb5e2996825afabed9e742a839cf2949a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:30Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.650761 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:30Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.668720 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.671772 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dm4vz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91ddb23bc0f787c0874522cb2f92e61f895a03595a116248d27fbc2030a1cf92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io
\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t8ch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dm4vz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:30Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.690835 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c8c93fe-ba86-4899-a018-d24fb324de5c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35ab5e2fc89d8e658f04c9a5ed234dad845dcf70d39e89bd52fed8f84b6c8c68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T06:46:16Z\\\",\\\"message\\\":\\\"W0203 06:46:05.531717 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 
06:46:05.532068 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770101165 cert, and key in /tmp/serving-cert-4006352559/serving-signer.crt, /tmp/serving-cert-4006352559/serving-signer.key\\\\nI0203 06:46:05.809216 1 observer_polling.go:159] Starting file observer\\\\nW0203 06:46:05.812530 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 06:46:05.812748 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 06:46:05.815997 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4006352559/tls.crt::/tmp/serving-cert-4006352559/tls.key\\\\\\\"\\\\nF0203 06:46:16.112630 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:30Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.712528 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:30Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.716377 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.716403 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.716411 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.716423 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.716431 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:30Z","lastTransitionTime":"2026-02-03T06:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.728230 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e25d5a00-1315-4327-aadd-fd81e45fb023\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece71ebf17f8198845
9f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\
\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5qjsn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:30Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.746632 4998 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7418b1d-9f7d-48cd-aac4-6a1b85967841\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"o
vnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d77
3257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\
\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7b8d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:30Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.757926 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:30Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.770599 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981f8ad138cf566afc1dc984e1549e0dabb353e61469585343b399ef799f2b71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ab581feac753b27611d84de9a135dada6c09b508339e915247f806c6d69db1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v9x5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:30Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.791105 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f202cc16-8c9d-4e03-bd1d-0716f02a8ee7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://413ae0620494cf4c5807c78ecdfda671fd63b7aaefa96b63564212f57b34f9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4741ef3fd48469b5eef0e17e68ef7b20a3929017ff3939ee975b8a3751cfe965\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\"
:0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b741c846b0b5468ec9b52e16778e89632de68f37c0bd5b1fc375cc019a8c959c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f5dcbfe7ff012e1060f79ff8f126f7f86d92d4cd113f390c41f3bdbb7e9baf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4248f5a0e550f2e4e6aa126875469ca31b11f40cf885b3d636c4d97925cb7fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"contai
nerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:30Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.801566 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee532c209b30e3e2e21dc079ea01aa8bde392a9f42452a24ad5a0c34ca81632d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:30Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.811280 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d22be91149878dccbc8e538045eaf67301beefef286fd92c7e167337930f99b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0c3a3dd3295e8c1865539192e21489cd44235205b1b770db8a099907473d290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:30Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.818663 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.818722 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.818733 4998 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.818749 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.818759 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:30Z","lastTransitionTime":"2026-02-03T06:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.822463 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d754bb9fdbc001ab9d9a1ce51b99180c387ebda84e0a5b4d1fe9af76c6c258fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:30Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.833133 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-plgnf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0226ecd8c9fa202b4c46cf30a396c7bd55814fe45ec536e4fff2c0f09d6aa371\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5z4jf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-plgnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:30Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.842214 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-crsvv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16316a01-0118-4b01-81cb-13e869b62484\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f9d449e60417a58eed750cbff1f8c1c97a740e68bbd349f3e8b98f64e749d12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfq5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-crsvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:30Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.856119 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48b7853e-6cfe-4818-8635-2b12f2be8097\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://66250fee433e0a4da7f8a56ca4c5299eed2c156035640d6ef9e401c81a60abfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3631ce704525fba5e76870d8f108ed0e5d21e9b061d2ffc99b84c1c4755aa0e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d261e399c0e0ab21edf85442c34085baa35a4d89963fe29dfc815ec96172d14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4be2f3e13f93718e31ed1d5f95cc05eb5e2996825afabed9e742a839cf2949a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:30Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.877405 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:30Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.894352 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dm4vz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91ddb23bc0f787c0874522cb2f92e61f895a03595a116248d27fbc2030a1cf92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t8ch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dm4vz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:30Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.907646 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c8c93fe-ba86-4899-a018-d24fb324de5c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35ab5e2fc89d8e658f04c9a5ed234dad845dcf70d39e89bd52fed8f84b6c8c68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T06:46:16Z\\\",\\\"message\\\":\\\"W0203 06:46:05.531717 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 
06:46:05.532068 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770101165 cert, and key in /tmp/serving-cert-4006352559/serving-signer.crt, /tmp/serving-cert-4006352559/serving-signer.key\\\\nI0203 06:46:05.809216 1 observer_polling.go:159] Starting file observer\\\\nW0203 06:46:05.812530 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 06:46:05.812748 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 06:46:05.815997 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4006352559/tls.crt::/tmp/serving-cert-4006352559/tls.key\\\\\\\"\\\\nF0203 06:46:16.112630 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:30Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.920529 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:30Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.921397 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.921504 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.921517 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.921537 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.921550 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:30Z","lastTransitionTime":"2026-02-03T06:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.935217 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e25d5a00-1315-4327-aadd-fd81e45fb023\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece71ebf17f8198845
9f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\
\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5qjsn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:30Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.960395 4998 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7418b1d-9f7d-48cd-aac4-6a1b85967841\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev
@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name
\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4a5a155685f32631be1e7448aede399af32fd65f4c99ada08f1e72ae829a09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\
\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7b8d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:30Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.974406 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:30Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:30 crc kubenswrapper[4998]: I0203 06:46:30.986067 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981f8ad138cf566afc1dc984e1549e0dabb353e61469585343b399ef799f2b71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ab581feac753b27611d84de9a135dada6c09b508339e915247f806c6d69db1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v9x5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:30Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.008074 4998 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f202cc16-8c9d-4e03-bd1d-0716f02a8ee7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://413ae0620494cf4c5807c78ecdfda671fd63b7aaefa96b63564212f57b34f9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4741ef3fd48469b5eef0e17e68ef7b20a3929017ff3939ee975b8a3751cfe965\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b741c846b0b5468ec9b52e16778e89632de68f37c0bd5b1fc375cc019a8c959c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f5dcbfe7ff012e1060f79ff8f126f7f86d92d4cd113f390c41f3bdbb7e9baf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4248f5a0e550f2e4e6aa126875469ca31b11f40cf885b3d636c4d97925cb7fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:31Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.023514 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.023555 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.023566 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.023580 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.023590 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:31Z","lastTransitionTime":"2026-02-03T06:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.029149 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee532c209b30e3e2e21dc079ea01aa8bde392a9f42452a24ad5a0c34ca81632d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:31Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.047385 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d22be91149878dccbc8e538045eaf67301beefef286fd92c7e167337930f99b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0c3a3dd3295e8c1865539192e21489cd44235205b1b770db8a099907473d290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:31Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.061234 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d754bb9fdbc001ab9d9a1ce51b99180c387ebda84e0a5b4d1fe9af76c6c258fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:31Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.070321 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-plgnf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0226ecd8c9fa202b4c46cf30a396c7bd55814fe45ec536e4fff2c0f09d6aa371\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5z4jf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-plgnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:31Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.077814 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-crsvv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16316a01-0118-4b01-81cb-13e869b62484\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f9d449e60417a58eed750cbff1f8c1c97a740e68bbd349f3e8b98f64e749d12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfq5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-crsvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:31Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.125814 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.125847 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.125857 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.125871 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.125883 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:31Z","lastTransitionTime":"2026-02-03T06:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.228187 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.228259 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.228278 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.228305 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.228322 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:31Z","lastTransitionTime":"2026-02-03T06:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.332084 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.332141 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.332160 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.332182 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.332197 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:31Z","lastTransitionTime":"2026-02-03T06:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.426063 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 01:06:08.244387253 +0000 UTC Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.435151 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.435188 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.435199 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.435214 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.435225 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:31Z","lastTransitionTime":"2026-02-03T06:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.538535 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.538597 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.538620 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.538649 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.538673 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:31Z","lastTransitionTime":"2026-02-03T06:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.633575 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" event={"ID":"e25d5a00-1315-4327-aadd-fd81e45fb023","Type":"ContainerStarted","Data":"2d0b49de3f605b693ae64a05177cab0247aa357fafaba47ec21c7982f17c47f0"} Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.633751 4998 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.634579 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.641050 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.641126 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.641139 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.641155 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.641167 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:31Z","lastTransitionTime":"2026-02-03T06:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.653120 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c8c93fe-ba86-4899-a018-d24fb324de5c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35ab5e2fc89d8e658f04c9a5ed234dad845dcf70d39e89bd52fed8f84b6c8c68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T06:46:16Z\\\",\\\"message\\\":\\\"W0203 06:46:05.531717 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 06:46:05.532068 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770101165 cert, and key in /tmp/serving-cert-4006352559/serving-signer.crt, /tmp/serving-cert-4006352559/serving-signer.key\\\\nI0203 06:46:05.809216 1 observer_polling.go:159] Starting file observer\\\\nW0203 06:46:05.812530 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 06:46:05.812748 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 06:46:05.815997 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4006352559/tls.crt::/tmp/serving-cert-4006352559/tls.key\\\\\\\"\\\\nF0203 06:46:16.112630 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:31Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.663322 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.666461 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:31Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.682963 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e25d5a00-1315-4327-aadd-fd81e45fb023\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d0b49de3f605b693ae64a05177cab0247aa357fafaba47ec21c7982f17c47f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5qjsn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:31Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.707297 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7418b1d-9f7d-48cd-aac4-6a1b85967841\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"im
ageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\
\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4a5a155685f32631be1e7448aede399af32fd65f4c99ada08f1e72ae829a09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnl
y\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7b8d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:31Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.719548 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:31Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.734372 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981f8ad138cf566afc1dc984e1549e0dabb353e61469585343b399ef799f2b71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ab581feac753b27611d84de9a135dada6c09b508339e915247f806c6d69db1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v9x5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:31Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.743870 4998 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.743911 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.743925 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.743946 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.743960 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:31Z","lastTransitionTime":"2026-02-03T06:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.746761 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-crsvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16316a01-0118-4b01-81cb-13e869b62484\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f9d449e60417a58eed750cbff1f8c1c97a740e68bbd349f3e8b98f64e749d12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfq5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-crsvv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:31Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.777662 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f202cc16-8c9d-4e03-bd1d-0716f02a8ee7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://413ae0620494cf4c5807c78ecdfda671fd63b7aaefa96b63564212f57b34f9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4741ef3fd48469b5eef0e17e68ef7b20a3929017ff3939ee975b8a3751cfe965\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b741c846b0b5468ec9b52e16778e89632de68f37c0bd5b1fc375cc019a8c959c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f5dcbfe7ff012e1060f79ff8f126f7f86d92d4cd113f390c41f3bdbb7e9baf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4248f5a0e550f2e4e6aa126875469ca31b11f40cf885b3d636c4d97925cb7fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:31Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.790480 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee532c209b30e3e2e21dc079ea01aa8bde392a9f42452a24ad5a0c34ca81632d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:31Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.804325 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d22be91149878dccbc8e538045eaf67301beefef286fd92c7e167337930f99b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0c3a3dd3295e8c1865539192e21489cd44235205b1b770db8a099907473d290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:31Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.816389 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d754bb9fdbc001ab9d9a1ce51b99180c387ebda84e0a5b4d1fe9af76c6c258fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:31Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.826528 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-plgnf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0226ecd8c9fa202b4c46cf30a396c7bd55814fe45ec536e4fff2c0f09d6aa371\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5z4jf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-plgnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:31Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.839307 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48b7853e-6cfe-4818-8635-2b12f2be8097\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://66250fee433e0a4da7f8a56ca4c5299eed2c156035640d6ef9e401c81a60abfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3631ce704525fba5e76870d8f108ed0e5d21e9b061d2ffc99b84c1c4755aa0e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d261e399c0e0ab21edf85442c34085baa35a4d89963fe29dfc815ec96172d14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4be2f3e13f93718e31ed1d5f95cc05eb5e2996825afabed9e742a839cf2949a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:31Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.846413 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.846486 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.846510 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.846538 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.846561 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:31Z","lastTransitionTime":"2026-02-03T06:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.852284 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:31Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.866374 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dm4vz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91ddb23bc0f787c0874522cb2f92e61f895a03595a116248d27fbc2030a1cf92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t8ch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dm4vz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:31Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.877927 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:31Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.889803 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981f8ad138cf566afc1dc984e1549e0dabb353e61469585343b399ef799f2b71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ab581feac753b27611d84de9a135dada6c09b508339e915247f806c6d69db1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v9x5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:31Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.907624 4998 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f202cc16-8c9d-4e03-bd1d-0716f02a8ee7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://413ae0620494cf4c5807c78ecdfda671fd63b7aaefa96b63564212f57b34f9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4741ef3fd48469b5eef0e17e68ef7b20a3929017ff3939ee975b8a3751cfe965\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b741c846b0b5468ec9b52e16778e89632de68f37c0bd5b1fc375cc019a8c959c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f5dcbfe7ff012e1060f79ff8f126f7f86d92d4cd113f390c41f3bdbb7e9baf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4248f5a0e550f2e4e6aa126875469ca31b11f40cf885b3d636c4d97925cb7fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:31Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.919644 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee532c209b30e3e2e21dc079ea01aa8bde392a9f42452a24ad5a0c34ca81632d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:31Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.933045 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d22be91149878dccbc8e538045eaf67301beefef286fd92c7e167337930f99b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0c3a3dd3295e8c1865539192e21489cd44235205b1b770db8a099907473d290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:31Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.943919 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d754bb9fdbc001ab9d9a1ce51b99180c387ebda84e0a5b4d1fe9af76c6c258fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:31Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.948549 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.948594 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.948604 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.948622 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.948636 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:31Z","lastTransitionTime":"2026-02-03T06:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.953845 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-plgnf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0226ecd8c9fa202b4c46cf30a396c7bd55814fe45ec536e4fff2c0f09d6aa371\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5z4jf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-plgnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:31Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.966939 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-crsvv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16316a01-0118-4b01-81cb-13e869b62484\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f9d449e60417a58eed750cbff1f8c1c97a740e68bbd349f3e8b98f64e749d12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfq5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-crsvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:31Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.979553 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48b7853e-6cfe-4818-8635-2b12f2be8097\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://66250fee433e0a4da7f8a56ca4c5299eed2c156035640d6ef9e401c81a60abfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3631ce704525fba5e76870d8f108ed0e5d21e9b061d2ffc99b84c1c4755aa0e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d261e399c0e0ab21edf85442c34085baa35a4d89963fe29dfc815ec96172d14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4be2f3e13f93718e31ed1d5f95cc05eb5e2996825afabed9e742a839cf2949a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:31Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:31 crc kubenswrapper[4998]: I0203 06:46:31.990415 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:31Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.001820 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dm4vz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91ddb23bc0f787c0874522cb2f92e61f895a03595a116248d27fbc2030a1cf92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t8ch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dm4vz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:32Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.017952 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c8c93fe-ba86-4899-a018-d24fb324de5c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35ab5e2fc89d8e658f04c9a5ed234dad845dcf70d39e89bd52fed8f84b6c8c68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T06:46:16Z\\\",\\\"message\\\":\\\"W0203 06:46:05.531717 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 
06:46:05.532068 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770101165 cert, and key in /tmp/serving-cert-4006352559/serving-signer.crt, /tmp/serving-cert-4006352559/serving-signer.key\\\\nI0203 06:46:05.809216 1 observer_polling.go:159] Starting file observer\\\\nW0203 06:46:05.812530 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 06:46:05.812748 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 06:46:05.815997 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4006352559/tls.crt::/tmp/serving-cert-4006352559/tls.key\\\\\\\"\\\\nF0203 06:46:16.112630 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:32Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.029535 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:32Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.042841 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e25d5a00-1315-4327-aadd-fd81e45fb023\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d0b49de3f605b693ae64a05177cab0247aa357fafaba47ec21c7982f17c47f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5qjsn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:32Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.051196 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.051226 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.051236 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.051250 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.051259 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:32Z","lastTransitionTime":"2026-02-03T06:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.059893 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7418b1d-9f7d-48cd-aac4-6a1b85967841\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4a5a155685f32631be1e7448aede399af32fd65f4c99ada08f1e72ae829a09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\
"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7b8d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:32Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.154490 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.154862 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.154875 
4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.154895 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.154911 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:32Z","lastTransitionTime":"2026-02-03T06:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.257688 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.257726 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.257736 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.257750 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.257760 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:32Z","lastTransitionTime":"2026-02-03T06:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.360428 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.360460 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.360468 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.360481 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.360490 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:32Z","lastTransitionTime":"2026-02-03T06:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.426371 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 05:38:05.923258625 +0000 UTC Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.426508 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.426590 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:46:32 crc kubenswrapper[4998]: E0203 06:46:32.426754 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 06:46:32 crc kubenswrapper[4998]: E0203 06:46:32.426821 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.427173 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:46:32 crc kubenswrapper[4998]: E0203 06:46:32.427296 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.439481 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dm4vz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91ddb23bc0f787c0874522cb2f92e61f895a03595a116248d27fbc2030a1cf92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t8ch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.
126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dm4vz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:32Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.451476 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48b7853e-6cfe-4818-8635-2b12f2be8097\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://66250fee433e0a4da7f8a56ca4c5299eed2c156035640d6ef9e401c81a60abfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3631ce704525fba5e76870d8f108ed0e5d21e9b061d2ffc99b84c1c4755aa0e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d261e399c0e0ab21edf85442c34085baa35a4d89963fe29dfc815ec96172d14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\
\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4be2f3e13f93718e31ed1d5f95cc05eb5e2996825afabed9e742a839cf2949a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:32Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.462549 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.462586 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.462595 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.462611 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.462620 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:32Z","lastTransitionTime":"2026-02-03T06:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.465442 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:32Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.482895 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:32Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.498220 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e25d5a00-1315-4327-aadd-fd81e45fb023\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d0b49de3f605b693ae64a05177cab0247aa357fafaba47ec21c7982f17c47f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\
\\":[{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:26Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\"
,\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5qjsn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:32Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.519059 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7418b1d-9f7d-48cd-aac4-6a1b85967841\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4a5a155685f32631be1e7448aede399af32fd65
f4c99ada08f1e72ae829a09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7b8d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:32Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.539734 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c8c93fe-ba86-4899-a018-d24fb324de5c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35ab5e2fc89d8e658f04c9a5ed234dad845dcf70d39e89bd52fed8f84b6c8c68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T06:46:16Z\\\",\\\"message\\\":\\\"W0203 06:46:05.531717 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 
06:46:05.532068 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770101165 cert, and key in /tmp/serving-cert-4006352559/serving-signer.crt, /tmp/serving-cert-4006352559/serving-signer.key\\\\nI0203 06:46:05.809216 1 observer_polling.go:159] Starting file observer\\\\nW0203 06:46:05.812530 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 06:46:05.812748 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 06:46:05.815997 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4006352559/tls.crt::/tmp/serving-cert-4006352559/tls.key\\\\\\\"\\\\nF0203 06:46:16.112630 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:32Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.553131 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:32Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.562867 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981f8ad138cf566afc1dc984e1549e0dabb353e61469585343b399ef799f2b71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ab581feac753b27611d84de9a135dada6c09b508339e915247f806c6d69db1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v9x5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:32Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.564446 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.564482 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.564492 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.564509 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.564519 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:32Z","lastTransitionTime":"2026-02-03T06:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.572556 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee532c209b30e3e2e21dc079ea01aa8bde392a9f42452a24ad5a0c34ca81632d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:32Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.586239 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d22be91149878dccbc8e538045eaf67301beefef286fd92c7e167337930f99b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0c3a3dd3295e8c1865539192e21489cd44235205b1b770db8a099907473d290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:32Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.595424 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d754bb9fdbc001ab9d9a1ce51b99180c387ebda84e0a5b4d1fe9af76c6c258fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:32Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.602649 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-plgnf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0226ecd8c9fa202b4c46cf30a396c7bd55814fe45ec536e4fff2c0f09d6aa371\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5z4jf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-plgnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:32Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.611007 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-crsvv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16316a01-0118-4b01-81cb-13e869b62484\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f9d449e60417a58eed750cbff1f8c1c97a740e68bbd349f3e8b98f64e749d12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfq5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-crsvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:32Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.626227 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f202cc16-8c9d-4e03-bd1d-0716f02a8ee7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://413ae0620494cf4c5807c78ecdfda671fd63b7aaefa96b63564212f57b34f9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4741ef3fd48469b5eef0e17e68ef7b20a3929017ff3939ee975b8a3751cfe965\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b741c846b0b5468ec9b52e16778e89632de68f37c0bd5b1fc375cc019a8c959c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f5dcbfe7ff012e1060f79ff8f126f7f86d92d4
cd113f390c41f3bdbb7e9baf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4248f5a0e550f2e4e6aa126875469ca31b11f40cf885b3d636c4d97925cb7fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:32Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.636242 4998 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.666883 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.666925 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.666935 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.666949 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.666957 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:32Z","lastTransitionTime":"2026-02-03T06:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.769239 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.769291 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.769303 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.769322 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.769335 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:32Z","lastTransitionTime":"2026-02-03T06:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.838312 4998 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.872076 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.872111 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.872119 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.872132 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.872140 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:32Z","lastTransitionTime":"2026-02-03T06:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.975053 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.975100 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.975111 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.975129 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:32 crc kubenswrapper[4998]: I0203 06:46:32.975140 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:32Z","lastTransitionTime":"2026-02-03T06:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.078434 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.078491 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.078506 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.078528 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.078543 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:33Z","lastTransitionTime":"2026-02-03T06:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.181476 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.181523 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.181538 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.181560 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.181575 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:33Z","lastTransitionTime":"2026-02-03T06:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.283932 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.283973 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.283982 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.283997 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.284008 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:33Z","lastTransitionTime":"2026-02-03T06:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.386303 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.386355 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.386365 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.386380 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.386390 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:33Z","lastTransitionTime":"2026-02-03T06:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.426865 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-09 18:42:59.439703843 +0000 UTC Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.489083 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.489126 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.489135 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.489149 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.489159 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:33Z","lastTransitionTime":"2026-02-03T06:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.591600 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.591638 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.591647 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.591661 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.591670 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:33Z","lastTransitionTime":"2026-02-03T06:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.639079 4998 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.693501 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.693538 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.693546 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.693558 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.693567 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:33Z","lastTransitionTime":"2026-02-03T06:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.796595 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.796636 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.796645 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.796677 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.796689 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:33Z","lastTransitionTime":"2026-02-03T06:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.899181 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.899210 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.899218 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.899229 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.899238 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:33Z","lastTransitionTime":"2026-02-03T06:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:33 crc kubenswrapper[4998]: I0203 06:46:33.992742 4998 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.001907 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.001950 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.001962 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.001979 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.001991 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:34Z","lastTransitionTime":"2026-02-03T06:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.104468 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.104519 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.104529 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.104543 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.104553 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:34Z","lastTransitionTime":"2026-02-03T06:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.207251 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.207291 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.207305 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.207321 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.207332 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:34Z","lastTransitionTime":"2026-02-03T06:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.309823 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.309886 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.309905 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.309931 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.309949 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:34Z","lastTransitionTime":"2026-02-03T06:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.413707 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.413812 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.413838 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.413863 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.413880 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:34Z","lastTransitionTime":"2026-02-03T06:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.426981 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.427008 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.427026 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-29 03:56:02.922139274 +0000 UTC Feb 03 06:46:34 crc kubenswrapper[4998]: E0203 06:46:34.427207 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.427267 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:46:34 crc kubenswrapper[4998]: E0203 06:46:34.427419 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 06:46:34 crc kubenswrapper[4998]: E0203 06:46:34.427601 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.516678 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.516732 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.516748 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.516772 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.516819 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:34Z","lastTransitionTime":"2026-02-03T06:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.583099 4998 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.620908 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.620975 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.621002 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.621035 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.621056 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:34Z","lastTransitionTime":"2026-02-03T06:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.643831 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7b8d_f7418b1d-9f7d-48cd-aac4-6a1b85967841/ovnkube-controller/0.log" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.646384 4998 generic.go:334] "Generic (PLEG): container finished" podID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerID="f4a5a155685f32631be1e7448aede399af32fd65f4c99ada08f1e72ae829a09d" exitCode=1 Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.646403 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" event={"ID":"f7418b1d-9f7d-48cd-aac4-6a1b85967841","Type":"ContainerDied","Data":"f4a5a155685f32631be1e7448aede399af32fd65f4c99ada08f1e72ae829a09d"} Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.647321 4998 scope.go:117] "RemoveContainer" containerID="f4a5a155685f32631be1e7448aede399af32fd65f4c99ada08f1e72ae829a09d" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.659110 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:34Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.662664 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.676478 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981f8ad138cf566afc1dc984e1549e0dabb353e61469585343b399ef799f2b71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ab581feac753b27611d84de9a135dada6c09b508339e915247f806c6d69db1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-rel
ease-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v9x5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:34Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.687710 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-plgnf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0226ecd8c9fa202b4c46cf30a396c7bd55814fe45ec536e4fff2c0f09d6aa371\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5z4jf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-plgnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:34Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.698854 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-crsvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16316a01-0118-4b01-81cb-13e869b62484\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f9d449e60417a58eed750cbff1f8c1c97a740e68bbd349f3e8b98f64e749d12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfq5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-crsvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:34Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.717454 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f202cc16-8c9d-4e03-bd1d-0716f02a8ee7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://413ae0620494cf4c5807c78ecdfda671fd63b7aaefa96b63564212f57b34f9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4741ef3fd48469b5eef0e17e68ef7b20a3929017ff3939ee975b8a3751cfe965\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b741c846b0b5468ec9b52e16778e89632de68f37c0bd5b1fc375cc019a8c959c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f5dcbfe7ff012e1060f79ff8f126f7f86d92d4
cd113f390c41f3bdbb7e9baf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4248f5a0e550f2e4e6aa126875469ca31b11f40cf885b3d636c4d97925cb7fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:34Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.724147 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.724209 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.724222 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.724239 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.724251 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:34Z","lastTransitionTime":"2026-02-03T06:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.734004 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee532c209b30e3e2e21dc079ea01aa8bde392a9f42452a24ad5a0c34ca81632d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:34Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.746852 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d22be91149878dccbc8e538045eaf67301beefef286fd92c7e167337930f99b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0c3a3dd3295e8c1865539192e21489cd44235205b1b770db8a099907473d290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:34Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.762886 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d754bb9fdbc001ab9d9a1ce51b99180c387ebda84e0a5b4d1fe9af76c6c258fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:34Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.777886 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48b7853e-6cfe-4818-8635-2b12f2be8097\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://66250fee433e0a4da7f8a56ca4c5299eed2c156035640d6ef9e401c81a60abfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3631ce704525fba5e76870d8f108ed0e5d21e9b061d2ffc99b84c1c4755aa0e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d261e399c0e0ab21edf85442c34085baa35a4d89963fe29dfc815ec96172d14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4be2f3e13f93718e31ed1d5f95cc05eb5e2996825afabed9e742a839cf2949a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:34Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.795757 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:34Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.809353 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dm4vz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91ddb23bc0f787c0874522cb2f92e61f895a03595a116248d27fbc2030a1cf92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t8ch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dm4vz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:34Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.826430 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.826469 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.826480 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.826493 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.826503 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:34Z","lastTransitionTime":"2026-02-03T06:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.831911 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c8c93fe-ba86-4899-a018-d24fb324de5c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35ab5e2fc89d8e658f04c9a5ed234dad845dcf70d39e89bd52fed8f84b6c8c68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T06:46:16Z\\\",\\\"message\\\":\\\"W0203 06:46:05.531717 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 06:46:05.532068 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770101165 cert, and key in /tmp/serving-cert-4006352559/serving-signer.crt, /tmp/serving-cert-4006352559/serving-signer.key\\\\nI0203 06:46:05.809216 1 observer_polling.go:159] Starting file observer\\\\nW0203 06:46:05.812530 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 06:46:05.812748 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 06:46:05.815997 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4006352559/tls.crt::/tmp/serving-cert-4006352559/tls.key\\\\\\\"\\\\nF0203 06:46:16.112630 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:34Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.852177 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:34Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.868711 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e25d5a00-1315-4327-aadd-fd81e45fb023\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d0b49de3f605b693ae64a05177cab0247aa357fafaba47ec21c7982f17c47f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5qjsn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:34Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.890166 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7418b1d-9f7d-48cd-aac4-6a1b85967841\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4a5a155685f32631be1e7448aede399af32fd65f4c99ada08f1e72ae829a09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4a5a155685f32631be1e7448aede399af32fd65f4c99ada08f1e72ae829a09d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T06:46:33Z\\\",\\\"message\\\":\\\"roller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 06:46:33.635802 6330 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0203 06:46:33.635861 6330 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0203 06:46:33.635872 6330 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0203 06:46:33.635906 6330 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0203 06:46:33.635915 6330 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0203 06:46:33.635921 6330 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0203 06:46:33.635935 6330 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0203 06:46:33.635946 6330 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0203 06:46:33.635955 6330 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0203 06:46:33.635968 6330 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0203 06:46:33.635969 6330 factory.go:656] Stopping watch factory\\\\nI0203 06:46:33.635938 6330 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0203 06:46:33.635982 6330 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0203 06:46:33.635987 6330 ovnkube.go:599] Stopped ovnkube\\\\nI0203 06:46:33.635992 6330 handler.go:208] Removed *v1.Node event handler 
2\\\\nI02\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7b8d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:34Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.901661 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:34Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.911433 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981f8ad138cf566afc1dc984e1549e0dabb353e61469585343b399ef799f2b71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ab581feac753b27611d84de9a135dada6c09b508339e915247f806c6d69db1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v9x5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:34Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.920575 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-plgnf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0226ecd8c9fa202b4c46cf30a396c7bd55814fe45ec536e4fff2c0f09d6aa371\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5z4jf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-plgnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:34Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.929148 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.929188 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.929198 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.929229 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.929239 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:34Z","lastTransitionTime":"2026-02-03T06:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.930710 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-crsvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16316a01-0118-4b01-81cb-13e869b62484\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f9d449e60417a58eed750cbff1f8c1c97a740e68bbd349f3e8b98f64e749d12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfq5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168
.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-crsvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:34Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.949125 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f202cc16-8c9d-4e03-bd1d-0716f02a8ee7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://413ae0620494cf4c5807c78ecdfda671fd63b7aaefa96b63564212f57b34f9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4741ef3fd48469b5eef0e17e68ef7b20a3929017ff3939ee975b8a3751cfe965\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b741c846b0b5468ec9b52e16778e89632de68f37c0bd5b1fc375cc019a8c959c\\\",\
\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f5dcbfe7ff012e1060f79ff8f126f7f86d92d4cd113f390c41f3bdbb7e9baf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4248f5a0e550f2e4e6aa126875469ca31b11f40cf885b3d636c4d97925cb7fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:34Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.963717 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee532c209b30e3e2e21dc079ea01aa8bde392a9f42452a24ad5a0c34ca81632d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:34Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.977339 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d22be91149878dccbc8e538045eaf67301beefef286fd92c7e167337930f99b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0c3a3dd3295e8c1865539192e21489cd44235205b1b770db8a099907473d290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:34Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:34 crc kubenswrapper[4998]: I0203 06:46:34.990993 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d754bb9fdbc001ab9d9a1ce51b99180c387ebda84e0a5b4d1fe9af76c6c258fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:34Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.002251 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48b7853e-6cfe-4818-8635-2b12f2be8097\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://66250fee433e0a4da7f8a56ca4c5299eed2c156035640d6ef9e401c81a60abfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3631ce704525fba5e76870d8f108ed0e5d21e9b061d2ffc99b84c1c4755aa0e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d261e399c0e0ab21edf85442c34085baa35a4d89963fe29dfc815ec96172d14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4be2f3e13f93718e31ed1d5f95cc05eb5e2996825afabed9e742a839cf2949a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.013314 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.024469 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dm4vz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91ddb23bc0f787c0874522cb2f92e61f895a03595a116248d27fbc2030a1cf92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t8ch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dm4vz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.031095 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.031133 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.031142 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.031157 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.031169 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:35Z","lastTransitionTime":"2026-02-03T06:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.037584 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c8c93fe-ba86-4899-a018-d24fb324de5c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35ab5e2fc89d8e658f04c9a5ed234dad845dcf70d39e89bd52fed8f84b6c8c68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T06:46:16Z\\\",\\\"message\\\":\\\"W0203 06:46:05.531717 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 06:46:05.532068 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770101165 cert, and key in /tmp/serving-cert-4006352559/serving-signer.crt, /tmp/serving-cert-4006352559/serving-signer.key\\\\nI0203 06:46:05.809216 1 observer_polling.go:159] Starting file observer\\\\nW0203 06:46:05.812530 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 06:46:05.812748 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 06:46:05.815997 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4006352559/tls.crt::/tmp/serving-cert-4006352559/tls.key\\\\\\\"\\\\nF0203 06:46:16.112630 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.050361 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.068124 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e25d5a00-1315-4327-aadd-fd81e45fb023\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d0b49de3f605b693ae64a05177cab0247aa357fafaba47ec21c7982f17c47f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5qjsn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.096621 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7418b1d-9f7d-48cd-aac4-6a1b85967841\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4a5a155685f32631be1e7448aede399af32fd65f4c99ada08f1e72ae829a09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4a5a155685f32631be1e7448aede399af32fd65f4c99ada08f1e72ae829a09d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T06:46:33Z\\\",\\\"message\\\":\\\"roller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 06:46:33.635802 6330 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0203 06:46:33.635861 6330 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0203 06:46:33.635872 6330 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0203 06:46:33.635906 6330 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0203 06:46:33.635915 6330 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0203 06:46:33.635921 6330 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0203 06:46:33.635935 6330 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0203 06:46:33.635946 6330 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0203 06:46:33.635955 6330 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0203 06:46:33.635968 6330 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0203 06:46:33.635969 6330 factory.go:656] Stopping watch factory\\\\nI0203 06:46:33.635938 6330 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0203 06:46:33.635982 6330 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0203 06:46:33.635987 6330 ovnkube.go:599] Stopped ovnkube\\\\nI0203 06:46:33.635992 6330 handler.go:208] Removed *v1.Node event handler 
2\\\\nI02\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7b8d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z"
Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.133523 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.133562 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.133571 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.133587 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.133596 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:35Z","lastTransitionTime":"2026-02-03T06:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.237085 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.237129 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.237137 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.237154 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.237164 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:35Z","lastTransitionTime":"2026-02-03T06:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.339160 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.339208 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.339220 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.339240 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.339251 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:35Z","lastTransitionTime":"2026-02-03T06:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.343241 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-g7trx"]
Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.343703 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-g7trx"
Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.345791 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd"
Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.345872 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert"
Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.357524 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48b7853e-6cfe-4818-8635-2b12f2be8097\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://66250fee433e0a4da7f8a56ca4c5299eed2c156035640d6ef9e401c81a60abfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3631ce704525fba5e76870d8f108ed0e5d21e9b061d2ffc99b84c1c4755aa0e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d261e399c0e0ab21edf85442c34085baa35a4d89963fe29dfc815ec96172d14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controlle
r-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4be2f3e13f93718e31ed1d5f95cc05eb5e2996825afabed9e742a839cf2949a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.369804 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.381313 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dm4vz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91ddb23bc0f787c0874522cb2f92e61f895a03595a116248d27fbc2030a1cf92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t8ch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dm4vz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.391435 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-g7trx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"435a5c9b-684c-42ee-9519-13c14510718e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssscp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssscp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-g7trx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.404901 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c8c93fe-ba86-4899-a018-d24fb324de5c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35ab5e2fc89d8e658f04c9a5ed234dad845dcf70d39e89bd52fed8f84b6c8c68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T06:46:16Z\\\",\\\"message\\\":\\\"W0203 06:46:05.531717 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 06:46:05.532068 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770101165 cert, and key in /tmp/serving-cert-4006352559/serving-signer.crt, /tmp/serving-cert-4006352559/serving-signer.key\\\\nI0203 06:46:05.809216 1 observer_polling.go:159] Starting file observer\\\\nW0203 06:46:05.812530 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 06:46:05.812748 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 06:46:05.815997 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4006352559/tls.crt::/tmp/serving-cert-4006352559/tls.key\\\\\\\"\\\\nF0203 06:46:16.112630 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.417004 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z"
Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.427134 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 16:11:47.589510354 +0000 UTC
Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.433360 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e25d5a00-1315-4327-aadd-fd81e45fb023\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d0b49de3f605b693ae64a05177cab0247aa357fafaba47ec21c7982f17c47f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\
\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\
\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:2
9Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5qjsn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.441301 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.441338 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.441347 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.441362 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.441372 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:35Z","lastTransitionTime":"2026-02-03T06:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.453071 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/435a5c9b-684c-42ee-9519-13c14510718e-env-overrides\") pod \"ovnkube-control-plane-749d76644c-g7trx\" (UID: \"435a5c9b-684c-42ee-9519-13c14510718e\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-g7trx" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.453114 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/435a5c9b-684c-42ee-9519-13c14510718e-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-g7trx\" (UID: \"435a5c9b-684c-42ee-9519-13c14510718e\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-g7trx" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.453157 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ssscp\" (UniqueName: \"kubernetes.io/projected/435a5c9b-684c-42ee-9519-13c14510718e-kube-api-access-ssscp\") pod \"ovnkube-control-plane-749d76644c-g7trx\" (UID: \"435a5c9b-684c-42ee-9519-13c14510718e\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-g7trx" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.453227 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/435a5c9b-684c-42ee-9519-13c14510718e-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-g7trx\" (UID: \"435a5c9b-684c-42ee-9519-13c14510718e\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-g7trx" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.454095 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7418b1d-9f7d-48cd-aac4-6a1b85967841\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4a5a155685f32631be1e7448aede399af32fd65
f4c99ada08f1e72ae829a09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4a5a155685f32631be1e7448aede399af32fd65f4c99ada08f1e72ae829a09d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T06:46:33Z\\\",\\\"message\\\":\\\"roller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 06:46:33.635802 6330 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0203 06:46:33.635861 6330 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0203 06:46:33.635872 6330 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0203 06:46:33.635906 6330 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0203 06:46:33.635915 6330 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0203 06:46:33.635921 6330 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0203 06:46:33.635935 6330 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0203 06:46:33.635946 6330 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0203 06:46:33.635955 6330 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0203 06:46:33.635968 6330 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0203 06:46:33.635969 6330 factory.go:656] Stopping watch factory\\\\nI0203 06:46:33.635938 6330 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0203 06:46:33.635982 6330 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0203 06:46:33.635987 6330 ovnkube.go:599] Stopped ovnkube\\\\nI0203 06:46:33.635992 6330 handler.go:208] Removed *v1.Node event handler 
2\\\\nI02\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7b8d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.466990 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.476334 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981f8ad138cf566afc1dc984e1549e0dabb353e61469585343b399ef799f2b71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ab581feac753b27611d84de9a135dada6c09b508339e915247f806c6d69db1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v9x5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.484680 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-crsvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16316a01-0118-4b01-81cb-13e869b62484\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f9d449e60417a58eed750cbff1f8c1c97a740e68bbd349f3e8b98f64e749d12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfq5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-crsvv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.505405 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f202cc16-8c9d-4e03-bd1d-0716f02a8ee7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://413ae0620494cf4c5807c78ecdfda671fd63b7aaefa96b63564212f57b34f9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4741ef3fd48469b5eef0e17e68ef7b20a3929017ff3939ee975b8a3751cfe965\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b741c846b0b5468ec9b52e16778e89632de68f37c0bd5b1fc375cc019a8c959c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f5dcbfe7ff012e1060f79ff8f126f7f86d92d4cd113f390c41f3bdbb7e9baf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4248f5a0e550f2e4e6aa126875469ca31b11f40cf885b3d636c4d97925cb7fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.521107 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee532c209b30e3e2e21dc079ea01aa8bde392a9f42452a24ad5a0c34ca81632d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.533098 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d22be91149878dccbc8e538045eaf67301beefef286fd92c7e167337930f99b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0c3a3dd3295e8c1865539192e21489cd44235205b1b770db8a099907473d290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.543860 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.543893 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.543901 4998 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.543914 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.543923 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:35Z","lastTransitionTime":"2026-02-03T06:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.547572 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d754bb9fdbc001ab9d9a1ce51b99180c387ebda84e0a5b4d1fe9af76c6c258fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.554288 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/435a5c9b-684c-42ee-9519-13c14510718e-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-g7trx\" (UID: \"435a5c9b-684c-42ee-9519-13c14510718e\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-g7trx" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.554417 4998 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/435a5c9b-684c-42ee-9519-13c14510718e-env-overrides\") pod \"ovnkube-control-plane-749d76644c-g7trx\" (UID: \"435a5c9b-684c-42ee-9519-13c14510718e\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-g7trx" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.554460 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/435a5c9b-684c-42ee-9519-13c14510718e-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-g7trx\" (UID: \"435a5c9b-684c-42ee-9519-13c14510718e\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-g7trx" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.554520 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ssscp\" (UniqueName: \"kubernetes.io/projected/435a5c9b-684c-42ee-9519-13c14510718e-kube-api-access-ssscp\") pod \"ovnkube-control-plane-749d76644c-g7trx\" (UID: \"435a5c9b-684c-42ee-9519-13c14510718e\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-g7trx" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.554985 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/435a5c9b-684c-42ee-9519-13c14510718e-env-overrides\") pod \"ovnkube-control-plane-749d76644c-g7trx\" (UID: \"435a5c9b-684c-42ee-9519-13c14510718e\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-g7trx" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.555331 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/435a5c9b-684c-42ee-9519-13c14510718e-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-g7trx\" (UID: \"435a5c9b-684c-42ee-9519-13c14510718e\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-g7trx" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.560036 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-plgnf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0226ecd8c9fa202b4c46cf30a396c7bd55814fe45ec536e4fff2c0f09d6aa371\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5z4jf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-plgnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.561523 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/435a5c9b-684c-42ee-9519-13c14510718e-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-g7trx\" (UID: \"435a5c9b-684c-42ee-9519-13c14510718e\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-g7trx" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.567957 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ssscp\" (UniqueName: \"kubernetes.io/projected/435a5c9b-684c-42ee-9519-13c14510718e-kube-api-access-ssscp\") pod \"ovnkube-control-plane-749d76644c-g7trx\" (UID: \"435a5c9b-684c-42ee-9519-13c14510718e\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-g7trx" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.647275 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:35 crc kubenswrapper[4998]: 
I0203 06:46:35.647338 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.647355 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.647414 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.647435 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:35Z","lastTransitionTime":"2026-02-03T06:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.650966 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7b8d_f7418b1d-9f7d-48cd-aac4-6a1b85967841/ovnkube-controller/1.log" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.651896 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7b8d_f7418b1d-9f7d-48cd-aac4-6a1b85967841/ovnkube-controller/0.log" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.655227 4998 generic.go:334] "Generic (PLEG): container finished" podID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerID="bfc28205a03a67a974a7963431c89c78a6f766b5736c413cd633730a2faca477" exitCode=1 Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.655283 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" event={"ID":"f7418b1d-9f7d-48cd-aac4-6a1b85967841","Type":"ContainerDied","Data":"bfc28205a03a67a974a7963431c89c78a6f766b5736c413cd633730a2faca477"} Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.655331 4998 scope.go:117] "RemoveContainer" containerID="f4a5a155685f32631be1e7448aede399af32fd65f4c99ada08f1e72ae829a09d" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.657697 4998 scope.go:117] "RemoveContainer" containerID="bfc28205a03a67a974a7963431c89c78a6f766b5736c413cd633730a2faca477" Feb 03 06:46:35 crc kubenswrapper[4998]: E0203 06:46:35.658061 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-p7b8d_openshift-ovn-kubernetes(f7418b1d-9f7d-48cd-aac4-6a1b85967841)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.659396 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-g7trx" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.676453 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d754bb9fdbc001ab9d9a1ce51b99180c387ebda84e0a5b4d1fe9af76c6c258fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.688113 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-plgnf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0226ecd8c9fa202b4c46cf30a396c7bd55814fe45ec536e4fff2c0f09d6aa371\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5z4jf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-plgnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: W0203 06:46:35.691940 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod435a5c9b_684c_42ee_9519_13c14510718e.slice/crio-cdb5e8e4f64b4093a9353c5721279409fd894ff47e7dd9bed01e9a33a367cd4b WatchSource:0}: Error finding container cdb5e8e4f64b4093a9353c5721279409fd894ff47e7dd9bed01e9a33a367cd4b: Status 404 returned error can't find the container with id cdb5e8e4f64b4093a9353c5721279409fd894ff47e7dd9bed01e9a33a367cd4b Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.697102 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-crsvv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16316a01-0118-4b01-81cb-13e869b62484\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f9d449e60417a58eed750cbff1f8c1c97a740e68bbd349f3e8b98f64e749d12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfq5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-crsvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.717647 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f202cc16-8c9d-4e03-bd1d-0716f02a8ee7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://413ae0620494cf4c5807c78ecdfda671fd63b7aaefa96b63564212f57b34f9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4741ef3fd48469b5eef0e17e68ef7b20a3929017ff3939ee975b8a3751cfe965\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b741c846b0b5468ec9b52e16778e89632de68f37c0bd5b1fc375cc019a8c959c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f5dcbfe7ff012e1060f79ff8f126f7f86d92d4
cd113f390c41f3bdbb7e9baf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4248f5a0e550f2e4e6aa126875469ca31b11f40cf885b3d636c4d97925cb7fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.735102 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee532c209b30e3e2e21dc079ea01aa8bde392a9f42452a24ad5a0c34ca81632d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.749749 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d22be91149878dccbc8e538045eaf67301beefef286fd92c7e167337930f99b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0c3a3dd3295e8c1865539192e21489cd44235205b1b770db8a099907473d290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.753044 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.753077 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.753085 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.753098 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.753107 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:35Z","lastTransitionTime":"2026-02-03T06:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.764247 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48b7853e-6cfe-4818-8635-2b12f2be8097\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://66250fee433e0a4da7f8a56ca4c5299eed2c156035640d6ef9e401c81a60abfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3631ce704525fba5e76870d8f108ed0e5d21e9b061d2ffc99b84c1c4755aa0e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\
\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d261e399c0e0ab21edf85442c34085baa35a4d89963fe29dfc815ec96172d14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4be2f3e13f93718e31ed1d5f95cc05eb5e2996825afabed9e742a839cf2949a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.776470 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with 
unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.788801 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dm4vz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91ddb23bc0f787c0874522cb2f92e61f895a03595a116248d27fbc2030a1cf92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t8ch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dm4vz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.799210 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-g7trx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"435a5c9b-684c-42ee-9519-13c14510718e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssscp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssscp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-g7trx\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.818325 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7418b1d-9f7d-48cd-aac4-6a1b85967841\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\
":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerI
D\\\":\\\"cri-o://3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc28205a03a67a974a7963431c89c78a6f766b5736c413cd633730a2faca477\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4a5a155685f32631be1e7448aede399af32fd65f4c99ada08f1e72ae829a09d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T06:46:33Z\\\",\\\"message\\\":\\\"roller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 06:46:33.635802 6330 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0203 06:46:33.635861 6330 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0203 06:46:33.635872 6330 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0203 06:46:33.635906 6330 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0203 06:46:33.635915 6330 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0203 06:46:33.635921 6330 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0203 06:46:33.635935 6330 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0203 06:46:33.635946 6330 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0203 06:46:33.635955 6330 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0203 06:46:33.635968 6330 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0203 06:46:33.635969 6330 factory.go:656] Stopping watch factory\\\\nI0203 06:46:33.635938 6330 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0203 06:46:33.635982 6330 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0203 06:46:33.635987 6330 ovnkube.go:599] Stopped ovnkube\\\\nI0203 06:46:33.635992 6330 handler.go:208] Removed *v1.Node event handler 
2\\\\nI02\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc28205a03a67a974a7963431c89c78a6f766b5736c413cd633730a2faca477\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T06:46:35Z\\\",\\\"message\\\":\\\"LB{services.LB{Name:\\\\\\\"Service_openshift-console-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.88\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0203 06:46:35.512722 6480 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-cluster-version/cluster-version-operator]} name:Service_openshift-cluster-version/cluster-version-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.182:9099:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0203 06:46:35.512931 6480 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd4
7ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7b8d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.830422 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c8c93fe-ba86-4899-a018-d24fb324de5c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-
crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35ab5e2fc89d8e658f04c9a5ed234dad845dcf70d39e89bd52fed8f84b6c8c68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T06:46:16Z\\\",\\\"message\\\":\\\"W0203 06:46:05.531717 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 06:46:05.532068 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770101165 cert, and key in /tmp/serving-cert-4006352559/serving-signer.crt, /tmp/serving-cert-4006352559/serving-signer.key\\\\nI0203 06:46:05.809216 1 observer_polling.go:159] Starting file observer\\\\nW0203 06:46:05.812530 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 06:46:05.812748 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 06:46:05.815997 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4006352559/tls.crt::/tmp/serving-cert-4006352559/tls.key\\\\\\\"\\\\nF0203 06:46:16.112630 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.841387 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.854923 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e25d5a00-1315-4327-aadd-fd81e45fb023\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d0b49de3f605b693ae64a05177cab0247aa357fafaba47ec21c7982f17c47f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5qjsn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.858231 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.858259 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:35 crc 
kubenswrapper[4998]: I0203 06:46:35.858268 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.858284 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.858293 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:35Z","lastTransitionTime":"2026-02-03T06:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.867125 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.879435 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981f8ad138cf566afc1dc984e1549e0dabb353e61469585343b399ef799f2b71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ab581feac753b27611d84de9a135dada6c09b508339e915247f806c6d69db1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v9x5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:35Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.960294 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.960328 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.960337 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.960359 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:35 crc kubenswrapper[4998]: I0203 06:46:35.960370 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:35Z","lastTransitionTime":"2026-02-03T06:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.062639 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.062684 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.062697 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.062715 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.062726 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:36Z","lastTransitionTime":"2026-02-03T06:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.165119 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.165151 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.165160 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.165175 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.165185 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:36Z","lastTransitionTime":"2026-02-03T06:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.267050 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.267097 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.267112 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.267130 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.267145 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:36Z","lastTransitionTime":"2026-02-03T06:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.369256 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.369283 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.369291 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.369303 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.369311 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:36Z","lastTransitionTime":"2026-02-03T06:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.427541 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.427613 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:46:36 crc kubenswrapper[4998]: E0203 06:46:36.427753 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.427766 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-28 03:25:00.699461788 +0000 UTC Feb 03 06:46:36 crc kubenswrapper[4998]: E0203 06:46:36.427972 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.428092 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:46:36 crc kubenswrapper[4998]: E0203 06:46:36.428175 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.471423 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.471488 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.471504 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.471528 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.471546 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:36Z","lastTransitionTime":"2026-02-03T06:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.574255 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.574286 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.574296 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.574308 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.574316 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:36Z","lastTransitionTime":"2026-02-03T06:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.660241 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7b8d_f7418b1d-9f7d-48cd-aac4-6a1b85967841/ovnkube-controller/1.log" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.667169 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-g7trx" event={"ID":"435a5c9b-684c-42ee-9519-13c14510718e","Type":"ContainerStarted","Data":"994c5aef2744f3eed8f7fdbbe4836e8a63c096f5ec1ef330a75f626e54ac2dad"} Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.667211 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-g7trx" event={"ID":"435a5c9b-684c-42ee-9519-13c14510718e","Type":"ContainerStarted","Data":"cdb5e8e4f64b4093a9353c5721279409fd894ff47e7dd9bed01e9a33a367cd4b"} Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.676856 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.676901 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.676913 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.676941 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.676955 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:36Z","lastTransitionTime":"2026-02-03T06:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.780559 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.780639 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.780658 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.780684 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.780703 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:36Z","lastTransitionTime":"2026-02-03T06:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.859960 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-s5wml"] Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.860323 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s5wml" Feb 03 06:46:36 crc kubenswrapper[4998]: E0203 06:46:36.860365 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-s5wml" podUID="7df2ae20-0aeb-4b1e-a408-c1903e061833" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.871379 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981f8ad138cf566afc1dc984e1549e0dabb353e61469585343b399ef799f2b71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ab581feac753b27611d84de9a135dada6c09b508339e915247f806c6d69db1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v9x5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:36Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.883481 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.883522 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.883531 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.883546 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.883559 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:36Z","lastTransitionTime":"2026-02-03T06:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.886758 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:36Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.907277 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f202cc16-8c9d-4e03-bd1d-0716f02a8ee7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://413ae0620494cf4c5807c78ecdfda671fd63b7aaefa96b63564212f57b34f9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4741ef3fd48469b5eef0e17e68ef7b20a3929017ff3939ee975b8a3751cfe965\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeM
ounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b741c846b0b5468ec9b52e16778e89632de68f37c0bd5b1fc375cc019a8c959c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f5dcbfe7ff012e1060f79ff8f126f7f86d92d4cd113f390c41f3bdbb7e9baf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4248f5a0e550f2e4e6aa126875469ca31b11f40cf885b3d636c4d97925cb7fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"exitCode\\\":0,\\\"fi
nishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:36Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.924457 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee532c209b30e3e2e21dc079ea01aa8bde392a9f42452a24ad5a0c34ca81632d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:36Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.935501 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d22be91149878dccbc8e538045eaf67301beefef286fd92c7e167337930f99b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0c3a3dd3295e8c1865539192e21489cd44235205b1b770db8a099907473d290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:36Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.946280 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d754bb9fdbc001ab9d9a1ce51b99180c387ebda84e0a5b4d1fe9af76c6c258fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:36Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.954721 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-plgnf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0226ecd8c9fa202b4c46cf30a396c7bd55814fe45ec536e4fff2c0f09d6aa371\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5z4jf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-plgnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:36Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.963164 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-crsvv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16316a01-0118-4b01-81cb-13e869b62484\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f9d449e60417a58eed750cbff1f8c1c97a740e68bbd349f3e8b98f64e749d12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfq5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-crsvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:36Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.968605 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7df2ae20-0aeb-4b1e-a408-c1903e061833-metrics-certs\") pod \"network-metrics-daemon-s5wml\" (UID: \"7df2ae20-0aeb-4b1e-a408-c1903e061833\") " pod="openshift-multus/network-metrics-daemon-s5wml" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.969293 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bwcm6\" (UniqueName: \"kubernetes.io/projected/7df2ae20-0aeb-4b1e-a408-c1903e061833-kube-api-access-bwcm6\") pod \"network-metrics-daemon-s5wml\" (UID: \"7df2ae20-0aeb-4b1e-a408-c1903e061833\") " pod="openshift-multus/network-metrics-daemon-s5wml" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.975989 4998 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:36Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.985621 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.985649 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.985659 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.985674 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.985686 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:36Z","lastTransitionTime":"2026-02-03T06:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:36 crc kubenswrapper[4998]: I0203 06:46:36.990159 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dm4vz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91ddb23bc0f787c0874522cb2f92e61f895a03595a116248d27fbc2030a1cf92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t8ch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\
\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dm4vz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:36Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.001892 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-g7trx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"435a5c9b-684c-42ee-9519-13c14510718e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssscp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssscp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-g7trx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:37Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.012520 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-s5wml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df2ae20-0aeb-4b1e-a408-c1903e061833\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwcm6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwcm6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:36Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-s5wml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:37Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.024325 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48b7853e-6cfe-4818-8635-2b12f2be8097\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://66250fee433e0a4da7f8a56ca4c5299eed2c156035640d6ef9e401c81a60abfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3631ce704525fba5e76870d8f108ed0e5d21e9b061d2ffc99b84c1c4755aa0e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d261e399c0e0ab21edf85442c34085baa35a4d89963fe29dfc815ec96172d14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4be2f3e13f93718e31ed1d5f95cc05eb5e2996825afabed9e742a839cf2949a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:37Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.035761 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:37Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.048956 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e25d5a00-1315-4327-aadd-fd81e45fb023\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d0b49de3f605b693ae64a05177cab0247aa357fafaba47ec21c7982f17c47f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5qjsn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:37Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.065630 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7418b1d-9f7d-48cd-aac4-6a1b85967841\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc28205a03a67a974a7963431c89c78a6f766b5
736c413cd633730a2faca477\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4a5a155685f32631be1e7448aede399af32fd65f4c99ada08f1e72ae829a09d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T06:46:33Z\\\",\\\"message\\\":\\\"roller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 06:46:33.635802 6330 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0203 06:46:33.635861 6330 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0203 06:46:33.635872 6330 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0203 06:46:33.635906 6330 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0203 06:46:33.635915 6330 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0203 06:46:33.635921 6330 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0203 06:46:33.635935 6330 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0203 06:46:33.635946 6330 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0203 06:46:33.635955 6330 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0203 06:46:33.635968 6330 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0203 06:46:33.635969 6330 factory.go:656] Stopping watch factory\\\\nI0203 06:46:33.635938 6330 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0203 06:46:33.635982 6330 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0203 06:46:33.635987 6330 ovnkube.go:599] Stopped ovnkube\\\\nI0203 06:46:33.635992 6330 handler.go:208] Removed *v1.Node event handler 2\\\\nI02\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc28205a03a67a974a7963431c89c78a6f766b5736c413cd633730a2faca477\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T06:46:35Z\\\",\\\"message\\\":\\\"LB{services.LB{Name:\\\\\\\"Service_openshift-console-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.88\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0203 06:46:35.512722 6480 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-cluster-version/cluster-version-operator]} name:Service_openshift-cluster-version/cluster-version-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} 
selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.182:9099:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0203 06:46:35.512931 6480 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveR
eadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7b8d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:37Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.070774 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7df2ae20-0aeb-4b1e-a408-c1903e061833-metrics-certs\") pod \"network-metrics-daemon-s5wml\" (UID: \"7df2ae20-0aeb-4b1e-a408-c1903e061833\") " pod="openshift-multus/network-metrics-daemon-s5wml" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.070822 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bwcm6\" (UniqueName: \"kubernetes.io/projected/7df2ae20-0aeb-4b1e-a408-c1903e061833-kube-api-access-bwcm6\") pod \"network-metrics-daemon-s5wml\" (UID: \"7df2ae20-0aeb-4b1e-a408-c1903e061833\") " pod="openshift-multus/network-metrics-daemon-s5wml" Feb 03 06:46:37 crc kubenswrapper[4998]: E0203 06:46:37.071041 4998 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 03 06:46:37 crc kubenswrapper[4998]: E0203 06:46:37.071156 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7df2ae20-0aeb-4b1e-a408-c1903e061833-metrics-certs podName:7df2ae20-0aeb-4b1e-a408-c1903e061833 nodeName:}" failed. No retries permitted until 2026-02-03 06:46:37.571132752 +0000 UTC m=+35.857826748 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7df2ae20-0aeb-4b1e-a408-c1903e061833-metrics-certs") pod "network-metrics-daemon-s5wml" (UID: "7df2ae20-0aeb-4b1e-a408-c1903e061833") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.082310 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c8c93fe-ba86-4899-a018-d24fb324de5c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35ab5e2fc89d8e658f04c9a5ed234dad845dcf70d39e89bd52fed8f84b6c8c68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T06:46:16Z\\\",\\\"message\\\":\\\"W0203 06:46:05.531717 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 06:46:05.532068 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770101165 cert, and key in /tmp/serving-cert-4006352559/serving-signer.crt, /tmp/serving-cert-4006352559/serving-signer.key\\\\nI0203 06:46:05.809216 1 observer_polling.go:159] Starting file observer\\\\nW0203 06:46:05.812530 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 06:46:05.812748 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 06:46:05.815997 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4006352559/tls.crt::/tmp/serving-cert-4006352559/tls.key\\\\\\\"\\\\nF0203 06:46:16.112630 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:37Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.087335 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bwcm6\" (UniqueName: \"kubernetes.io/projected/7df2ae20-0aeb-4b1e-a408-c1903e061833-kube-api-access-bwcm6\") pod \"network-metrics-daemon-s5wml\" (UID: \"7df2ae20-0aeb-4b1e-a408-c1903e061833\") " pod="openshift-multus/network-metrics-daemon-s5wml" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.088110 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.088149 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.088162 4998 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.088178 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.088189 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:37Z","lastTransitionTime":"2026-02-03T06:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.190890 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.190939 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.190956 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.190974 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.190986 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:37Z","lastTransitionTime":"2026-02-03T06:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.293356 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.293400 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.293410 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.293431 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.293445 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:37Z","lastTransitionTime":"2026-02-03T06:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.395961 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.396025 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.396048 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.396077 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.396101 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:37Z","lastTransitionTime":"2026-02-03T06:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.428734 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 04:57:02.646600605 +0000 UTC Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.498616 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.498680 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.498696 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.498723 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.498742 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:37Z","lastTransitionTime":"2026-02-03T06:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.576845 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7df2ae20-0aeb-4b1e-a408-c1903e061833-metrics-certs\") pod \"network-metrics-daemon-s5wml\" (UID: \"7df2ae20-0aeb-4b1e-a408-c1903e061833\") " pod="openshift-multus/network-metrics-daemon-s5wml" Feb 03 06:46:37 crc kubenswrapper[4998]: E0203 06:46:37.577040 4998 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 03 06:46:37 crc kubenswrapper[4998]: E0203 06:46:37.577158 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7df2ae20-0aeb-4b1e-a408-c1903e061833-metrics-certs podName:7df2ae20-0aeb-4b1e-a408-c1903e061833 nodeName:}" failed. 
No retries permitted until 2026-02-03 06:46:38.577125918 +0000 UTC m=+36.863819764 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7df2ae20-0aeb-4b1e-a408-c1903e061833-metrics-certs") pod "network-metrics-daemon-s5wml" (UID: "7df2ae20-0aeb-4b1e-a408-c1903e061833") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.601660 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.601706 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.601724 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.601770 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.601814 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:37Z","lastTransitionTime":"2026-02-03T06:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.674079 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-g7trx" event={"ID":"435a5c9b-684c-42ee-9519-13c14510718e","Type":"ContainerStarted","Data":"58610e96b03ce7c48ff10de9c3655300c6d3a0ee834827f4d06a912b72693735"} Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.690460 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c8c93fe-ba86-4899-a018-d24fb324de5c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35ab5e2fc89d8e658f04c9a5ed234dad845dcf70d39e89bd52fed8f84b6c8c68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T06:46:16Z\\\",\\\"message\\\":\\\"W0203 06:46:05.531717 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 06:46:05.532068 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770101165 cert, and key in /tmp/serving-cert-4006352559/serving-signer.crt, /tmp/serving-cert-4006352559/serving-signer.key\\\\nI0203 06:46:05.809216 1 observer_polling.go:159] Starting file observer\\\\nW0203 06:46:05.812530 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 06:46:05.812748 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 06:46:05.815997 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4006352559/tls.crt::/tmp/serving-cert-4006352559/tls.key\\\\\\\"\\\\nF0203 06:46:16.112630 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:37Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.703986 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:37Z is after 2025-08-24T17:21:41Z"
Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.705055 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.705100 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.705114 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.705137 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.705158 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:37Z","lastTransitionTime":"2026-02-03T06:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.722973 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e25d5a00-1315-4327-aadd-fd81e45fb023\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d0b49de3f605b693ae64a05177cab0247aa357fafaba47ec21c7982f17c47f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5qjsn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:37Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.757292 4998 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7418b1d-9f7d-48cd-aac4-6a1b85967841\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc28205a03a67a974a7963431c89c78a6f766b5736c413cd633730a2faca477\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4a5a155685f32631be1e7448aede399af32fd65f4c99ada08f1e72ae829a09d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T06:46:33Z\\\",\\\"message\\\":\\\"roller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 06:46:33.635802 6330 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0203 06:46:33.635861 6330 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0203 06:46:33.635872 6330 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0203 06:46:33.635906 6330 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0203 06:46:33.635915 6330 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0203 06:46:33.635921 6330 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0203 06:46:33.635935 6330 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0203 06:46:33.635946 6330 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0203 06:46:33.635955 6330 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0203 06:46:33.635968 6330 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0203 06:46:33.635969 6330 factory.go:656] Stopping watch factory\\\\nI0203 06:46:33.635938 6330 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0203 06:46:33.635982 6330 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0203 06:46:33.635987 6330 ovnkube.go:599] Stopped ovnkube\\\\nI0203 06:46:33.635992 6330 handler.go:208] Removed *v1.Node event handler 2\\\\nI02\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc28205a03a67a974a7963431c89c78a6f766b5736c413cd633730a2faca477\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T06:46:35Z\\\",\\\"message\\\":\\\"LB{services.LB{Name:\\\\\\\"Service_openshift-console-operator/metrics_TCP_cluster\\\\\\\", 
UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.88\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0203 06:46:35.512722 6480 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-cluster-version/cluster-version-operator]} name:Service_openshift-cluster-version/cluster-version-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.182:9099:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0203 06:46:35.512931 6480 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7b8d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:37Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.777713 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:37Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.797745 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981f8ad138cf566afc1dc984e1549e0dabb353e61469585343b399ef799f2b71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ab581feac753b27611d84de9a135dada6c09b508339e915247f806c6d69db1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v9x5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:37Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.807896 4998 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.807964 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.807982 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.808006 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.808022 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:37Z","lastTransitionTime":"2026-02-03T06:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.812327 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-crsvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16316a01-0118-4b01-81cb-13e869b62484\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f9d449e60417a58eed750cbff1f8c1c97a740e68bbd349f3e8b98f64e749d12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfq5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-crsvv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:37Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.838020 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f202cc16-8c9d-4e03-bd1d-0716f02a8ee7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://413ae0620494cf4c5807c78ecdfda671fd63b7aaefa96b63564212f57b34f9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4741ef3fd48469b5eef0e17e68ef7b20a3929017ff3939ee975b8a3751cfe965\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b741c846b0b5468ec9b52e16778e89632de68f37c0bd5b1fc375cc019a8c959c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f5dcbfe7ff012e1060f79ff8f126f7f86d92d4cd113f390c41f3bdbb7e9baf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4248f5a0e550f2e4e6aa126875469ca31b11f40cf885b3d636c4d97925cb7fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:37Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.855109 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee532c209b30e3e2e21dc079ea01aa8bde392a9f42452a24ad5a0c34ca81632d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:37Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.869237 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d22be91149878dccbc8e538045eaf67301beefef286fd92c7e167337930f99b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0c3a3dd3295e8c1865539192e21489cd44235205b1b770db8a099907473d290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:37Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.883599 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d754bb9fdbc001ab9d9a1ce51b99180c387ebda84e0a5b4d1fe9af76c6c258fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:37Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.895244 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-plgnf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0226ecd8c9fa202b4c46cf30a396c7bd55814fe45ec536e4fff2c0f09d6aa371\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5z4jf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-plgnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:37Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.908289 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48b7853e-6cfe-4818-8635-2b12f2be8097\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://66250fee433e0a4da7f8a56ca4c5299eed2c156035640d6ef9e401c81a60abfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3631ce704525fba5e76870d8f108ed0e5d21e9b061d2ffc99b84c1c4755aa0e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d261e399c0e0ab21edf85442c34085baa35a4d89963fe29dfc815ec96172d14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4be2f3e13f93718e31ed1d5f95cc05eb5e2996825afabed9e742a839cf2949a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:37Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.910022 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.910058 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.910067 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.910079 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.910087 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:37Z","lastTransitionTime":"2026-02-03T06:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.921670 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:37Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.937948 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dm4vz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91ddb23bc0f787c0874522cb2f92e61f895a03595a116248d27fbc2030a1cf92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t8ch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dm4vz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:37Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.950549 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-g7trx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"435a5c9b-684c-42ee-9519-13c14510718e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://994c5aef2744f3eed8f7fdbbe4836e8a63c096f5ec1ef330a75f626e54ac2dad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssscp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://58610e96b03ce7c48ff10de9c3655300c6d3a0ee834827f4d06a912b72693735\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssscp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-g7trx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:37Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:37 crc kubenswrapper[4998]: I0203 06:46:37.959506 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-s5wml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df2ae20-0aeb-4b1e-a408-c1903e061833\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwcm6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwcm6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:36Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-s5wml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:37Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.012379 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.012420 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.012431 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.012455 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.012464 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:38Z","lastTransitionTime":"2026-02-03T06:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.080382 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:46:38 crc kubenswrapper[4998]: E0203 06:46:38.080572 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:46:54.080532568 +0000 UTC m=+52.367226414 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.115449 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.115517 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.115532 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.115555 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.115568 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:38Z","lastTransitionTime":"2026-02-03T06:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.125870 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.125919 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.125936 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.125959 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.125976 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:38Z","lastTransitionTime":"2026-02-03T06:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:38 crc kubenswrapper[4998]: E0203 06:46:38.143636 4998 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29c0cf50-7633-4e19-9ce0-3cf9f0cae181\\\",\\\"systemUUID\\\":\\\"2c417530-aeae-4e8e-ac67-d425307cb93c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:38Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.148174 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.148286 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.148362 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.148434 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.148501 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:38Z","lastTransitionTime":"2026-02-03T06:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:38 crc kubenswrapper[4998]: E0203 06:46:38.162601 4998 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29c0cf50-7633-4e19-9ce0-3cf9f0cae181\\\",\\\"systemUUID\\\":\\\"2c417530-aeae-4e8e-ac67-d425307cb93c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:38Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.166140 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.166202 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.166219 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.166248 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.166266 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:38Z","lastTransitionTime":"2026-02-03T06:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:38 crc kubenswrapper[4998]: E0203 06:46:38.178903 4998 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29c0cf50-7633-4e19-9ce0-3cf9f0cae181\\\",\\\"systemUUID\\\":\\\"2c417530-aeae-4e8e-ac67-d425307cb93c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:38Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.181353 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" 
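
The retries that follow repeat the same patch and fail the same way, so the root cause is fully visible in the error tail above: the serving certificate of the node.network-node-identity.openshift.io webhook at 127.0.0.1:9743 expired on 2025-08-24, while the node clock reads 2026-02-03. A minimal Go sketch (an editorial illustration, not part of the kubelet) that inspects the certificate this webhook presents; only the address comes from the log line, the rest is generic crypto/tls usage:

package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	// Skip verification so the handshake succeeds even with an expired cert;
	// we only want to inspect the certificate the server presents.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	leaf := conn.ConnectionState().PeerCertificates[0]
	fmt.Printf("subject=%s notBefore=%s notAfter=%s expired=%v\n",
		leaf.Subject, leaf.NotBefore, leaf.NotAfter, time.Now().After(leaf.NotAfter))
}

Any client will see the same NotAfter; the remedy is rotating the webhook's serving certificate, not anything on the kubelet side.
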
(UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.181391 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.181412 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.181444 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:46:38 crc kubenswrapper[4998]: E0203 06:46:38.181512 4998 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 03 06:46:38 crc kubenswrapper[4998]: E0203 06:46:38.181548 4998 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 03 06:46:38 crc kubenswrapper[4998]: E0203 06:46:38.181581 4998 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 03 06:46:38 crc kubenswrapper[4998]: E0203 06:46:38.181597 4998 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 06:46:38 crc kubenswrapper[4998]: E0203 06:46:38.181565 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-03 06:46:54.181552424 +0000 UTC m=+52.468246220 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 03 06:46:38 crc kubenswrapper[4998]: E0203 06:46:38.181673 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2026-02-03 06:46:54.181651857 +0000 UTC m=+52.468345833 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 06:46:38 crc kubenswrapper[4998]: E0203 06:46:38.181547 4998 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 03 06:46:38 crc kubenswrapper[4998]: E0203 06:46:38.181749 4998 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 03 06:46:38 crc kubenswrapper[4998]: E0203 06:46:38.181763 4998 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 03 06:46:38 crc kubenswrapper[4998]: E0203 06:46:38.181772 4998 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 06:46:38 crc kubenswrapper[4998]: E0203 06:46:38.181879 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-03 06:46:54.181835503 +0000 UTC m=+52.468529349 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 03 06:46:38 crc kubenswrapper[4998]: E0203 06:46:38.181907 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-03 06:46:54.181896905 +0000 UTC m=+52.468590701 (durationBeforeRetry 16s). 
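
The (durationBeforeRetry 16s) values above, and the 2s value for the younger metrics-certs operation later in the log, are consistent with the volume manager's per-operation exponential backoff, which roughly doubles the wait after each consecutive failure (0.5s, 1s, 2s, 4s, 8s, 16s, ...). A schematic Go sketch; the 500ms initial value and the cap are assumptions taken from the kubelet's exponentialbackoff package rather than anything visible in this log:

package main

import (
	"fmt"
	"time"
)

const (
	initialDurationBeforeRetry = 500 * time.Millisecond      // assumed initial backoff
	maxDurationBeforeRetry     = 2*time.Minute + 2*time.Second // assumed cap
)

// nextBackoff doubles the previous wait, starting at the initial value and
// saturating at the cap, mirroring the per-operation backoff the log suggests.
func nextBackoff(d time.Duration) time.Duration {
	if d == 0 {
		return initialDurationBeforeRetry
	}
	if d = 2 * d; d > maxDurationBeforeRetry {
		d = maxDurationBeforeRetry
	}
	return d
}

func main() {
	var d time.Duration
	for i := 0; i < 10; i++ {
		d = nextBackoff(d)
		fmt.Println(d) // 500ms 1s 2s 4s 8s 16s 32s 1m4s 2m2s 2m2s (2m8s is capped)
	}
}
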
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.182836 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.182945 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.183015 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.183096 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.183154 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:38Z","lastTransitionTime":"2026-02-03T06:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:38 crc kubenswrapper[4998]: E0203 06:46:38.195851 4998 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29c0cf50-7633-4e19-9ce0-3cf9f0cae181\\\",\\\"systemUUID\\\":\\\"2c417530-aeae-4e8e-ac67-d425307cb93c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:38Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.199309 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.199354 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.199368 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.199385 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.199396 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:38Z","lastTransitionTime":"2026-02-03T06:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:38 crc kubenswrapper[4998]: E0203 06:46:38.216639 4998 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29c0cf50-7633-4e19-9ce0-3cf9f0cae181\\\",\\\"systemUUID\\\":\\\"2c417530-aeae-4e8e-ac67-d425307cb93c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:38Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:38 crc kubenswrapper[4998]: E0203 06:46:38.216965 4998 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.218608 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
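
"update node status exceeds retry count" is the kubelet abandoning this status-sync pass after a fixed number of consecutive patch attempts; the webhook failures at 06:46:38.178903, .195851 and .216639 above are those retries. A schematic Go sketch of the loop; nodeStatusUpdateRetry is 5 in the kubelet source at the time of writing, but treat the exact value as an assumption:

package main

import (
	"errors"
	"fmt"
)

// nodeStatusUpdateRetry mirrors the constant in the kubelet source (assumed: 5).
const nodeStatusUpdateRetry = 5

// tryUpdateNodeStatus stands in for the real status PATCH; in this log every
// attempt fails because the node admission webhook's serving cert has expired.
func tryUpdateNodeStatus() error {
	return errors.New("Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\"")
}

func updateNodeStatus() error {
	for i := 0; i < nodeStatusUpdateRetry; i++ {
		if err := tryUpdateNodeStatus(); err != nil {
			fmt.Printf("Error updating node status, will retry: attempt %d: %v\n", i+1, err)
			continue
		}
		return nil
	}
	return errors.New("update node status exceeds retry count")
}

func main() {
	if err := updateNodeStatus(); err != nil {
		fmt.Println(err) // matches the E0203 06:46:38.216965 line above
	}
}
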
event="NodeHasSufficientMemory" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.218651 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.218660 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.218672 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.218682 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:38Z","lastTransitionTime":"2026-02-03T06:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.320732 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.320789 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.320799 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.320814 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.320822 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:38Z","lastTransitionTime":"2026-02-03T06:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.423141 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.423225 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.423249 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.423280 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.423305 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:38Z","lastTransitionTime":"2026-02-03T06:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.427442 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.427518 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:46:38 crc kubenswrapper[4998]: E0203 06:46:38.427573 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 06:46:38 crc kubenswrapper[4998]: E0203 06:46:38.427700 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.427899 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s5wml" Feb 03 06:46:38 crc kubenswrapper[4998]: E0203 06:46:38.428057 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s5wml" podUID="7df2ae20-0aeb-4b1e-a408-c1903e061833" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.428093 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:46:38 crc kubenswrapper[4998]: E0203 06:46:38.428169 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.429254 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 02:30:35.434468092 +0000 UTC Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.525897 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.525931 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.525944 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.525960 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.525971 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:38Z","lastTransitionTime":"2026-02-03T06:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.586475 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7df2ae20-0aeb-4b1e-a408-c1903e061833-metrics-certs\") pod \"network-metrics-daemon-s5wml\" (UID: \"7df2ae20-0aeb-4b1e-a408-c1903e061833\") " pod="openshift-multus/network-metrics-daemon-s5wml" Feb 03 06:46:38 crc kubenswrapper[4998]: E0203 06:46:38.586774 4998 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 03 06:46:38 crc kubenswrapper[4998]: E0203 06:46:38.586909 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7df2ae20-0aeb-4b1e-a408-c1903e061833-metrics-certs podName:7df2ae20-0aeb-4b1e-a408-c1903e061833 nodeName:}" failed. No retries permitted until 2026-02-03 06:46:40.586878696 +0000 UTC m=+38.873572542 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7df2ae20-0aeb-4b1e-a408-c1903e061833-metrics-certs") pod "network-metrics-daemon-s5wml" (UID: "7df2ae20-0aeb-4b1e-a408-c1903e061833") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.628529 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.628590 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.628607 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.628633 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.628651 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:38Z","lastTransitionTime":"2026-02-03T06:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.730874 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.730919 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.730931 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.730949 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.730961 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:38Z","lastTransitionTime":"2026-02-03T06:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.834257 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.834324 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.834345 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.834378 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.834399 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:38Z","lastTransitionTime":"2026-02-03T06:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.937426 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.937493 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.937511 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.937534 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:38 crc kubenswrapper[4998]: I0203 06:46:38.937552 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:38Z","lastTransitionTime":"2026-02-03T06:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.040316 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.040380 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.040397 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.040421 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.040439 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:39Z","lastTransitionTime":"2026-02-03T06:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.143260 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.143320 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.143343 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.143371 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.143394 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:39Z","lastTransitionTime":"2026-02-03T06:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.247071 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.247142 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.247164 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.247194 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.247217 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:39Z","lastTransitionTime":"2026-02-03T06:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.350902 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.350957 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.350979 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.351006 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.351030 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:39Z","lastTransitionTime":"2026-02-03T06:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.429593 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-09 16:33:30.424492514 +0000 UTC Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.454909 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.455125 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.455164 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.455198 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.455221 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:39Z","lastTransitionTime":"2026-02-03T06:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.558355 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.558407 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.558418 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.558437 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.558449 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:39Z","lastTransitionTime":"2026-02-03T06:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.661347 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.661419 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.661450 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.661482 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.661502 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:39Z","lastTransitionTime":"2026-02-03T06:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.764907 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.764973 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.764990 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.765015 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.765033 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:39Z","lastTransitionTime":"2026-02-03T06:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.867499 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.867536 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.867546 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.867560 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.867571 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:39Z","lastTransitionTime":"2026-02-03T06:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.970432 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.970839 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.971069 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.971260 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:39 crc kubenswrapper[4998]: I0203 06:46:39.971602 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:39Z","lastTransitionTime":"2026-02-03T06:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.075325 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.075881 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.076144 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.076430 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.076632 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:40Z","lastTransitionTime":"2026-02-03T06:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.179304 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.179372 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.179391 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.179411 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.179425 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:40Z","lastTransitionTime":"2026-02-03T06:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.282142 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.282223 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.282247 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.282280 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.282305 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:40Z","lastTransitionTime":"2026-02-03T06:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.385427 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.385483 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.385499 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.385522 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.385540 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:40Z","lastTransitionTime":"2026-02-03T06:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.427100 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.427150 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s5wml" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.427150 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.427447 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:46:40 crc kubenswrapper[4998]: E0203 06:46:40.427549 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 06:46:40 crc kubenswrapper[4998]: E0203 06:46:40.427351 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 06:46:40 crc kubenswrapper[4998]: E0203 06:46:40.427620 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 06:46:40 crc kubenswrapper[4998]: E0203 06:46:40.427714 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s5wml" podUID="7df2ae20-0aeb-4b1e-a408-c1903e061833" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.431352 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 15:59:48.401447529 +0000 UTC Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.488720 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.488836 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.488861 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.488889 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.488956 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:40Z","lastTransitionTime":"2026-02-03T06:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.591492 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.591552 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.591589 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.591619 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.591640 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:40Z","lastTransitionTime":"2026-02-03T06:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.608365 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7df2ae20-0aeb-4b1e-a408-c1903e061833-metrics-certs\") pod \"network-metrics-daemon-s5wml\" (UID: \"7df2ae20-0aeb-4b1e-a408-c1903e061833\") " pod="openshift-multus/network-metrics-daemon-s5wml" Feb 03 06:46:40 crc kubenswrapper[4998]: E0203 06:46:40.608507 4998 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 03 06:46:40 crc kubenswrapper[4998]: E0203 06:46:40.608560 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7df2ae20-0aeb-4b1e-a408-c1903e061833-metrics-certs podName:7df2ae20-0aeb-4b1e-a408-c1903e061833 nodeName:}" failed. No retries permitted until 2026-02-03 06:46:44.608546545 +0000 UTC m=+42.895240351 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7df2ae20-0aeb-4b1e-a408-c1903e061833-metrics-certs") pod "network-metrics-daemon-s5wml" (UID: "7df2ae20-0aeb-4b1e-a408-c1903e061833") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.694650 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.694716 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.694738 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.694766 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.694813 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:40Z","lastTransitionTime":"2026-02-03T06:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.797711 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.797813 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.797839 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.797869 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.797891 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:40Z","lastTransitionTime":"2026-02-03T06:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.901074 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.901130 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.901142 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.901160 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:40 crc kubenswrapper[4998]: I0203 06:46:40.901172 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:40Z","lastTransitionTime":"2026-02-03T06:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.004970 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.005037 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.005061 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.005091 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.005113 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:41Z","lastTransitionTime":"2026-02-03T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.107809 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.107856 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.107882 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.107902 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.107914 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:41Z","lastTransitionTime":"2026-02-03T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.210885 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.210934 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.210946 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.210961 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.210970 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:41Z","lastTransitionTime":"2026-02-03T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.313713 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.313822 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.313845 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.313868 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.313885 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:41Z","lastTransitionTime":"2026-02-03T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.416731 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.416803 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.416832 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.416847 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.416855 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:41Z","lastTransitionTime":"2026-02-03T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.432223 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 18:12:26.967213874 +0000 UTC Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.519576 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.519610 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.519618 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.519633 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.519642 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:41Z","lastTransitionTime":"2026-02-03T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.622139 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.622189 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.622205 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.622227 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.622243 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:41Z","lastTransitionTime":"2026-02-03T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.725563 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.725639 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.725661 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.725690 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.725712 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:41Z","lastTransitionTime":"2026-02-03T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.829216 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.829271 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.829288 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.829312 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.829331 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:41Z","lastTransitionTime":"2026-02-03T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.931767 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.931823 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.931835 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.931853 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:41 crc kubenswrapper[4998]: I0203 06:46:41.931865 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:41Z","lastTransitionTime":"2026-02-03T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.034695 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.034763 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.034805 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.034833 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.034851 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:42Z","lastTransitionTime":"2026-02-03T06:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.138111 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.138195 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.138218 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.138244 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.138263 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:42Z","lastTransitionTime":"2026-02-03T06:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.240897 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.240964 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.240985 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.241013 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.241034 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:42Z","lastTransitionTime":"2026-02-03T06:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.343726 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.343872 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.343899 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.343929 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.343951 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:42Z","lastTransitionTime":"2026-02-03T06:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.427113 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.427245 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.427590 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s5wml" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.427646 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:46:42 crc kubenswrapper[4998]: E0203 06:46:42.427847 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 06:46:42 crc kubenswrapper[4998]: E0203 06:46:42.428095 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s5wml" podUID="7df2ae20-0aeb-4b1e-a408-c1903e061833" Feb 03 06:46:42 crc kubenswrapper[4998]: E0203 06:46:42.427478 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 06:46:42 crc kubenswrapper[4998]: E0203 06:46:42.428537 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.432360 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 20:24:05.559756189 +0000 UTC Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.446089 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.446158 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.446180 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.446210 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.446231 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:42Z","lastTransitionTime":"2026-02-03T06:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.452690 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:42Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.466762 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981f8ad138cf566afc1dc984e1549e0dabb353e61469585343b399ef799f2b71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ab581feac753b27611d84de9a135dada6c09b508339e915247f806c6d69db1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v9x5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:42Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.489979 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d22be91149878dccbc8e538045eaf67301beefef286fd92c7e167337930f99b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0c3a3dd3295e8c1865539192e21489cd44235205b1b770db8a099907473d290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\
\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:42Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.507814 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d754bb9fdbc001ab9d9a1ce51b99180c387ebda84e0a5b4d1fe9af76c6c258fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:42Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.529268 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-plgnf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0226ecd8c9fa202b4c46cf30a396c7bd55814fe45ec536e4fff2c0f09d6aa371\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5z4jf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-plgnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:42Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.545508 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-crsvv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16316a01-0118-4b01-81cb-13e869b62484\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f9d449e60417a58eed750cbff1f8c1c97a740e68bbd349f3e8b98f64e749d12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfq5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-crsvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:42Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.550115 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.550187 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.550210 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.550245 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.550270 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:42Z","lastTransitionTime":"2026-02-03T06:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.577581 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f202cc16-8c9d-4e03-bd1d-0716f02a8ee7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://413ae0620494cf4c5807c78ecdfda671fd63b7aaefa96b63564212f57b34f9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4741ef3fd48469b5eef0e17e68ef7b20a3929017ff3939ee975b8a3751cfe965\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b741c846b0b5468ec9b52e16778e89632de68f37c0bd5b1fc375cc019a8c959c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f5dcbfe7ff012e1060f79ff8f126f7f86d92d4cd113f390c41f3bdbb7e9baf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4248f5a0e550f2e4e6aa126875469ca31b11f40cf885b3d636c4d97925cb7fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:42Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.594933 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee532c209b30e3e2e21dc079ea01aa8bde392a9f42452a24ad5a0c34ca81632d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:42Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.613623 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-g7trx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"435a5c9b-684c-42ee-9519-13c14510718e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://994c5aef2744f3eed8f7fdbbe4836e8a63c096f5ec1ef330a75f626e54ac2dad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssscp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://58610e96b03ce7c48ff10de9c3655300c6d3a0ee834827f4d06a912b72693735\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssscp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-g7trx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:42Z is after 2025-08-24T17:21:41Z" Feb 03 
06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.627177 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-s5wml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df2ae20-0aeb-4b1e-a408-c1903e061833\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwcm6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwcm6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:36Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-s5wml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:42Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.639244 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48b7853e-6cfe-4818-8635-2b12f2be8097\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://66250fee433e0a4da7f8a56ca4c5299eed2c156035640d6ef9e401c81a60abfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3631ce704525fba5e76870d8f108ed0e5d21e9b061d2ffc99b84c1c4755aa0e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d261e399c0e0ab21edf85442c34085baa35a4d89963fe29dfc815ec96172d14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4be2f3e13f93718e31ed1d5f95cc05eb5e2996825afabed9e742a839cf2949a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:42Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.649880 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:42Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.652325 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.652383 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.652395 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.652431 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.652447 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:42Z","lastTransitionTime":"2026-02-03T06:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.661759 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dm4vz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91ddb23bc0f787c0874522cb2f92e61f895a03595a116248d27fbc2030a1cf92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t8ch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dm4vz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:42Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.677517 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e25d5a00-1315-4327-aadd-fd81e45fb023\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d0b49de3f605b693ae64a05177cab0247aa357fafaba47ec21c7982f17c47f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cn
ibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5qjsn\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:42Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.697829 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7418b1d-9f7d-48cd-aac4-6a1b85967841\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"r
eadOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"
containerID\\\":\\\"cri-o://3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc28205a03a67a974a7963431c89c78a6f766b5736c413cd633730a2faca477\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4a5a155685f32631be1e7448aede399af32fd65f4c99ada08f1e72ae829a09d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T06:46:33Z\\\",\\\"message\\\":\\\"roller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 06:46:33.635802 6330 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0203 06:46:33.635861 6330 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0203 06:46:33.635872 6330 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0203 06:46:33.635906 6330 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0203 06:46:33.635915 6330 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0203 06:46:33.635921 6330 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0203 06:46:33.635935 6330 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0203 06:46:33.635946 6330 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0203 06:46:33.635955 6330 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0203 06:46:33.635968 6330 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0203 06:46:33.635969 6330 factory.go:656] Stopping watch factory\\\\nI0203 06:46:33.635938 6330 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0203 06:46:33.635982 6330 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0203 06:46:33.635987 6330 ovnkube.go:599] Stopped ovnkube\\\\nI0203 06:46:33.635992 6330 handler.go:208] Removed *v1.Node event handler 
2\\\\nI02\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc28205a03a67a974a7963431c89c78a6f766b5736c413cd633730a2faca477\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T06:46:35Z\\\",\\\"message\\\":\\\"LB{services.LB{Name:\\\\\\\"Service_openshift-console-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.88\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0203 06:46:35.512722 6480 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-cluster-version/cluster-version-operator]} name:Service_openshift-cluster-version/cluster-version-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.182:9099:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0203 06:46:35.512931 6480 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd4
7ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7b8d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:42Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.713045 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c8c93fe-ba86-4899-a018-d24fb324de5c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-
crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35ab5e2fc89d8e658f04c9a5ed234dad845dcf70d39e89bd52fed8f84b6c8c68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T06:46:16Z\\\",\\\"message\\\":\\\"W0203 06:46:05.531717 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 06:46:05.532068 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770101165 cert, and key in /tmp/serving-cert-4006352559/serving-signer.crt, /tmp/serving-cert-4006352559/serving-signer.key\\\\nI0203 06:46:05.809216 1 observer_polling.go:159] Starting file observer\\\\nW0203 06:46:05.812530 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 06:46:05.812748 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 06:46:05.815997 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4006352559/tls.crt::/tmp/serving-cert-4006352559/tls.key\\\\\\\"\\\\nF0203 06:46:16.112630 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:42Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.724160 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:42Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.755608 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.755630 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.755638 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.755649 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.755658 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:42Z","lastTransitionTime":"2026-02-03T06:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.858355 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.858436 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.858447 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.858468 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.858483 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:42Z","lastTransitionTime":"2026-02-03T06:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.961961 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.962064 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.962085 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.962116 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:42 crc kubenswrapper[4998]: I0203 06:46:42.962137 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:42Z","lastTransitionTime":"2026-02-03T06:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.066081 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.066122 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.066131 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.066147 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.066159 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:43Z","lastTransitionTime":"2026-02-03T06:46:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.170000 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.170055 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.170077 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.170105 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.170126 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:43Z","lastTransitionTime":"2026-02-03T06:46:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.272825 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.273172 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.273325 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.273538 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.273773 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:43Z","lastTransitionTime":"2026-02-03T06:46:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.330293 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.331857 4998 scope.go:117] "RemoveContainer" containerID="bfc28205a03a67a974a7963431c89c78a6f766b5736c413cd633730a2faca477" Feb 03 06:46:43 crc kubenswrapper[4998]: E0203 06:46:43.332118 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-p7b8d_openshift-ovn-kubernetes(f7418b1d-9f7d-48cd-aac4-6a1b85967841)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.355310 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:43Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.374374 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e25d5a00-1315-4327-aadd-fd81e45fb023\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d0b49de3f605b693ae64a05177cab0247aa357fafaba47ec21c7982f17c47f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5qjsn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:43Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.376337 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.376433 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.376461 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.376539 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.376566 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:43Z","lastTransitionTime":"2026-02-03T06:46:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.396182 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7418b1d-9f7d-48cd-aac4-6a1b85967841\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc28205a03a67a974a7963431c89c78a6f766b5736c413cd633730a2faca477\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc28205a03a67a974a7963431c89c78a6f766b5736c413cd633730a2faca477\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T06:46:35Z\\\",\\\"message\\\":\\\"LB{services.LB{Name:\\\\\\\"Service_openshift-console-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.88\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0203 06:46:35.512722 6480 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-cluster-version/cluster-version-operator]} name:Service_openshift-cluster-version/cluster-version-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.182:9099:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0203 06:46:35.512931 6480 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:34Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-p7b8d_openshift-ovn-kubernetes(f7418b1d-9f7d-48cd-aac4-6a1b85967841)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"r
ecursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7b8d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:43Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.416750 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c8c93fe-ba86-4899-a018-d24fb324de5c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35ab5e2fc89d8e658f04c9a5ed234dad845dcf70d39e89bd52fed8f84b6c8c68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T06:46:16Z\\\",\\\"message\\\":\\\"W0203 06:46:05.531717 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 06:46:05.532068 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770101165 cert, and key in /tmp/serving-cert-4006352559/serving-signer.crt, /tmp/serving-cert-4006352559/serving-signer.key\\\\nI0203 06:46:05.809216 1 observer_polling.go:159] Starting file observer\\\\nW0203 06:46:05.812530 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 06:46:05.812748 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 06:46:05.815997 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4006352559/tls.crt::/tmp/serving-cert-4006352559/tls.key\\\\\\\"\\\\nF0203 06:46:16.112630 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:43Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.427750 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981f8ad138cf566afc1dc984e1549e0dabb353e61469585343b399ef799f2b71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ab581feac753b27611d84de9a135dada6c09b508339e915247f806c6d69db1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\
\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v9x5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:43Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.433227 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 06:40:55.081351189 +0000 UTC Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.441867 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:43Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.466250 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f202cc16-8c9d-4e03-bd1d-0716f02a8ee7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://413ae0620494cf4c5807c78ecdfda671fd63b7aaefa96b63564212f57b34f9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4741ef3fd48469b5eef0e17e68ef7b20a3929017ff3939ee975b8a3751cfe965\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeM
ounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b741c846b0b5468ec9b52e16778e89632de68f37c0bd5b1fc375cc019a8c959c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f5dcbfe7ff012e1060f79ff8f126f7f86d92d4cd113f390c41f3bdbb7e9baf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4248f5a0e550f2e4e6aa126875469ca31b11f40cf885b3d636c4d97925cb7fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"exitCode\\\":0,\\\"fi
nishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:43Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.479124 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.479173 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.479190 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.479210 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.479224 4998 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:43Z","lastTransitionTime":"2026-02-03T06:46:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.487407 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee532c209b30e3e2e21dc079ea01aa8bde392a9f42452a24ad5a0c34ca81632d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:43Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.507884 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d22be91149878dccbc8e538045eaf67301beefef286fd92c7e167337930f99b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0c3a3dd3295e8c1865539192e21489cd44235205b1b770db8a099907473d290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:43Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.522458 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d754bb9fdbc001ab9d9a1ce51b99180c387ebda84e0a5b4d1fe9af76c6c258fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:43Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.537609 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-plgnf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0226ecd8c9fa202b4c46cf30a396c7bd55814fe45ec536e4fff2c0f09d6aa371\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5z4jf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-plgnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:43Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.552569 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-crsvv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16316a01-0118-4b01-81cb-13e869b62484\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f9d449e60417a58eed750cbff1f8c1c97a740e68bbd349f3e8b98f64e749d12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfq5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-crsvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:43Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.573402 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:43Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.582250 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.582301 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.582318 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.582343 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.582361 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:43Z","lastTransitionTime":"2026-02-03T06:46:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.590880 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dm4vz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91ddb23bc0f787c0874522cb2f92e61f895a03595a116248d27fbc2030a1cf92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t8ch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dm4vz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:43Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.604665 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-g7trx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"435a5c9b-684c-42ee-9519-13c14510718e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://994c5aef2744f3eed8f7fdbbe4836e8a63c096f5ec1ef330a75f626e54ac2dad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssscp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://58610e96b03ce7c48ff10de9c3655300c6d3a0ee834827f4d06a912b72693735\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssscp\\\",\\\"readOnly\\\":true,\\\"recursiveReadO
nly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-g7trx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:43Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.621124 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-s5wml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df2ae20-0aeb-4b1e-a408-c1903e061833\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwcm6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwcm6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:36Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-s5wml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:43Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.640900 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48b7853e-6cfe-4818-8635-2b12f2be8097\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://66250fee433e0a4da7f8a56ca4c5299eed2c156035640d6ef9e401c81a60abfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3631ce704525fba5e76870d8f108ed0e5d21e9b061d2ffc99b84c1c4755aa0e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d261e399c0e0ab21edf85442c34085baa35a4d89963fe29dfc815ec96172d14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4be2f3e13f93718e31ed1d5f95cc05eb5e2996825afabed9e742a839cf2949a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:43Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.685244 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.685306 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.685318 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.685338 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.685352 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:43Z","lastTransitionTime":"2026-02-03T06:46:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.788495 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.788545 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.788562 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.788588 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.788604 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:43Z","lastTransitionTime":"2026-02-03T06:46:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.891638 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.891709 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.891721 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.891757 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.891769 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:43Z","lastTransitionTime":"2026-02-03T06:46:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.994226 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.994384 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.994397 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.994413 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:43 crc kubenswrapper[4998]: I0203 06:46:43.994425 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:43Z","lastTransitionTime":"2026-02-03T06:46:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.097387 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.097432 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.097441 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.097483 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.097495 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:44Z","lastTransitionTime":"2026-02-03T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.200401 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.200440 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.200449 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.200462 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.200471 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:44Z","lastTransitionTime":"2026-02-03T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.304322 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.304363 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.304371 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.304387 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.304398 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:44Z","lastTransitionTime":"2026-02-03T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.407140 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.407209 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.407222 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.407246 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.407257 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:44Z","lastTransitionTime":"2026-02-03T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.426651 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 06:46:44 crc kubenswrapper[4998]: E0203 06:46:44.426840 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.426965 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s5wml" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.426999 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.427037 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:46:44 crc kubenswrapper[4998]: E0203 06:46:44.427242 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s5wml" podUID="7df2ae20-0aeb-4b1e-a408-c1903e061833" Feb 03 06:46:44 crc kubenswrapper[4998]: E0203 06:46:44.427343 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 06:46:44 crc kubenswrapper[4998]: E0203 06:46:44.427468 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.433518 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 16:43:49.740707294 +0000 UTC Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.510923 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.510971 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.510987 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.511010 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.511063 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:44Z","lastTransitionTime":"2026-02-03T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.613678 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.613814 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.613836 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.613858 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.613874 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:44Z","lastTransitionTime":"2026-02-03T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.652461 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7df2ae20-0aeb-4b1e-a408-c1903e061833-metrics-certs\") pod \"network-metrics-daemon-s5wml\" (UID: \"7df2ae20-0aeb-4b1e-a408-c1903e061833\") " pod="openshift-multus/network-metrics-daemon-s5wml" Feb 03 06:46:44 crc kubenswrapper[4998]: E0203 06:46:44.652642 4998 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 03 06:46:44 crc kubenswrapper[4998]: E0203 06:46:44.652743 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7df2ae20-0aeb-4b1e-a408-c1903e061833-metrics-certs podName:7df2ae20-0aeb-4b1e-a408-c1903e061833 nodeName:}" failed. No retries permitted until 2026-02-03 06:46:52.652719568 +0000 UTC m=+50.939413414 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7df2ae20-0aeb-4b1e-a408-c1903e061833-metrics-certs") pod "network-metrics-daemon-s5wml" (UID: "7df2ae20-0aeb-4b1e-a408-c1903e061833") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.716983 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.717038 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.717050 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.717066 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.717077 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:44Z","lastTransitionTime":"2026-02-03T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.820867 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.820918 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.820932 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.820951 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.820963 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:44Z","lastTransitionTime":"2026-02-03T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.924663 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.924730 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.924747 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.924773 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:44 crc kubenswrapper[4998]: I0203 06:46:44.924825 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:44Z","lastTransitionTime":"2026-02-03T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.027948 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.028004 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.028019 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.028040 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.028054 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:45Z","lastTransitionTime":"2026-02-03T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.131247 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.131303 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.131315 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.131337 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.131351 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:45Z","lastTransitionTime":"2026-02-03T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.233625 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.233710 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.233734 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.233776 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.233851 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:45Z","lastTransitionTime":"2026-02-03T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.337146 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.337190 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.337203 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.337222 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.337235 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:45Z","lastTransitionTime":"2026-02-03T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.434554 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 04:53:23.962634068 +0000 UTC Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.439616 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.439656 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.439665 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.439680 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.439689 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:45Z","lastTransitionTime":"2026-02-03T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.542842 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.542907 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.542921 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.542940 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.542954 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:45Z","lastTransitionTime":"2026-02-03T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.647316 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.647392 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.647431 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.647462 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.647486 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:45Z","lastTransitionTime":"2026-02-03T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.750213 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.750283 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.750305 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.750334 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.750355 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:45Z","lastTransitionTime":"2026-02-03T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.853899 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.854031 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.854051 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.854075 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.854092 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:45Z","lastTransitionTime":"2026-02-03T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.957143 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.957203 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.957218 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.957242 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:45 crc kubenswrapper[4998]: I0203 06:46:45.957257 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:45Z","lastTransitionTime":"2026-02-03T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.060554 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.060620 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.060637 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.060662 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.060679 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:46Z","lastTransitionTime":"2026-02-03T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.164225 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.164326 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.164344 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.164401 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.164421 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:46Z","lastTransitionTime":"2026-02-03T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.266906 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.266967 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.267013 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.267034 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.267049 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:46Z","lastTransitionTime":"2026-02-03T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.369368 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.369403 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.369412 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.369425 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.369433 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:46Z","lastTransitionTime":"2026-02-03T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.427243 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:46:46 crc kubenswrapper[4998]: E0203 06:46:46.427462 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.427469 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s5wml" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.427528 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.427513 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 06:46:46 crc kubenswrapper[4998]: E0203 06:46:46.427646 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s5wml" podUID="7df2ae20-0aeb-4b1e-a408-c1903e061833" Feb 03 06:46:46 crc kubenswrapper[4998]: E0203 06:46:46.427701 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 06:46:46 crc kubenswrapper[4998]: E0203 06:46:46.427920 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.435078 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 17:14:01.854937139 +0000 UTC Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.472069 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.472134 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.472151 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.472174 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.472191 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:46Z","lastTransitionTime":"2026-02-03T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.575315 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.575378 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.575402 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.575434 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.575457 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:46Z","lastTransitionTime":"2026-02-03T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.677445 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.677496 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.677514 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.677532 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.677543 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:46Z","lastTransitionTime":"2026-02-03T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.779457 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.779548 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.779595 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.779622 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.779641 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:46Z","lastTransitionTime":"2026-02-03T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.883189 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.883258 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.883278 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.883305 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.883322 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:46Z","lastTransitionTime":"2026-02-03T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.986873 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.986935 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.986951 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.986976 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:46 crc kubenswrapper[4998]: I0203 06:46:46.986995 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:46Z","lastTransitionTime":"2026-02-03T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.092070 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.092351 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.092485 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.092527 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.092549 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:47Z","lastTransitionTime":"2026-02-03T06:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.195734 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.195832 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.195854 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.195880 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.195899 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:47Z","lastTransitionTime":"2026-02-03T06:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.299901 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.299986 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.300005 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.300032 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.300050 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:47Z","lastTransitionTime":"2026-02-03T06:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.403099 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.403151 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.403168 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.403192 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.403214 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:47Z","lastTransitionTime":"2026-02-03T06:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.436181 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 20:22:45.682907915 +0000 UTC Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.505828 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.505862 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.505872 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.505887 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.505898 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:47Z","lastTransitionTime":"2026-02-03T06:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.609181 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.609259 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.609279 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.609304 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.609321 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:47Z","lastTransitionTime":"2026-02-03T06:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.712285 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.712585 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.712676 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.712750 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.712839 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:47Z","lastTransitionTime":"2026-02-03T06:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.815703 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.815979 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.816010 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.816038 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.816056 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:47Z","lastTransitionTime":"2026-02-03T06:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.918508 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.918552 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.918567 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.918587 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:47 crc kubenswrapper[4998]: I0203 06:46:47.918601 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:47Z","lastTransitionTime":"2026-02-03T06:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.022032 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.022096 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.022112 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.022135 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.022162 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:48Z","lastTransitionTime":"2026-02-03T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.125469 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.125525 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.125542 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.125565 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.125582 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:48Z","lastTransitionTime":"2026-02-03T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.228638 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.228710 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.228720 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.228740 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.228761 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:48Z","lastTransitionTime":"2026-02-03T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.331397 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.331476 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.331501 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.331533 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.331556 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:48Z","lastTransitionTime":"2026-02-03T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.394825 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.394882 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.394896 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.394917 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.394931 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:48Z","lastTransitionTime":"2026-02-03T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Feb 03 06:46:48 crc kubenswrapper[4998]: E0203 06:46:48.411356 4998 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29c0cf50-7633-4e19-9ce0-3cf9f0cae181\\\",\\\"systemUUID\\\":\\\"2c417530-aeae-4e8e-ac67-d425307cb93c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:48Z is after 
2025-08-24T17:21:41Z"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.414825 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.414865 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.414875 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.414891 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.414903 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:48Z","lastTransitionTime":"2026-02-03T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.426493 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 03 06:46:48 crc kubenswrapper[4998]: E0203 06:46:48.426301 4998 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29c0cf50-7633-4e19-9ce0-3cf9f0cae181\\\",\\\"systemUUID\\\":\\\"2c417530-aeae-4e8e-ac67-d425307cb93c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:48Z is after 
2025-08-24T17:21:41Z"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.426507 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s5wml"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.426760 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 03 06:46:48 crc kubenswrapper[4998]: E0203 06:46:48.427090 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.427203 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 03 06:46:48 crc kubenswrapper[4998]: E0203 06:46:48.427282 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 03 06:46:48 crc kubenswrapper[4998]: E0203 06:46:48.427351 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 03 06:46:48 crc kubenswrapper[4998]: E0203 06:46:48.427387 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s5wml" podUID="7df2ae20-0aeb-4b1e-a408-c1903e061833"
pod="openshift-multus/network-metrics-daemon-s5wml" podUID="7df2ae20-0aeb-4b1e-a408-c1903e061833" Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.432174 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.432223 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.432238 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.432260 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.432275 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:48Z","lastTransitionTime":"2026-02-03T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.436533 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 07:38:24.767433054 +0000 UTC Feb 03 06:46:48 crc kubenswrapper[4998]: E0203 06:46:48.449500 4998 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29c0cf50-7633-4e19-9ce0-3cf9f0cae181\\\",\\\"systemUUID\\\":\\\"2c417530-aeae-4e8e-ac67-d425307cb93c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:48Z is after 
2025-08-24T17:21:41Z"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.453506 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.453553 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.453562 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.453583 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.453594 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:48Z","lastTransitionTime":"2026-02-03T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:48 crc kubenswrapper[4998]: E0203 06:46:48.465775 4998 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29c0cf50-7633-4e19-9ce0-3cf9f0cae181\\\",\\\"systemUUID\\\":\\\"2c417530-aeae-4e8e-ac67-d425307cb93c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:48Z is after 
2025-08-24T17:21:41Z"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.469130 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.469194 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.469203 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.469216 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.469693 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:48Z","lastTransitionTime":"2026-02-03T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:48 crc kubenswrapper[4998]: E0203 06:46:48.486328 4998 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29c0cf50-7633-4e19-9ce0-3cf9f0cae181\\\",\\\"systemUUID\\\":\\\"2c417530-aeae-4e8e-ac67-d425307cb93c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:48Z is after 
2025-08-24T17:21:41Z"
Feb 03 06:46:48 crc kubenswrapper[4998]: E0203 06:46:48.486445 4998 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.488525 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.488589 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.488610 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.488636 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.488654 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:48Z","lastTransitionTime":"2026-02-03T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.591648 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.591712 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.591730 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.591755 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.591772 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:48Z","lastTransitionTime":"2026-02-03T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.694898 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.694990 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.695006 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.695028 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.695043 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:48Z","lastTransitionTime":"2026-02-03T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.797086 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.797125 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.797136 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.797153 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.797167 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:48Z","lastTransitionTime":"2026-02-03T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.900428 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.900824 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.901025 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.901185 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:48 crc kubenswrapper[4998]: I0203 06:46:48.901324 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:48Z","lastTransitionTime":"2026-02-03T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.003912 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.004220 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.004321 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.004411 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.004507 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:49Z","lastTransitionTime":"2026-02-03T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.107874 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.107925 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.107942 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.107964 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.107981 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:49Z","lastTransitionTime":"2026-02-03T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.195869 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.203555 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"]
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.207441 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:49Z is after 2025-08-24T17:21:41Z"
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.210623 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.210791 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.210886 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.210987 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.211076 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:49Z","lastTransitionTime":"2026-02-03T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.220652 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981f8ad138cf566afc1dc984e1549e0dabb353e61469585343b399ef799f2b71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ab581feac753b27611d84de9a135dada6c09b508339e915247f806c6d69db1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v9x5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:49Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.231541 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-plgnf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0226ecd8c9fa202b4c46cf30a396c7bd55814fe45ec536e4fff2c0f09d6aa371\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5z4jf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-plgnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:49Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.243993 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-crsvv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16316a01-0118-4b01-81cb-13e869b62484\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f9d449e60417a58eed750cbff1f8c1c97a740e68bbd349f3e8b98f64e749d12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfq5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-crsvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:49Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.268195 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f202cc16-8c9d-4e03-bd1d-0716f02a8ee7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://413ae0620494cf4c5807c78ecdfda671fd63b7aaefa96b63564212f57b34f9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4741ef3fd48469b5eef0e17e68ef7b20a3929017ff3939ee975b8a3751cfe965\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b741c846b0b5468ec9b52e16778e89632de68f37c0bd5b1fc375cc019a8c959c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f5dcbfe7ff012e1060f79ff8f126f7f86d92d4
cd113f390c41f3bdbb7e9baf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4248f5a0e550f2e4e6aa126875469ca31b11f40cf885b3d636c4d97925cb7fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:49Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.283902 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee532c209b30e3e2e21dc079ea01aa8bde392a9f42452a24ad5a0c34ca81632d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:49Z is after 2025-08-24T17:21:41Z"
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.298542 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d22be91149878dccbc8e538045eaf67301beefef286fd92c7e167337930f99b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0c3a3dd3295e8c1865539192e21489cd44235205b1b770db8a099907473d290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-02-03T06:46:49Z is after 2025-08-24T17:21:41Z"
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.308719 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d754bb9fdbc001ab9d9a1ce51b99180c387ebda84e0a5b4d1fe9af76c6c258fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:49Z is after 2025-08-24T17:21:41Z"
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.313611 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.313657 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.313674 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.313690 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.313701 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:49Z","lastTransitionTime":"2026-02-03T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.324220 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48b7853e-6cfe-4818-8635-2b12f2be8097\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://66250fee433e0a4da7f8a56ca4c5299eed2c156035640d6ef9e401c81a60abfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3631ce704525fba5e76870d8f108ed0e5d21e9b061d2ffc99b84c1c4755aa0e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d261e399c0e0ab21edf85442c34085baa35a4d89963fe29dfc815ec96172d14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4be2f3e13f93718e31ed1d5f95cc05eb5e2996825afabed9e742a839cf2949a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:49Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.337235 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:49Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.351266 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dm4vz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91ddb23bc0f787c0874522cb2f92e61f895a03595a116248d27fbc2030a1cf92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t8ch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dm4vz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:49Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.363871 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-g7trx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"435a5c9b-684c-42ee-9519-13c14510718e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://994c5aef2744f3eed8f7fdbbe4836e8a63c096f5ec1ef330a75f626e54ac2dad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-ssscp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://58610e96b03ce7c48ff10de9c3655300c6d3a0ee834827f4d06a912b72693735\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssscp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-g7trx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:49Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.375533 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-s5wml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df2ae20-0aeb-4b1e-a408-c1903e061833\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwcm6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwcm6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:36Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-s5wml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:49Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.393055 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c8c93fe-ba86-4899-a018-d24fb324de5c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35ab5e2fc89d8e658f04c9a5ed234dad845dcf70d39e89bd52fed8f84b6c8c68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T06:46:16Z\\\",\\\"message\\\":\\\"W0203 06:46:05.531717 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 06:46:05.532068 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770101165 cert, and key in /tmp/serving-cert-4006352559/serving-signer.crt, /tmp/serving-cert-4006352559/serving-signer.key\\\\nI0203 06:46:05.809216 1 observer_polling.go:159] Starting file observer\\\\nW0203 06:46:05.812530 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 06:46:05.812748 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 06:46:05.815997 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4006352559/tls.crt::/tmp/serving-cert-4006352559/tls.key\\\\\\\"\\\\nF0203 06:46:16.112630 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:49Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.411395 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:49Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.415991 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.416016 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.416025 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.416037 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.416047 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:49Z","lastTransitionTime":"2026-02-03T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.430831 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e25d5a00-1315-4327-aadd-fd81e45fb023\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d0b49de3f605b693ae64a05177cab0247aa357fafaba47ec21c7982f17c47f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5qjsn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:49Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.436878 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: 
Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-09 09:32:36.761446064 +0000 UTC Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.447284 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7418b1d-9f7d-48cd-aac4-6a1b85967841\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"re
adOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0
d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc28205a03a67a974a7963431c89c78a6f766b5736c413cd633730a2faca477\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc28205a03a67a974a7963431c89c78a6f766b5736c413cd633730a2faca477\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T06:46:35Z\\\",\\\"message\\\":\\\"LB{services.LB{Name:\\\\\\\"Service_openshift-console-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.88\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0203 06:46:35.512722 6480 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-cluster-version/cluster-version-operator]} name:Service_openshift-cluster-version/cluster-version-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.182:9099:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0203 06:46:35.512931 6480 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:34Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-p7b8d_openshift-ovn-kubernetes(f7418b1d-9f7d-48cd-aac4-6a1b85967841)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"r
ecursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7b8d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:49Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.518219 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.518270 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.518283 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.518301 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.518316 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:49Z","lastTransitionTime":"2026-02-03T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.621319 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.621389 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.621407 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.621433 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.621450 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:49Z","lastTransitionTime":"2026-02-03T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.724645 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.724722 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.724756 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.724821 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.724847 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:49Z","lastTransitionTime":"2026-02-03T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.828204 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.828246 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.828257 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.828271 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.828282 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:49Z","lastTransitionTime":"2026-02-03T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.933508 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.933558 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.933573 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.933592 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:49 crc kubenswrapper[4998]: I0203 06:46:49.933610 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:49Z","lastTransitionTime":"2026-02-03T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.036611 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.036670 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.036684 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.036703 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.036719 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:50Z","lastTransitionTime":"2026-02-03T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.140076 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.140125 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.140138 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.140157 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.140169 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:50Z","lastTransitionTime":"2026-02-03T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.242850 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.242899 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.242912 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.242937 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.242962 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:50Z","lastTransitionTime":"2026-02-03T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.346446 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.346862 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.347126 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.347349 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.347594 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:50Z","lastTransitionTime":"2026-02-03T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.427574 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.427658 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s5wml"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.427666 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 03 06:46:50 crc kubenswrapper[4998]: E0203 06:46:50.427756 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.427856 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 03 06:46:50 crc kubenswrapper[4998]: E0203 06:46:50.427907 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s5wml" podUID="7df2ae20-0aeb-4b1e-a408-c1903e061833"
Feb 03 06:46:50 crc kubenswrapper[4998]: E0203 06:46:50.428197 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 03 06:46:50 crc kubenswrapper[4998]: E0203 06:46:50.428367 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.437672 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 01:07:03.705889388 +0000 UTC
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.450674 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.450940 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.451029 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.451107 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.451181 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:50Z","lastTransitionTime":"2026-02-03T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.554057 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.554390 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.554527 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.554657 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.554775 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:50Z","lastTransitionTime":"2026-02-03T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.658291 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.658352 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.658373 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.658401 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.658422 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:50Z","lastTransitionTime":"2026-02-03T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.760410 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.760451 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.760463 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.760482 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.760496 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:50Z","lastTransitionTime":"2026-02-03T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.863467 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.863535 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.863553 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.863580 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.863599 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:50Z","lastTransitionTime":"2026-02-03T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.966889 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.966934 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.966947 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.966965 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:50 crc kubenswrapper[4998]: I0203 06:46:50.966979 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:50Z","lastTransitionTime":"2026-02-03T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.069659 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.069702 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.069715 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.069730 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.069743 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:51Z","lastTransitionTime":"2026-02-03T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.172184 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.172247 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.172261 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.172279 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.172291 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:51Z","lastTransitionTime":"2026-02-03T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.275010 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.275058 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.275074 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.275100 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.275116 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:51Z","lastTransitionTime":"2026-02-03T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.377656 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.377693 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.377704 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.377720 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.377734 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:51Z","lastTransitionTime":"2026-02-03T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.438666 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 12:02:17.991293911 +0000 UTC
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.480548 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.480601 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.480619 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.480643 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.480660 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:51Z","lastTransitionTime":"2026-02-03T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.583007 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.583082 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.583100 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.583128 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.583145 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:51Z","lastTransitionTime":"2026-02-03T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.685941 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.685988 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.686013 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.686031 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.686044 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:51Z","lastTransitionTime":"2026-02-03T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.788318 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.788375 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.788391 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.788414 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.788431 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:51Z","lastTransitionTime":"2026-02-03T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.891203 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.891257 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.891280 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.891299 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.891313 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:51Z","lastTransitionTime":"2026-02-03T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.993808 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.993867 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.993879 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.993894 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:51 crc kubenswrapper[4998]: I0203 06:46:51.993904 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:51Z","lastTransitionTime":"2026-02-03T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.098248 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.098297 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.098307 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.098325 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.098336 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:52Z","lastTransitionTime":"2026-02-03T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.201559 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.201609 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.201632 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.201656 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.201675 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:52Z","lastTransitionTime":"2026-02-03T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.304261 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.304693 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.304821 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.304994 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.305148 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:52Z","lastTransitionTime":"2026-02-03T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.407813 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.408220 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.408325 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.408434 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.408522 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:52Z","lastTransitionTime":"2026-02-03T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.427205 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.427254 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.427382 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 03 06:46:52 crc kubenswrapper[4998]: E0203 06:46:52.427386 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.427434 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s5wml"
Feb 03 06:46:52 crc kubenswrapper[4998]: E0203 06:46:52.427470 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 03 06:46:52 crc kubenswrapper[4998]: E0203 06:46:52.427593 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s5wml" podUID="7df2ae20-0aeb-4b1e-a408-c1903e061833"
Feb 03 06:46:52 crc kubenswrapper[4998]: E0203 06:46:52.427679 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.439863 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-07 09:16:35.119322062 +0000 UTC
Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.448139 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d754bb9fdbc001ab9d9a1ce51b99180c387ebda84e0a5b4d1fe9af76c6c258fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:52Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.467977 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-plgnf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0226ecd8c9fa202b4c46cf30a396c7bd55814fe45ec536e4fff2c0f09d6aa371\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5z4jf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-plgnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:52Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.483561 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-crsvv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16316a01-0118-4b01-81cb-13e869b62484\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f9d449e60417a58eed750cbff1f8c1c97a740e68bbd349f3e8b98f64e749d12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfq5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-crsvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:52Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.510689 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.510726 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.510737 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.510756 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.510769 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:52Z","lastTransitionTime":"2026-02-03T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.512883 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f202cc16-8c9d-4e03-bd1d-0716f02a8ee7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://413ae0620494cf4c5807c78ecdfda671fd63b7aaefa96b63564212f57b34f9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4741ef3fd48469b5eef0e17e68ef7b20a3929017ff3939ee975b8a3751cfe965\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b741c846b0b5468ec9b52e16778e89632de68f37c0bd5b1fc375cc019a8c959c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f5dcbfe7ff012e1060f79ff8f126f7f86d92d4cd113f390c41f3bdbb7e9baf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4248f5a0e550f2e4e6aa126875469ca31b11f40cf885b3d636c4d97925cb7fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:52Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.531812 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee532c209b30e3e2e21dc079ea01aa8bde392a9f42452a24ad5a0c34ca81632d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:52Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.545515 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d22be91149878dccbc8e538045eaf67301beefef286fd92c7e167337930f99b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0c3a3dd3295e8c1865539192e21489cd44235205b1b770db8a099907473d290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:52Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.556992 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-s5wml" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df2ae20-0aeb-4b1e-a408-c1903e061833\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwcm6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwcm6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:36Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-s5wml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:52Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.569991 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48b7853e-6cfe-4818-8635-2b12f2be8097\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://66250fee433e0a4da7f8a56ca4c5299eed2c156035640d6ef9e401c81a60abfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3631ce704525fba5e76870d8f108ed0e5d21e9b061d2ffc99b84c1c4755aa0e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d261e399c0e0ab21edf85442c34085baa35a4d89963fe29dfc815ec96172d14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4be2f3e13f93718e31ed1d5f95cc05eb5e2996825afabed9e742a839cf2949a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:52Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.583017 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:52Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.594818 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dm4vz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91ddb23bc0f787c0874522cb2f92e61f895a03595a116248d27fbc2030a1cf92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t8ch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dm4vz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:52Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.606061 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-g7trx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"435a5c9b-684c-42ee-9519-13c14510718e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://994c5aef2744f3eed8f7fdbbe4836e8a63c096f5ec1ef330a75f626e54ac2dad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-ssscp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://58610e96b03ce7c48ff10de9c3655300c6d3a0ee834827f4d06a912b72693735\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssscp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-g7trx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:52Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.613020 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.613272 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.613399 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.613486 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.613552 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:52Z","lastTransitionTime":"2026-02-03T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.625218 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7418b1d-9f7d-48cd-aac4-6a1b85967841\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfc28205a03a67a974a7963431c89c78a6f766b5736c413cd633730a2faca477\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc28205a03a67a974a7963431c89c78a6f766b5736c413cd633730a2faca477\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T06:46:35Z\\\",\\\"message\\\":\\\"LB{services.LB{Name:\\\\\\\"Service_openshift-console-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.88\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0203 06:46:35.512722 6480 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-cluster-version/cluster-version-operator]} name:Service_openshift-cluster-version/cluster-version-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.182:9099:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0203 06:46:35.512931 6480 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:34Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-p7b8d_openshift-ovn-kubernetes(f7418b1d-9f7d-48cd-aac4-6a1b85967841)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"r
ecursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7b8d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:52Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.638656 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c8c93fe-ba86-4899-a018-d24fb324de5c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35ab5e2fc89d8e658f04c9a5ed234dad845dcf70d39e89bd52fed8f84b6c8c68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T06:46:16Z\\\",\\\"message\\\":\\\"W0203 06:46:05.531717 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 06:46:05.532068 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770101165 cert, and key in /tmp/serving-cert-4006352559/serving-signer.crt, /tmp/serving-cert-4006352559/serving-signer.key\\\\nI0203 06:46:05.809216 1 observer_polling.go:159] Starting file observer\\\\nW0203 06:46:05.812530 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 06:46:05.812748 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 06:46:05.815997 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4006352559/tls.crt::/tmp/serving-cert-4006352559/tls.key\\\\\\\"\\\\nF0203 06:46:16.112630 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:52Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.656857 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5102bfac-a880-4d5d-9b91-5660ef907547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479c3c16ba3884ad61ade6ebbeb9106ee755a65deb7b35d2f1b5f4286106cf6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://168b94ff897b67371c327429bf5be07500e7f0850442d898d18bd58b56d81378\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\
\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a620b1d9fc8b57fa7c51cf93b34dbe13149fba7a8ba0fa6f89a04704cf6bedc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2aa82451c5d9674cd665f7a0888ae62beaf3007867e70775198813f4ae57eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec2aa82451c5d9674cd665f7a0888ae62beaf3007867e70775198813f4ae57eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:52Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.669964 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:52Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.685528 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e25d5a00-1315-4327-aadd-fd81e45fb023\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d0b49de3f605b693ae64a05177cab0247aa357fafaba47ec21c7982f17c47f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\
\\":[{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:26Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\"
,\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5qjsn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:52Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.697661 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:52Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.710327 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981f8ad138cf566afc1dc984e1549e0dabb353e61469585343b399ef799f2b71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ab581feac753b27611d84de9a135dada6c09b508339e915247f806c6d69db1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v9x5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:52Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.716177 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.716213 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.716225 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.716241 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.716254 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:52Z","lastTransitionTime":"2026-02-03T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.738760 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7df2ae20-0aeb-4b1e-a408-c1903e061833-metrics-certs\") pod \"network-metrics-daemon-s5wml\" (UID: \"7df2ae20-0aeb-4b1e-a408-c1903e061833\") " pod="openshift-multus/network-metrics-daemon-s5wml" Feb 03 06:46:52 crc kubenswrapper[4998]: E0203 06:46:52.738920 4998 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 03 06:46:52 crc kubenswrapper[4998]: E0203 06:46:52.738980 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7df2ae20-0aeb-4b1e-a408-c1903e061833-metrics-certs podName:7df2ae20-0aeb-4b1e-a408-c1903e061833 nodeName:}" failed. No retries permitted until 2026-02-03 06:47:08.738961772 +0000 UTC m=+67.025655578 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7df2ae20-0aeb-4b1e-a408-c1903e061833-metrics-certs") pod "network-metrics-daemon-s5wml" (UID: "7df2ae20-0aeb-4b1e-a408-c1903e061833") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.819222 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.819292 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.819314 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.819348 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.819370 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:52Z","lastTransitionTime":"2026-02-03T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.925542 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.926042 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.926233 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.927015 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:52 crc kubenswrapper[4998]: I0203 06:46:52.927278 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:52Z","lastTransitionTime":"2026-02-03T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.030038 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.030087 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.030098 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.030115 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.030128 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:53Z","lastTransitionTime":"2026-02-03T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.133180 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.133254 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.133273 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.133297 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.133315 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:53Z","lastTransitionTime":"2026-02-03T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.236671 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.236719 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.236733 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.236750 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.236761 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:53Z","lastTransitionTime":"2026-02-03T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.339530 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.339586 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.339609 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.339637 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.339656 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:53Z","lastTransitionTime":"2026-02-03T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.440043 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-17 16:36:14.665359826 +0000 UTC Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.442447 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.442515 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.442536 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.442562 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.442579 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:53Z","lastTransitionTime":"2026-02-03T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.545902 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.545965 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.545980 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.546001 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.546015 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:53Z","lastTransitionTime":"2026-02-03T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.648952 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.648998 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.649010 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.649029 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.649044 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:53Z","lastTransitionTime":"2026-02-03T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.751911 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.751953 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.751963 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.751980 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.751991 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:53Z","lastTransitionTime":"2026-02-03T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.854133 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.854182 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.854197 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.854217 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.854232 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:53Z","lastTransitionTime":"2026-02-03T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.957367 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.957404 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.957411 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.957424 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:53 crc kubenswrapper[4998]: I0203 06:46:53.957432 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:53Z","lastTransitionTime":"2026-02-03T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.060275 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.060313 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.060325 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.060341 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.060351 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:54Z","lastTransitionTime":"2026-02-03T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.155760 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:46:54 crc kubenswrapper[4998]: E0203 06:46:54.155982 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:26.155952208 +0000 UTC m=+84.442646204 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.161981 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.162003 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.162011 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.162024 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.162033 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:54Z","lastTransitionTime":"2026-02-03T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.257396 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.257441 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.257460 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.257480 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 06:46:54 crc kubenswrapper[4998]: E0203 06:46:54.257605 4998 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 03 06:46:54 crc kubenswrapper[4998]: E0203 06:46:54.257619 4998 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 03 06:46:54 crc kubenswrapper[4998]: E0203 06:46:54.257629 4998 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 06:46:54 crc kubenswrapper[4998]: E0203 06:46:54.257661 4998 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 03 06:46:54 crc kubenswrapper[4998]: E0203 06:46:54.257727 4998 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 03 06:46:54 crc kubenswrapper[4998]: E0203 06:46:54.257737 4998 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 03 06:46:54 crc kubenswrapper[4998]: E0203 06:46:54.257743 4998 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod 
openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 06:46:54 crc kubenswrapper[4998]: E0203 06:46:54.257858 4998 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 03 06:46:54 crc kubenswrapper[4998]: E0203 06:46:54.257684 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-03 06:47:26.257672195 +0000 UTC m=+84.544366001 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 06:46:54 crc kubenswrapper[4998]: E0203 06:46:54.257945 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-03 06:47:26.257908812 +0000 UTC m=+84.544602648 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 03 06:46:54 crc kubenswrapper[4998]: E0203 06:46:54.257967 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-03 06:47:26.257955163 +0000 UTC m=+84.544648999 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 06:46:54 crc kubenswrapper[4998]: E0203 06:46:54.257989 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-03 06:47:26.257977884 +0000 UTC m=+84.544671730 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.264544 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.264662 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.264675 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.264688 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.264697 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:54Z","lastTransitionTime":"2026-02-03T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.368516 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.368572 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.368588 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.368611 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.368629 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:54Z","lastTransitionTime":"2026-02-03T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.427006 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.427062 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.427111 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.427143 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-s5wml" Feb 03 06:46:54 crc kubenswrapper[4998]: E0203 06:46:54.427287 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 06:46:54 crc kubenswrapper[4998]: E0203 06:46:54.427429 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 06:46:54 crc kubenswrapper[4998]: E0203 06:46:54.427545 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 06:46:54 crc kubenswrapper[4998]: E0203 06:46:54.427614 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s5wml" podUID="7df2ae20-0aeb-4b1e-a408-c1903e061833" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.440984 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-07 07:14:01.180460657 +0000 UTC Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.471577 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.471638 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.471653 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.471669 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.471681 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:54Z","lastTransitionTime":"2026-02-03T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.574408 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.574450 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.574466 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.574486 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.574501 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:54Z","lastTransitionTime":"2026-02-03T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.677798 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.677847 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.677861 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.677879 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.677892 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:54Z","lastTransitionTime":"2026-02-03T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.780471 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.780554 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.780577 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.780605 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.780630 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:54Z","lastTransitionTime":"2026-02-03T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.883885 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.883937 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.883955 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.883976 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.883989 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:54Z","lastTransitionTime":"2026-02-03T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.986759 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.986834 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.986846 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.986866 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:54 crc kubenswrapper[4998]: I0203 06:46:54.986879 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:54Z","lastTransitionTime":"2026-02-03T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.090237 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.090307 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.090337 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.090366 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.090388 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:55Z","lastTransitionTime":"2026-02-03T06:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.196245 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.197353 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.197401 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.197424 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.197437 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:55Z","lastTransitionTime":"2026-02-03T06:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.300012 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.300074 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.300087 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.300103 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.300114 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:55Z","lastTransitionTime":"2026-02-03T06:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.403341 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.403409 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.403426 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.403450 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.403469 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:55Z","lastTransitionTime":"2026-02-03T06:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.441974 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 22:22:57.399210641 +0000 UTC Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.506319 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.506413 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.506428 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.506448 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.506464 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:55Z","lastTransitionTime":"2026-02-03T06:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.609033 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.609071 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.609083 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.609101 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.609115 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:55Z","lastTransitionTime":"2026-02-03T06:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.711206 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.711236 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.711246 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.711259 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.711269 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:55Z","lastTransitionTime":"2026-02-03T06:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.814637 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.815015 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.815182 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.815396 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.815557 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:55Z","lastTransitionTime":"2026-02-03T06:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.918641 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.918688 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.918701 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.918718 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:55 crc kubenswrapper[4998]: I0203 06:46:55.918731 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:55Z","lastTransitionTime":"2026-02-03T06:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.022089 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.022132 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.022144 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.022161 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.022173 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:56Z","lastTransitionTime":"2026-02-03T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.125218 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.125274 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.125289 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.125310 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.125322 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:56Z","lastTransitionTime":"2026-02-03T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.228412 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.228488 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.228511 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.228544 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.228566 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:56Z","lastTransitionTime":"2026-02-03T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.331696 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.331753 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.331771 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.331824 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.331843 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:56Z","lastTransitionTime":"2026-02-03T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.426820 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:46:56 crc kubenswrapper[4998]: E0203 06:46:56.426950 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.427091 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s5wml" Feb 03 06:46:56 crc kubenswrapper[4998]: E0203 06:46:56.427331 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s5wml" podUID="7df2ae20-0aeb-4b1e-a408-c1903e061833" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.427361 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 06:46:56 crc kubenswrapper[4998]: E0203 06:46:56.427513 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.427377 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:46:56 crc kubenswrapper[4998]: E0203 06:46:56.427624 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.434153 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.434183 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.434191 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.434201 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.434210 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:56Z","lastTransitionTime":"2026-02-03T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.442218 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 09:46:17.107675285 +0000 UTC Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.536968 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.537013 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.537027 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.537045 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.537059 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:56Z","lastTransitionTime":"2026-02-03T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.640467 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.640508 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.640526 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.640551 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.640568 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:56Z","lastTransitionTime":"2026-02-03T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.743313 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.743352 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.743363 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.743378 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.743391 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:56Z","lastTransitionTime":"2026-02-03T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.845904 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.845984 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.846007 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.846038 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.846064 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:56Z","lastTransitionTime":"2026-02-03T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.948477 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.948551 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.948562 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.948577 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:56 crc kubenswrapper[4998]: I0203 06:46:56.948586 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:56Z","lastTransitionTime":"2026-02-03T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.051988 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.052048 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.052070 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.052098 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.052123 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:57Z","lastTransitionTime":"2026-02-03T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.155299 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.155351 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.155370 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.155393 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.155409 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:57Z","lastTransitionTime":"2026-02-03T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.258407 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.258455 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.258468 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.258552 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.258616 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:57Z","lastTransitionTime":"2026-02-03T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.361728 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.361758 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.361766 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.361795 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.361806 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:57Z","lastTransitionTime":"2026-02-03T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.443688 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 17:34:23.549640132 +0000 UTC Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.465657 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.465726 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.465740 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.465761 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.465796 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:57Z","lastTransitionTime":"2026-02-03T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.568019 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.568045 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.568053 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.568066 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.568079 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:57Z","lastTransitionTime":"2026-02-03T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.670776 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.671047 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.671129 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.671210 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.671279 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:57Z","lastTransitionTime":"2026-02-03T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.773540 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.773591 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.773607 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.773629 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.773643 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:57Z","lastTransitionTime":"2026-02-03T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.877210 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.877273 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.877292 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.877319 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.877340 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:57Z","lastTransitionTime":"2026-02-03T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.979918 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.979967 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.979978 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.979996 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:57 crc kubenswrapper[4998]: I0203 06:46:57.980367 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:57Z","lastTransitionTime":"2026-02-03T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.083673 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.084185 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.084349 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.084504 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.084669 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:58Z","lastTransitionTime":"2026-02-03T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.188307 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.188646 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.188753 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.188904 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.189040 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:58Z","lastTransitionTime":"2026-02-03T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.291888 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.291940 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.291960 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.291985 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.292002 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:58Z","lastTransitionTime":"2026-02-03T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.394551 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.394586 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.394595 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.394607 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.394615 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:58Z","lastTransitionTime":"2026-02-03T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.427129 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.427194 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s5wml" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.427270 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.427324 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 06:46:58 crc kubenswrapper[4998]: E0203 06:46:58.427864 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s5wml" podUID="7df2ae20-0aeb-4b1e-a408-c1903e061833" Feb 03 06:46:58 crc kubenswrapper[4998]: E0203 06:46:58.428009 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.428186 4998 scope.go:117] "RemoveContainer" containerID="bfc28205a03a67a974a7963431c89c78a6f766b5736c413cd633730a2faca477" Feb 03 06:46:58 crc kubenswrapper[4998]: E0203 06:46:58.428348 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 06:46:58 crc kubenswrapper[4998]: E0203 06:46:58.428239 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.444813 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 16:38:51.90271645 +0000 UTC Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.497176 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.497209 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.497217 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.497231 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.497239 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:58Z","lastTransitionTime":"2026-02-03T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.600733 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.600853 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.600871 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.600930 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.600949 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:58Z","lastTransitionTime":"2026-02-03T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.703731 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.703812 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.703830 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.703852 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.703864 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:58Z","lastTransitionTime":"2026-02-03T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.785667 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.785701 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.785708 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.785723 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.785733 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:58Z","lastTransitionTime":"2026-02-03T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:58 crc kubenswrapper[4998]: E0203 06:46:58.796407 4998 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29c0cf50-7633-4e19-9ce0-3cf9f0cae181\\\",\\\"systemUUID\\\":\\\"2c417530-aeae-4e8e-ac67-d425307cb93c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:58Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.799468 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.799497 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.799522 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.799535 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.799543 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:58Z","lastTransitionTime":"2026-02-03T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:58 crc kubenswrapper[4998]: E0203 06:46:58.810415 4998 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29c0cf50-7633-4e19-9ce0-3cf9f0cae181\\\",\\\"systemUUID\\\":\\\"2c417530-aeae-4e8e-ac67-d425307cb93c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:58Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.814608 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.814649 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.814661 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.814686 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.814694 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:58Z","lastTransitionTime":"2026-02-03T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:58 crc kubenswrapper[4998]: E0203 06:46:58.825012 4998 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29c0cf50-7633-4e19-9ce0-3cf9f0cae181\\\",\\\"systemUUID\\\":\\\"2c417530-aeae-4e8e-ac67-d425307cb93c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:58Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.829298 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.829364 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.829382 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.829419 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.829437 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:58Z","lastTransitionTime":"2026-02-03T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:58 crc kubenswrapper[4998]: E0203 06:46:58.845627 4998 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29c0cf50-7633-4e19-9ce0-3cf9f0cae181\\\",\\\"systemUUID\\\":\\\"2c417530-aeae-4e8e-ac67-d425307cb93c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:58Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.849390 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.849502 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.849678 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.849858 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.849957 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:58Z","lastTransitionTime":"2026-02-03T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:58 crc kubenswrapper[4998]: E0203 06:46:58.866249 4998 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29c0cf50-7633-4e19-9ce0-3cf9f0cae181\\\",\\\"systemUUID\\\":\\\"2c417530-aeae-4e8e-ac67-d425307cb93c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:58Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:58 crc kubenswrapper[4998]: E0203 06:46:58.866591 4998 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.869579 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.869704 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.869774 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.870071 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.870141 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:58Z","lastTransitionTime":"2026-02-03T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.972428 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.972456 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.972468 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.972484 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:58 crc kubenswrapper[4998]: I0203 06:46:58.972494 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:58Z","lastTransitionTime":"2026-02-03T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.075105 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.075151 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.075161 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.075176 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.075187 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:59Z","lastTransitionTime":"2026-02-03T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.177918 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.177977 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.177992 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.178013 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.178029 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:59Z","lastTransitionTime":"2026-02-03T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.280552 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.280612 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.280628 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.280656 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.280673 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:59Z","lastTransitionTime":"2026-02-03T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.383984 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.384062 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.384082 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.384097 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.384108 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:59Z","lastTransitionTime":"2026-02-03T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.445918 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 01:40:25.951291129 +0000 UTC Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.487020 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.487056 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.487070 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.487088 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.487101 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:59Z","lastTransitionTime":"2026-02-03T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.590078 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.590112 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.590120 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.590135 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.590153 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:59Z","lastTransitionTime":"2026-02-03T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.692664 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.692899 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.693018 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.693100 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.693175 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:59Z","lastTransitionTime":"2026-02-03T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.756681 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7b8d_f7418b1d-9f7d-48cd-aac4-6a1b85967841/ovnkube-controller/2.log" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.757561 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7b8d_f7418b1d-9f7d-48cd-aac4-6a1b85967841/ovnkube-controller/1.log" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.760727 4998 generic.go:334] "Generic (PLEG): container finished" podID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerID="ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d" exitCode=1 Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.760791 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" event={"ID":"f7418b1d-9f7d-48cd-aac4-6a1b85967841","Type":"ContainerDied","Data":"ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d"} Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.760845 4998 scope.go:117] "RemoveContainer" containerID="bfc28205a03a67a974a7963431c89c78a6f766b5736c413cd633730a2faca477" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.761425 4998 scope.go:117] "RemoveContainer" containerID="ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d" Feb 03 06:46:59 crc kubenswrapper[4998]: E0203 06:46:59.761562 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-p7b8d_openshift-ovn-kubernetes(f7418b1d-9f7d-48cd-aac4-6a1b85967841)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.781561 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c8c93fe-ba86-4899-a018-d24fb324de5c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35ab5e2fc89d8e658f04c9a5ed234dad845dcf70d39e89bd52fed8f84b6c8c68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T06:46:16Z\\\",\\\"message\\\":\\\"W0203 06:46:05.531717 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 06:46:05.532068 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770101165 cert, and key in /tmp/serving-cert-4006352559/serving-signer.crt, /tmp/serving-cert-4006352559/serving-signer.key\\\\nI0203 06:46:05.809216 1 observer_polling.go:159] Starting file observer\\\\nW0203 06:46:05.812530 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 06:46:05.812748 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 06:46:05.815997 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4006352559/tls.crt::/tmp/serving-cert-4006352559/tls.key\\\\\\\"\\\\nF0203 06:46:16.112630 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:59Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.795582 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.795619 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.795629 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.795652 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.795663 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:59Z","lastTransitionTime":"2026-02-03T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.797124 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5102bfac-a880-4d5d-9b91-5660ef907547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479c3c16ba3884ad61ade6ebbeb9106ee755a65deb7b35d2f1b5f4286106cf6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://168b94ff897b67371c327429bf5be07500e7f0850442d898d18bd58b56d81378\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a620b1d9fc8b57fa7c51cf93b34dbe13149fba7a8ba0fa6f89a04704cf6bedc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2aa82451c5d9674cd665f7a0888ae62beaf3007867e70775198813f4ae57eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec2aa82451c5d9674cd665f7a0888ae62beaf3007867e70775198813f4ae57eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:59Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.812545 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:59Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.829121 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e25d5a00-1315-4327-aadd-fd81e45fb023\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d0b49de3f605b693ae64a05177cab0247aa357fafaba47ec21c7982f17c47f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5qjsn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:59Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.846980 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7418b1d-9f7d-48cd-aac4-6a1b85967841\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab345d6c4fe2e5b5bbfc286634ac14e24468d235
603c35cc3c573ad6b15d1b5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc28205a03a67a974a7963431c89c78a6f766b5736c413cd633730a2faca477\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T06:46:35Z\\\",\\\"message\\\":\\\"LB{services.LB{Name:\\\\\\\"Service_openshift-console-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.88\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0203 06:46:35.512722 6480 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-cluster-version/cluster-version-operator]} name:Service_openshift-cluster-version/cluster-version-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.182:9099:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0203 06:46:35.512931 6480 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:34Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T06:46:59Z\\\",\\\"message\\\":\\\"ubernetes/ovnkube-control-plane-749d76644c-g7trx openshift-image-registry/node-ca-crsvv openshift-multus/multus-additional-cni-plugins-5qjsn openshift-network-console/networking-console-plugin-85b44fc459-gdk6g openshift-network-diagnostics/network-check-source-55646444c4-trplf openshift-kube-apiserver/kube-apiserver-crc]\\\\nI0203 06:46:59.679203 6741 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI0203 06:46:59.679227 6741 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0203 06:46:59.679250 6741 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0203 06:46:59.679266 6741 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI0203 06:46:59.679282 6741 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI0203 06:46:59.679293 6741 default_network_controller.go:776] 
Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0203 06:46:59.679317 6741 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0203 06:46:59.679402 6741 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recur
siveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7b8d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:59Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.859675 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:59Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.870469 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981f8ad138cf566afc1dc984e1549e0dabb353e61469585343b399ef799f2b71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ab581feac753b27611d84de9a135dada6c09b508339e915247f806c6d69db1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v9x5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:59Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.889968 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f202cc16-8c9d-4e03-bd1d-0716f02a8ee7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://413ae0620494cf4c5807c78ecdfda671fd63b7aaefa96b63564212f57b34f9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4741ef3fd48469b5eef0e17e68ef7b20a3929017ff3939ee975b8a3751cfe965\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\"
:0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b741c846b0b5468ec9b52e16778e89632de68f37c0bd5b1fc375cc019a8c959c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f5dcbfe7ff012e1060f79ff8f126f7f86d92d4cd113f390c41f3bdbb7e9baf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4248f5a0e550f2e4e6aa126875469ca31b11f40cf885b3d636c4d97925cb7fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"contai
nerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:59Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.897999 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.898031 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.898039 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.898052 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.898060 4998 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:59Z","lastTransitionTime":"2026-02-03T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.904148 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee532c209b30e3e2e21dc079ea01aa8bde392a9f42452a24ad5a0c34ca81632d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:59Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.921031 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d22be91149878dccbc8e538045eaf67301beefef286fd92c7e167337930f99b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0c3a3dd3295e8c1865539192e21489cd44235205b1b770db8a099907473d290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:59Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.932822 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d754bb9fdbc001ab9d9a1ce51b99180c387ebda84e0a5b4d1fe9af76c6c258fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:59Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.944077 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-plgnf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0226ecd8c9fa202b4c46cf30a396c7bd55814fe45ec536e4fff2c0f09d6aa371\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5z4jf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-plgnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:59Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.955360 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-crsvv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16316a01-0118-4b01-81cb-13e869b62484\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f9d449e60417a58eed750cbff1f8c1c97a740e68bbd349f3e8b98f64e749d12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfq5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-crsvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:59Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.969359 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48b7853e-6cfe-4818-8635-2b12f2be8097\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://66250fee433e0a4da7f8a56ca4c5299eed2c156035640d6ef9e401c81a60abfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3631ce704525fba5e76870d8f108ed0e5d21e9b061d2ffc99b84c1c4755aa0e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d261e399c0e0ab21edf85442c34085baa35a4d89963fe29dfc815ec96172d14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4be2f3e13f93718e31ed1d5f95cc05eb5e2996825afabed9e742a839cf2949a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:59Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.981168 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:59Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.992557 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dm4vz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91ddb23bc0f787c0874522cb2f92e61f895a03595a116248d27fbc2030a1cf92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t8ch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dm4vz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:46:59Z is after 2025-08-24T17:21:41Z" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.999904 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:46:59 crc kubenswrapper[4998]: I0203 06:46:59.999944 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:46:59.999954 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:46:59.999969 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:46:59.999981 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:46:59Z","lastTransitionTime":"2026-02-03T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.002448 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-g7trx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"435a5c9b-684c-42ee-9519-13c14510718e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://994c5aef2744f3eed8f7fdbbe4836e8a63c096f5ec1ef330a75f626e54ac2dad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssscp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://58610e96b03ce7c48ff10de9c3655300c6d3a0ee834827f4d06a912b72693735\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssscp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-g7trx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:00Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.013059 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-s5wml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df2ae20-0aeb-4b1e-a408-c1903e061833\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwcm6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwcm6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:36Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-s5wml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:00Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:00 crc 
kubenswrapper[4998]: I0203 06:47:00.102132 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.102178 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.102189 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.102206 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.102218 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:00Z","lastTransitionTime":"2026-02-03T06:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.204948 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.204991 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.205008 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.205033 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.205050 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:00Z","lastTransitionTime":"2026-02-03T06:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.308904 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.308960 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.308982 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.309014 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.309042 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:00Z","lastTransitionTime":"2026-02-03T06:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.411730 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.411805 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.411817 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.411837 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.411849 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:00Z","lastTransitionTime":"2026-02-03T06:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.426742 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.426949 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s5wml" Feb 03 06:47:00 crc kubenswrapper[4998]: E0203 06:47:00.427008 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.427027 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.427049 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:47:00 crc kubenswrapper[4998]: E0203 06:47:00.427150 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s5wml" podUID="7df2ae20-0aeb-4b1e-a408-c1903e061833" Feb 03 06:47:00 crc kubenswrapper[4998]: E0203 06:47:00.427289 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 06:47:00 crc kubenswrapper[4998]: E0203 06:47:00.427588 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.446052 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-09 23:09:20.967193106 +0000 UTC Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.514822 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.515203 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.515274 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.515407 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.515485 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:00Z","lastTransitionTime":"2026-02-03T06:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.619395 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.619464 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.619476 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.619501 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.619516 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:00Z","lastTransitionTime":"2026-02-03T06:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.722218 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.722270 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.722280 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.722301 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.722317 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:00Z","lastTransitionTime":"2026-02-03T06:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.767892 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7b8d_f7418b1d-9f7d-48cd-aac4-6a1b85967841/ovnkube-controller/2.log" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.825253 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.825315 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.825334 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.825367 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.825385 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:00Z","lastTransitionTime":"2026-02-03T06:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.928216 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.928277 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.928300 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.928332 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:00 crc kubenswrapper[4998]: I0203 06:47:00.928355 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:00Z","lastTransitionTime":"2026-02-03T06:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.031211 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.031686 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.031800 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.031899 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.031965 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:01Z","lastTransitionTime":"2026-02-03T06:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.134754 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.135163 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.136120 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.136182 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.136208 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:01Z","lastTransitionTime":"2026-02-03T06:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.239759 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.239878 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.239901 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.239929 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.239948 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:01Z","lastTransitionTime":"2026-02-03T06:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.343672 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.343750 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.343774 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.343838 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.343861 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:01Z","lastTransitionTime":"2026-02-03T06:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.446311 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.446359 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.446376 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.446402 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.446415 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:01Z","lastTransitionTime":"2026-02-03T06:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.446994 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-19 18:01:00.603634951 +0000 UTC Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.549124 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.549201 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.549223 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.549252 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.549275 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:01Z","lastTransitionTime":"2026-02-03T06:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.652246 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.652315 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.652340 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.652371 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.652395 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:01Z","lastTransitionTime":"2026-02-03T06:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.756007 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.756046 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.756060 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.756079 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.756092 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:01Z","lastTransitionTime":"2026-02-03T06:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.859031 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.859097 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.859120 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.859149 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.859169 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:01Z","lastTransitionTime":"2026-02-03T06:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.962225 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.962270 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.962283 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.962298 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:01 crc kubenswrapper[4998]: I0203 06:47:01.962308 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:01Z","lastTransitionTime":"2026-02-03T06:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.065735 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.065817 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.065829 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.065846 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.065858 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:02Z","lastTransitionTime":"2026-02-03T06:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.168671 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.168719 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.168734 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.168755 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.168768 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:02Z","lastTransitionTime":"2026-02-03T06:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.271149 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.271191 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.271201 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.271216 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.271225 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:02Z","lastTransitionTime":"2026-02-03T06:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.375901 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.375941 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.375952 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.375977 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.375989 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:02Z","lastTransitionTime":"2026-02-03T06:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.426398 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.426487 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:47:02 crc kubenswrapper[4998]: E0203 06:47:02.426644 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.426679 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s5wml" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.426741 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:47:02 crc kubenswrapper[4998]: E0203 06:47:02.426836 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 06:47:02 crc kubenswrapper[4998]: E0203 06:47:02.426987 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-s5wml" podUID="7df2ae20-0aeb-4b1e-a408-c1903e061833" Feb 03 06:47:02 crc kubenswrapper[4998]: E0203 06:47:02.427022 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.444172 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.447380 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-28 00:01:04.624852528 +0000 UTC
Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.455797 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981f8ad138cf566afc1dc984e1549e0dabb353e61469585343b399ef799f2b71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ab581feac753b27611d84de9a135dada6c09b508339e915247f806c6d69db1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v9x5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:02Z is after 2025-08-24T17:21:41Z"
Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.470310 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d22be91149878dccbc8e538045eaf67301beefef286fd92c7e167337930f99b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0c3a3dd3295e8c1865539192e21489cd44235205b1b770db8a099907473d290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:02Z is after 2025-08-24T17:21:41Z"
Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.478900 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.478943 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.478954 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.478980 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.478992 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:02Z","lastTransitionTime":"2026-02-03T06:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.481729 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d754bb9fdbc001ab9d9a1ce51b99180c387ebda84e0a5b4d1fe9af76c6c258fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:02Z is after 2025-08-24T17:21:41Z"
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0226ecd8c9fa202b4c46cf30a396c7bd55814fe45ec536e4fff2c0f09d6aa371\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5z4jf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-plgnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:02Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.503265 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-crsvv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16316a01-0118-4b01-81cb-13e869b62484\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f9d449e60417a58eed750cbff1f8c1c97a740e68bbd349f3e8b98f64e749d12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfq5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-crsvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:02Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.526580 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f202cc16-8c9d-4e03-bd1d-0716f02a8ee7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://413ae0620494cf4c5807c78ecdfda671fd63b7aaefa96b63564212f57b34f9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4741ef3fd48469b5eef0e17e68ef7b20a3929017ff3939ee975b8a3751cfe965\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b741c846b0b5468ec9b52e16778e89632de68f37c0bd5b1fc375cc019a8c959c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f5dcbfe7ff012e1060f79ff8f126f7f86d92d4
cd113f390c41f3bdbb7e9baf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4248f5a0e550f2e4e6aa126875469ca31b11f40cf885b3d636c4d97925cb7fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:02Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.541145 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee532c209b30e3e2e21dc079ea01aa8bde392a9f42452a24ad5a0c34ca81632d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:02Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.555911 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-g7trx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"435a5c9b-684c-42ee-9519-13c14510718e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://994c5aef2744f3eed8f7fdbbe4836e8a63c096f5ec1ef330a75f626e54ac2dad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssscp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://58610e96b03ce7c48ff10de9c3655300c6d3a0ee834827f4d06a912b72693735\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssscp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.1
68.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-g7trx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:02Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.566565 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-s5wml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df2ae20-0aeb-4b1e-a408-c1903e061833\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwcm6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwcm6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:36Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-s5wml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:02Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.577827 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48b7853e-6cfe-4818-8635-2b12f2be8097\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://66250fee433e0a4da7f8a56ca4c5299eed2c156035640d6ef9e401c81a60abfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3631ce704525fba5e76870d8f108ed0e5d21e9b061d2ffc99b84c1c4755aa0e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d261e399c0e0ab21edf85442c34085baa35a4d89963fe29dfc815ec96172d14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\
":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4be2f3e13f93718e31ed1d5f95cc05eb5e2996825afabed9e742a839cf2949a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:02Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.581520 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.581558 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.581574 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.581589 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.581601 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:02Z","lastTransitionTime":"2026-02-03T06:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.590286 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:02Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.603655 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dm4vz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91ddb23bc0f787c0874522cb2f92e61f895a03595a116248d27fbc2030a1cf92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t8ch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dm4vz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:02Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.617941 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e25d5a00-1315-4327-aadd-fd81e45fb023\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d0b49de3f605b693ae64a05177cab0247aa357fafaba47ec21c7982f17c47f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5qjsn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-02-03T06:47:02Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.641523 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7418b1d-9f7d-48cd-aac4-6a1b85967841\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc28205a03a67a974a7963431c89c78a6f766b5736c413cd633730a2faca477\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T06:46:35Z\\\",\\\"message\\\":\\\"LB{services.LB{Name:\\\\\\\"Service_openshift-console-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.88\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0203 06:46:35.512722 6480 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-cluster-version/cluster-version-operator]} name:Service_openshift-cluster-version/cluster-version-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.182:9099:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0203 06:46:35.512931 6480 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:34Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T06:46:59Z\\\",\\\"message\\\":\\\"ubernetes/ovnkube-control-plane-749d76644c-g7trx openshift-image-registry/node-ca-crsvv openshift-multus/multus-additional-cni-plugins-5qjsn openshift-network-console/networking-console-plugin-85b44fc459-gdk6g openshift-network-diagnostics/network-check-source-55646444c4-trplf openshift-kube-apiserver/kube-apiserver-crc]\\\\nI0203 06:46:59.679203 6741 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI0203 06:46:59.679227 6741 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0203 06:46:59.679250 6741 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0203 06:46:59.679266 6741 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI0203 06:46:59.679282 6741 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI0203 06:46:59.679293 6741 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0203 06:46:59.679317 6741 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0203 06:46:59.679402 6741 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7b8d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:02Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.654555 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c8c93fe-ba86-4899-a018-d24fb324de5c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-c
luster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35ab5e2fc89d8e658f04c9a5ed234dad845dcf70d39e89bd52fed8f84b6c8c68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T06:46:16Z\\\",\\\"message\\\":\\\"W0203 06:46:05.531717 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 06:46:05.532068 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770101165 cert, and key in /tmp/serving-cert-4006352559/serving-signer.crt, /tmp/serving-cert-4006352559/serving-signer.key\\\\nI0203 06:46:05.809216 1 observer_polling.go:159] Starting file observer\\\\nW0203 06:46:05.812530 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 06:46:05.812748 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 06:46:05.815997 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4006352559/tls.crt::/tmp/serving-cert-4006352559/tls.key\\\\\\\"\\\\nF0203 06:46:16.112630 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:02Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.666208 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5102bfac-a880-4d5d-9b91-5660ef907547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479c3c16ba3884ad61ade6ebbeb9106ee755a65deb7b35d2f1b5f4286106cf6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://168b94ff897b67371c327429bf5be07500e7f0850442d898d18bd58b56d81378\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a620b1d9fc8b57fa7c51cf93b34dbe13149fba7a8ba0fa6f89a04704cf6bedc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2aa82451c5d9674cd665f7a0888ae62beaf3007867e70775198813f4ae57eb\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec2aa82451c5d9674cd665f7a0888ae62beaf3007867e70775198813f4ae57eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:02Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.681420 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:02Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.683411 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.683450 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.683459 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.683473 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.683485 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:02Z","lastTransitionTime":"2026-02-03T06:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.785222 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.785254 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.785263 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.785275 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.785284 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:02Z","lastTransitionTime":"2026-02-03T06:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.887439 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.887476 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.887484 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.887497 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.887505 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:02Z","lastTransitionTime":"2026-02-03T06:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.991213 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.991274 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.991294 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.991319 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:02 crc kubenswrapper[4998]: I0203 06:47:02.991338 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:02Z","lastTransitionTime":"2026-02-03T06:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:03 crc kubenswrapper[4998]: I0203 06:47:03.094264 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:03 crc kubenswrapper[4998]: I0203 06:47:03.094318 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:03 crc kubenswrapper[4998]: I0203 06:47:03.094326 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:03 crc kubenswrapper[4998]: I0203 06:47:03.094340 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:03 crc kubenswrapper[4998]: I0203 06:47:03.094349 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:03Z","lastTransitionTime":"2026-02-03T06:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 03 06:47:03 crc kubenswrapper[4998]: I0203 06:47:03.198043 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:03 crc kubenswrapper[4998]: I0203 06:47:03.198540 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:03 crc kubenswrapper[4998]: I0203 06:47:03.198563 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:03 crc kubenswrapper[4998]: I0203 06:47:03.198592 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:03 crc kubenswrapper[4998]: I0203 06:47:03.198613 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:03Z","lastTransitionTime":"2026-02-03T06:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[... the same five-entry node-status group (NodeHasSufficientMemory, NodeHasNoDiskPressure, NodeHasSufficientPID, NodeNotReady, setters.go:603 "Node became not ready") repeats verbatim with only its timestamps advancing; first-event timestamps: 06:47:03.301535, 06:47:03.404156 ...]
Feb 03 06:47:03 crc kubenswrapper[4998]: I0203 06:47:03.448393 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 06:33:41.331392961 +0000 UTC
[... node-status group repeats at 06:47:03.507175, 06:47:03.610311, 06:47:03.713408, 06:47:03.817120, 06:47:03.920320, 06:47:04.023888, 06:47:04.127323, 06:47:04.229272 ...]
[... node-status group repeats at 06:47:04.332200 ...]
Feb 03 06:47:04 crc kubenswrapper[4998]: I0203 06:47:04.427029 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 03 06:47:04 crc kubenswrapper[4998]: E0203 06:47:04.427159 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 03 06:47:04 crc kubenswrapper[4998]: I0203 06:47:04.427206 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 03 06:47:04 crc kubenswrapper[4998]: I0203 06:47:04.427276 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s5wml"
Feb 03 06:47:04 crc kubenswrapper[4998]: I0203 06:47:04.427317 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 03 06:47:04 crc kubenswrapper[4998]: E0203 06:47:04.427431 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 03 06:47:04 crc kubenswrapper[4998]: E0203 06:47:04.427580 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s5wml" podUID="7df2ae20-0aeb-4b1e-a408-c1903e061833"
Feb 03 06:47:04 crc kubenswrapper[4998]: E0203 06:47:04.427645 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
[... node-status group repeats at 06:47:04.434001 ...]
Feb 03 06:47:04 crc kubenswrapper[4998]: I0203 06:47:04.449326 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-09 20:44:38.17419571 +0000 UTC
[... node-status group repeats at 06:47:04.537705 ...]
[... node-status group repeats at 06:47:04.639883, 06:47:04.742643, 06:47:04.845186, 06:47:04.948746, 06:47:05.051980, 06:47:05.156020, 06:47:05.259082, 06:47:05.363078 ...]
Feb 03 06:47:05 crc kubenswrapper[4998]: I0203 06:47:05.449449 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 17:33:54.972569109 +0000 UTC
[... node-status group repeats at 06:47:05.466228, 06:47:05.568829, 06:47:05.672582, 06:47:05.775205, 06:47:05.878069, 06:47:05.980944, 06:47:06.084350, 06:47:06.187539 ...]
[... node-status group repeats at 06:47:06.291353, 06:47:06.394830 ...]
Feb 03 06:47:06 crc kubenswrapper[4998]: I0203 06:47:06.427383 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 03 06:47:06 crc kubenswrapper[4998]: I0203 06:47:06.427518 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 03 06:47:06 crc kubenswrapper[4998]: I0203 06:47:06.427596 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s5wml"
Feb 03 06:47:06 crc kubenswrapper[4998]: E0203 06:47:06.427577 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 03 06:47:06 crc kubenswrapper[4998]: I0203 06:47:06.427620 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 03 06:47:06 crc kubenswrapper[4998]: E0203 06:47:06.427849 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 03 06:47:06 crc kubenswrapper[4998]: E0203 06:47:06.427968 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s5wml" podUID="7df2ae20-0aeb-4b1e-a408-c1903e061833"
Feb 03 06:47:06 crc kubenswrapper[4998]: E0203 06:47:06.428099 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 03 06:47:06 crc kubenswrapper[4998]: I0203 06:47:06.449877 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-06 03:24:00.858877432 +0000 UTC
[... node-status group repeats at 06:47:06.498586 ...]
[... node-status group repeats at 06:47:06.601356, 06:47:06.704372, 06:47:06.806450, 06:47:06.909313, 06:47:07.012616, 06:47:07.115466, 06:47:07.218758, 06:47:07.321756, 06:47:07.424861 ...]
Feb 03 06:47:07 crc kubenswrapper[4998]: I0203 06:47:07.450484 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-07 07:08:36.699399306 +0000 UTC
[... node-status group repeats at 06:47:07.528605, 06:47:07.631823, 06:47:07.735307, 06:47:07.839230, 06:47:07.942527, 06:47:08.045427, 06:47:08.147991, 06:47:08.251095 ...]
Has your network provider started?"} Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.353947 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.354225 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.354306 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.354367 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.354423 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:08Z","lastTransitionTime":"2026-02-03T06:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.426565 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 06:47:08 crc kubenswrapper[4998]: E0203 06:47:08.426695 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.426932 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.427066 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.427140 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s5wml" Feb 03 06:47:08 crc kubenswrapper[4998]: E0203 06:47:08.427067 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 06:47:08 crc kubenswrapper[4998]: E0203 06:47:08.427319 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 06:47:08 crc kubenswrapper[4998]: E0203 06:47:08.427401 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s5wml" podUID="7df2ae20-0aeb-4b1e-a408-c1903e061833" Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.451274 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 15:53:26.471266743 +0000 UTC Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.457053 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.457105 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.457117 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.457134 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.457150 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:08Z","lastTransitionTime":"2026-02-03T06:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.559128 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.559206 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.559225 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.559249 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.559266 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:08Z","lastTransitionTime":"2026-02-03T06:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.661873 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.661942 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.661953 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.661967 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.661980 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:08Z","lastTransitionTime":"2026-02-03T06:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.764282 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.764318 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.764327 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.764340 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.764349 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:08Z","lastTransitionTime":"2026-02-03T06:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.821958 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7df2ae20-0aeb-4b1e-a408-c1903e061833-metrics-certs\") pod \"network-metrics-daemon-s5wml\" (UID: \"7df2ae20-0aeb-4b1e-a408-c1903e061833\") " pod="openshift-multus/network-metrics-daemon-s5wml" Feb 03 06:47:08 crc kubenswrapper[4998]: E0203 06:47:08.822083 4998 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 03 06:47:08 crc kubenswrapper[4998]: E0203 06:47:08.822134 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7df2ae20-0aeb-4b1e-a408-c1903e061833-metrics-certs podName:7df2ae20-0aeb-4b1e-a408-c1903e061833 nodeName:}" failed. No retries permitted until 2026-02-03 06:47:40.822119349 +0000 UTC m=+99.108813155 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7df2ae20-0aeb-4b1e-a408-c1903e061833-metrics-certs") pod "network-metrics-daemon-s5wml" (UID: "7df2ae20-0aeb-4b1e-a408-c1903e061833") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.866865 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.866908 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.866918 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.866932 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.866944 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:08Z","lastTransitionTime":"2026-02-03T06:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.969332 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.969364 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.969372 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.969385 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:08 crc kubenswrapper[4998]: I0203 06:47:08.969395 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:08Z","lastTransitionTime":"2026-02-03T06:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
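The entries above repeat a single condition: the node stays NotReady because the container runtime reports NetworkReady=false until a CNI configuration shows up in /etc/kubernetes/cni/net.d/. A minimal Go sketch of that directory check follows; it is an illustration written for this log, not the kubelet's or CRI-O's actual code, and the extension list (.conf, .conflist, .json) is an assumption borrowed from the libcni convention.

    // cnicheck.go: does the CNI conf directory named in the log contain at
    // least one network configuration? If not, the runtime keeps answering
    // NetworkReady=false and the kubelet keeps logging the
    // "Has your network provider started?" condition seen above.
    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    func main() {
        confDir := "/etc/kubernetes/cni/net.d" // directory from the log message
        entries, err := os.ReadDir(confDir)
        if err != nil {
            fmt.Printf("cannot read %s: %v\n", confDir, err)
            os.Exit(1)
        }
        var found []string
        for _, e := range entries {
            // Assumed extension set, following the libcni convention.
            switch filepath.Ext(e.Name()) {
            case ".conf", ".conflist", ".json":
                found = append(found, e.Name())
            }
        }
        if len(found) == 0 {
            fmt.Println("no CNI configuration file found; network plugin not ready")
            os.Exit(1)
        }
        fmt.Println("CNI config present:", found)
    }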
Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.013041 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.013109 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.013123 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.013146 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.013160 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:09Z","lastTransitionTime":"2026-02-03T06:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:09 crc kubenswrapper[4998]: E0203 06:47:09.027644 4998 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29c0cf50-7633-4e19-9ce0-3cf9f0cae181\\\",\\\"systemUUID\\\":\\\"2c417530-aeae-4e8e-ac67-d425307cb93c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:09Z is after 2025-08-24T17:21:41Z"
Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.033133 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.033174 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
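The status patch above is rejected before it reaches the Node object: the node.network-node-identity.openshift.io webhook at 127.0.0.1:9743 presents a serving certificate that expired 2025-08-24T17:21:41Z, while the node clock reads 2026-02-03. Below is a small Go diagnostic sketch that reproduces the same x509 time check against that endpoint; it is assumed tooling for illustration, not part of this cluster's stack.

    // certexpiry.go: fetch the webhook's serving certificate and compare
    // its validity window with the current time, mirroring the
    // "certificate has expired or is not yet valid" failure above.
    package main

    import (
        "crypto/tls"
        "fmt"
        "time"
    )

    func main() {
        addr := "127.0.0.1:9743" // webhook endpoint from the log
        // Skip verification so we can read the certificate at all,
        // then redo the expiry comparison ourselves.
        conn, err := tls.Dial("tcp", addr, &tls.Config{InsecureSkipVerify: true})
        if err != nil {
            fmt.Println("dial failed:", err)
            return
        }
        defer conn.Close()
        cert := conn.ConnectionState().PeerCertificates[0]
        now := time.Now()
        fmt.Printf("NotBefore=%s NotAfter=%s now=%s\n", cert.NotBefore, cert.NotAfter, now)
        if now.After(cert.NotAfter) {
            fmt.Println("certificate has expired")
        }
    }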
event="NodeHasNoDiskPressure" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.033182 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.033195 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.033204 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:09Z","lastTransitionTime":"2026-02-03T06:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:09 crc kubenswrapper[4998]: E0203 06:47:09.047285 4998 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29c0cf50-7633-4e19-9ce0-3cf9f0cae181\\\",\\\"systemUUID\\\":\\\"2c417530-aeae-4e8e-ac67-d425307cb93c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:09Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.050678 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.050761 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
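The failures at 06:47:09.027644, .047285, .070000 and .093014 are back-to-back attempts at the same PATCH inside one status sync, which is the kubelet's bounded-retry pattern around node status updates. The Go sketch below shows that shape; the retry constant of 5 mirrors the upstream kubelet default (nodeStatusUpdateRetry) but is an assumption here, and patchNodeStatus is a hypothetical stand-in for the real API call.

    // retryloop.go: bounded retries around a node status update that keeps
    // failing the way the log shows.
    package main

    import (
        "errors"
        "fmt"
    )

    const nodeStatusUpdateRetry = 5 // assumed upstream default

    // patchNodeStatus stands in for the real PATCH; here it always fails
    // with the webhook error from the log.
    func patchNodeStatus() error {
        return errors.New("failed calling webhook \"node.network-node-identity.openshift.io\": certificate has expired")
    }

    func updateNodeStatus() error {
        for i := 0; i < nodeStatusUpdateRetry; i++ {
            if err := patchNodeStatus(); err != nil {
                fmt.Printf("Error updating node status, will retry: %v\n", err)
                continue
            }
            return nil
        }
        return errors.New("update node status exceeds retry count")
    }

    func main() {
        if err := updateNodeStatus(); err != nil {
            fmt.Println(err)
        }
    }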
event="NodeHasNoDiskPressure" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.050790 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.050858 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.050887 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:09Z","lastTransitionTime":"2026-02-03T06:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:09 crc kubenswrapper[4998]: E0203 06:47:09.070000 4998 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29c0cf50-7633-4e19-9ce0-3cf9f0cae181\\\",\\\"systemUUID\\\":\\\"2c417530-aeae-4e8e-ac67-d425307cb93c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:09Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.077624 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.077659 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
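For slicing these records mechanically, each line is a journald prefix wrapping a klog header: severity letter, MMDD date, wall-clock time, PID, file:line, then the message. Below is a Go sketch of a parser for that layout; the regular expression is written against the lines in this log, not taken from an existing tool.

    // logscan.go: split one journald+klog line into its fields.
    package main

    import (
        "fmt"
        "regexp"
    )

    var klogLine = regexp.MustCompile(
        `^(\w{3} \d{2} \d{2}:\d{2}:\d{2}) (\S+) kubenswrapper\[(\d+)\]: ([IWEF])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d+)\s+(\d+) ([\w.]+:\d+)\] (.*)$`)

    func main() {
        line := `Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.077624 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"`
        m := klogLine.FindStringSubmatch(line)
        if m == nil {
            fmt.Println("no match")
            return
        }
        fmt.Println("severity:", m[4]) // I=info, E=error
        fmt.Println("source:  ", m[8]) // file:line inside the kubelet
        fmt.Println("message: ", m[9])
    }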
event="NodeHasNoDiskPressure" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.077669 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.077685 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.077695 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:09Z","lastTransitionTime":"2026-02-03T06:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:09 crc kubenswrapper[4998]: E0203 06:47:09.093014 4998 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29c0cf50-7633-4e19-9ce0-3cf9f0cae181\\\",\\\"systemUUID\\\":\\\"2c417530-aeae-4e8e-ac67-d425307cb93c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:09Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.097314 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.097345 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.097356 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.097372 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.097382 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:09Z","lastTransitionTime":"2026-02-03T06:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:09 crc kubenswrapper[4998]: E0203 06:47:09.108633 4998 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:47:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29c0cf50-7633-4e19-9ce0-3cf9f0cae181\\\",\\\"systemUUID\\\":\\\"2c417530-aeae-4e8e-ac67-d425307cb93c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:09Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:09 crc kubenswrapper[4998]: E0203 06:47:09.108782 4998 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.110462 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.110533 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.110551 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.110999 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.111056 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:09Z","lastTransitionTime":"2026-02-03T06:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.213747 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.213807 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.213818 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.213832 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.213842 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:09Z","lastTransitionTime":"2026-02-03T06:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.316984 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.317058 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.317082 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.317104 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.317122 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:09Z","lastTransitionTime":"2026-02-03T06:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.420933 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.420977 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.420989 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.421008 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.421025 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:09Z","lastTransitionTime":"2026-02-03T06:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.452140 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 15:14:58.257974093 +0000 UTC Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.523638 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.523682 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.523691 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.523703 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.523712 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:09Z","lastTransitionTime":"2026-02-03T06:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.626365 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.626398 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.626406 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.626418 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.626426 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:09Z","lastTransitionTime":"2026-02-03T06:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.729069 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.729101 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.729109 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.729121 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.729130 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:09Z","lastTransitionTime":"2026-02-03T06:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.832012 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.832077 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.832095 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.832119 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.832135 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:09Z","lastTransitionTime":"2026-02-03T06:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.935139 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.935202 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.935217 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.935239 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:09 crc kubenswrapper[4998]: I0203 06:47:09.935254 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:09Z","lastTransitionTime":"2026-02-03T06:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.038516 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.038583 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.038593 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.038605 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.038615 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:10Z","lastTransitionTime":"2026-02-03T06:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.141589 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.141624 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.141632 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.141648 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.141657 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:10Z","lastTransitionTime":"2026-02-03T06:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.244227 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.244292 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.244301 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.244313 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.244322 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:10Z","lastTransitionTime":"2026-02-03T06:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.347113 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.347153 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.347163 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.347178 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.347190 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:10Z","lastTransitionTime":"2026-02-03T06:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.426457 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.426485 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.426485 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s5wml" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.426504 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 06:47:10 crc kubenswrapper[4998]: E0203 06:47:10.426594 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 06:47:10 crc kubenswrapper[4998]: E0203 06:47:10.426677 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 06:47:10 crc kubenswrapper[4998]: E0203 06:47:10.426754 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s5wml" podUID="7df2ae20-0aeb-4b1e-a408-c1903e061833" Feb 03 06:47:10 crc kubenswrapper[4998]: E0203 06:47:10.426854 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.449216 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.449258 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.449271 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.449284 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.449297 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:10Z","lastTransitionTime":"2026-02-03T06:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.452569 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 08:48:10.913440389 +0000 UTC Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.551773 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.551828 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.551840 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.551856 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.551868 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:10Z","lastTransitionTime":"2026-02-03T06:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.654027 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.654075 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.654084 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.654101 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.654113 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:10Z","lastTransitionTime":"2026-02-03T06:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.755876 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.756122 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.756210 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.756285 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.756346 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:10Z","lastTransitionTime":"2026-02-03T06:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.858852 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.858886 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.858898 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.858912 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.858923 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:10Z","lastTransitionTime":"2026-02-03T06:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.960482 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.960519 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.960530 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.960546 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:10 crc kubenswrapper[4998]: I0203 06:47:10.960557 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:10Z","lastTransitionTime":"2026-02-03T06:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.063029 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.063080 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.063091 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.063112 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.063123 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:11Z","lastTransitionTime":"2026-02-03T06:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.166916 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.166965 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.166977 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.166994 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.167006 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:11Z","lastTransitionTime":"2026-02-03T06:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.272226 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.272267 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.272276 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.272291 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.272305 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:11Z","lastTransitionTime":"2026-02-03T06:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.375750 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.375956 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.375976 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.376001 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.376022 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:11Z","lastTransitionTime":"2026-02-03T06:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.452659 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-09 10:05:58.00846169 +0000 UTC Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.478180 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.478222 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.478233 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.478250 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.478263 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:11Z","lastTransitionTime":"2026-02-03T06:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.580808 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.580928 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.580941 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.580956 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.580966 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:11Z","lastTransitionTime":"2026-02-03T06:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.684058 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.684136 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.684159 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.684190 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.684214 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:11Z","lastTransitionTime":"2026-02-03T06:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.786963 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.787005 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.787017 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.787035 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.787050 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:11Z","lastTransitionTime":"2026-02-03T06:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.889417 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.889467 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.889476 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.889493 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.889532 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:11Z","lastTransitionTime":"2026-02-03T06:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.993271 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.993304 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.993316 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.993333 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:11 crc kubenswrapper[4998]: I0203 06:47:11.993345 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:11Z","lastTransitionTime":"2026-02-03T06:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.096094 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.096187 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.096221 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.096249 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.096269 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:12Z","lastTransitionTime":"2026-02-03T06:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.199248 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.199314 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.199327 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.199349 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.199361 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:12Z","lastTransitionTime":"2026-02-03T06:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.301458 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.301507 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.301519 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.301536 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.301550 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:12Z","lastTransitionTime":"2026-02-03T06:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.404372 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.404433 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.404451 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.404476 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.404494 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:12Z","lastTransitionTime":"2026-02-03T06:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.427551 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.427551 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.427633 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s5wml" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.427686 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:47:12 crc kubenswrapper[4998]: E0203 06:47:12.428357 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 06:47:12 crc kubenswrapper[4998]: E0203 06:47:12.428554 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s5wml" podUID="7df2ae20-0aeb-4b1e-a408-c1903e061833" Feb 03 06:47:12 crc kubenswrapper[4998]: E0203 06:47:12.428834 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 06:47:12 crc kubenswrapper[4998]: E0203 06:47:12.428957 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.446795 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:12Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.453834 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 16:45:57.840305111 +0000 UTC Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.462453 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981f8ad138cf566afc1dc984e1549e0dabb353e61469585343b399ef799f2b71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ab581feac753b27611d84de9a135dada6c09b508339e915247f806c6d69db1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v9x5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:12Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.476038 4998 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-dns/node-resolver-plgnf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0226ecd8c9fa202b4c46cf30a396c7bd55814fe45ec536e4fff2c0f09d6aa371\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5z4jf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-plgnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:12Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.488751 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-crsvv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16316a01-0118-4b01-81cb-13e869b62484\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f9d449e60417a58eed750cbff1f8c1c97a740e68bbd349f3e8b98f64e749d12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfq5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-crsvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:12Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.506550 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.506592 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.506605 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.506622 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.506635 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:12Z","lastTransitionTime":"2026-02-03T06:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.519157 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f202cc16-8c9d-4e03-bd1d-0716f02a8ee7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://413ae0620494cf4c5807c78ecdfda671fd63b7aaefa96b63564212f57b34f9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4741ef3fd48469b5eef0e17e68ef7b20a3929017ff3939ee975b8a3751cfe965\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b741c846b0b5468ec9b52e16778e89632de68f37c0bd5b1fc375cc019a8c959c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f5dcbfe7ff012e1060f79ff8f126f7f86d92d4cd113f390c41f3bdbb7e9baf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4248f5a0e550f2e4e6aa126875469ca31b11f40cf885b3d636c4d97925cb7fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:12Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.537163 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee532c209b30e3e2e21dc079ea01aa8bde392a9f42452a24ad5a0c34ca81632d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:12Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.573903 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d22be91149878dccbc8e538045eaf67301beefef286fd92c7e167337930f99b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0c3a3dd3295e8c1865539192e21489cd44235205b1b770db8a099907473d290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:12Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.591824 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d754bb9fdbc001ab9d9a1ce51b99180c387ebda84e0a5b4d1fe9af76c6c258fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:12Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.606799 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48b7853e-6cfe-4818-8635-2b12f2be8097\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://66250fee433e0a4da7f8a56ca4c5299eed2c156035640d6ef9e401c81a60abfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3631ce704525fba5e76870d8f108ed0e5d21e9b061d2ffc99b84c1c4755aa0e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d261e399c0e0ab21edf85442c34085baa35a4d89963fe29dfc815ec96172d14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4be2f3e13f93718e31ed1d5f95cc05eb5e2996825afabed9e742a839cf2949a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:12Z is after 2025-08-24T17:21:41Z"
Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.608656 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.608680 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.608695 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.608709 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.608719 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:12Z","lastTransitionTime":"2026-02-03T06:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.621128 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:12Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.631987 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dm4vz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91ddb23bc0f787c0874522cb2f92e61f895a03595a116248d27fbc2030a1cf92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t8ch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dm4vz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:12Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.641922 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-g7trx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"435a5c9b-684c-42ee-9519-13c14510718e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://994c5aef2744f3eed8f7fdbbe4836e8a63c096f5ec1ef330a75f626e54ac2dad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssscp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://58610e96b03ce7c48ff10de9c3655300c6d3a0ee834827f4d06a912b72693735\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssscp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-g7trx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:12Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.651633 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-s5wml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df2ae20-0aeb-4b1e-a408-c1903e061833\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwcm6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwcm6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:36Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-s5wml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:12Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.663136 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c8c93fe-ba86-4899-a018-d24fb324de5c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35ab5e2fc89d8e658f04c9a5ed234dad845dcf70d39e89bd52fed8f84b6c8c68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T06:46:16Z\\\",\\\"message\\\":\\\"W0203 06:46:05.531717 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 06:46:05.532068 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770101165 cert, and key in /tmp/serving-cert-4006352559/serving-signer.crt, /tmp/serving-cert-4006352559/serving-signer.key\\\\nI0203 06:46:05.809216 1 observer_polling.go:159] Starting file observer\\\\nW0203 06:46:05.812530 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 06:46:05.812748 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 06:46:05.815997 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4006352559/tls.crt::/tmp/serving-cert-4006352559/tls.key\\\\\\\"\\\\nF0203 06:46:16.112630 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:12Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.674056 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5102bfac-a880-4d5d-9b91-5660ef907547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479c3c16ba3884ad61ade6ebbeb9106ee755a65deb7b35d2f1b5f4286106cf6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://168b94ff897b67371c327429bf5be07500e7f0850442d898d18bd58b56d81378\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a620b1d9fc8b57fa7c51cf93b34dbe13149fba7a8ba0fa6f89a04704cf6bedc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2aa82451c5d9674cd665f7a0888ae62beaf3007867e70775198813f4ae57eb\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec2aa82451c5d9674cd665f7a0888ae62beaf3007867e70775198813f4ae57eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:12Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.687403 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:12Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.702739 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e25d5a00-1315-4327-aadd-fd81e45fb023\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d0b49de3f605b693ae64a05177cab0247aa357fafaba47ec21c7982f17c47f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5qjsn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:12Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.710339 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.710384 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.710397 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.710412 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.710421 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:12Z","lastTransitionTime":"2026-02-03T06:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.719603 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7418b1d-9f7d-48cd-aac4-6a1b85967841\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc28205a03a67a974a7963431c89c78a6f766b5736c413cd633730a2faca477\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T06:46:35Z\\\",\\\"message\\\":\\\"LB{services.LB{Name:\\\\\\\"Service_openshift-console-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.88\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0203 06:46:35.512722 6480 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-cluster-version/cluster-version-operator]} name:Service_openshift-cluster-version/cluster-version-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.182:9099:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0203 06:46:35.512931 6480 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:34Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T06:46:59Z\\\",\\\"message\\\":\\\"ubernetes/ovnkube-control-plane-749d76644c-g7trx openshift-image-registry/node-ca-crsvv openshift-multus/multus-additional-cni-plugins-5qjsn openshift-network-console/networking-console-plugin-85b44fc459-gdk6g openshift-network-diagnostics/network-check-source-55646444c4-trplf openshift-kube-apiserver/kube-apiserver-crc]\\\\nI0203 06:46:59.679203 6741 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI0203 06:46:59.679227 6741 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0203 06:46:59.679250 6741 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0203 06:46:59.679266 6741 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI0203 06:46:59.679282 6741 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI0203 06:46:59.679293 6741 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0203 06:46:59.679317 6741 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0203 06:46:59.679402 6741 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7b8d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:12Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.813475 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.813505 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.813513 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.813524 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.813533 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:12Z","lastTransitionTime":"2026-02-03T06:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.815173 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-dm4vz_2cba0dd3-b238-4ad4-9517-e2bf7d30b635/kube-multus/0.log" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.815275 4998 generic.go:334] "Generic (PLEG): container finished" podID="2cba0dd3-b238-4ad4-9517-e2bf7d30b635" containerID="91ddb23bc0f787c0874522cb2f92e61f895a03595a116248d27fbc2030a1cf92" exitCode=1 Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.815352 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-dm4vz" event={"ID":"2cba0dd3-b238-4ad4-9517-e2bf7d30b635","Type":"ContainerDied","Data":"91ddb23bc0f787c0874522cb2f92e61f895a03595a116248d27fbc2030a1cf92"} Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.815702 4998 scope.go:117] "RemoveContainer" containerID="91ddb23bc0f787c0874522cb2f92e61f895a03595a116248d27fbc2030a1cf92" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.830292 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48b7853e-6cfe-4818-8635-2b12f2be8097\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://66250fee433e0a4da7f8a56ca4c5299eed2c156035640d6ef9e401c81a60abfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3631ce704525fba5e76870d8f108ed0e5d21e9b061d2ffc99b84c1c4755aa0e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d261e399c0e0ab21edf85442c34085baa35a4d89963fe29dfc815ec96172d14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4be2f3e13f93718e31ed1d5f95cc05eb5e2996825afabed9e742a839cf2949a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:12Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.843313 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:12Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.857471 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dm4vz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:47:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:47:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91ddb23bc0f787c0874522cb2f92e61f895a03595a116248d27fbc2030a1cf92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91ddb23bc0f787c0874522cb2f92e61f895a03595a116248d27fbc2030a1cf92\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T06:47:11Z\\\",\\\"message\\\":\\\"2026-02-03T06:46:26+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_6be5017b-98fb-4a0e-90b4-8f089f26b264\\\\n2026-02-03T06:46:26+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_6be5017b-98fb-4a0e-90b4-8f089f26b264 to /host/opt/cni/bin/\\\\n2026-02-03T06:46:26Z [verbose] multus-daemon started\\\\n2026-02-03T06:46:26Z [verbose] Readiness Indicator file check\\\\n2026-02-03T06:47:11Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t8ch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-dm4vz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:12Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.870879 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-g7trx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"435a5c9b-684c-42ee-9519-13c14510718e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://994c5aef2744f3eed8f7fdbbe4836e8a63c096f5ec1ef330a75f626e54ac2dad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssscp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://58610e96b03ce7c48ff10de9c3655300c6d3a0ee834827f4d06a912b72693735\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssscp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}]
,\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-g7trx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:12Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.883401 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-s5wml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df2ae20-0aeb-4b1e-a408-c1903e061833\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwcm6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwcm6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:36Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-s5wml\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:12Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.899085 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c8c93fe-ba86-4899-a018-d24fb324de5c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35ab5e2fc89d8e658f04c9a5ed234dad845dcf70d39e89bd52fed8f84b6c8c68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T06:46:16Z\\\",\\\"message\\\":\\\"W0203 06:46:05.531717 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 06:46:05.532068 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770101165 cert, and key in /tmp/serving-cert-4006352559/serving-signer.crt, /tmp/serving-cert-4006352559/serving-signer.key\\\\nI0203 06:46:05.809216 1 observer_polling.go:159] Starting file observer\\\\nW0203 06:46:05.812530 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 06:46:05.812748 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 06:46:05.815997 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4006352559/tls.crt::/tmp/serving-cert-4006352559/tls.key\\\\\\\"\\\\nF0203 06:46:16.112630 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:12Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.909773 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5102bfac-a880-4d5d-9b91-5660ef907547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479c3c16ba3884ad61ade6ebbeb9106ee755a65deb7b35d2f1b5f4286106cf6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://168b94ff897b67371c327429bf5be07500e7f0850442d898d18bd58b56d81378\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a620b1d9fc8b57fa7c51cf93b34dbe13149fba7a8ba0fa6f89a04704cf6bedc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2aa82451c5d9674cd665f7a0888ae62beaf3007867e70775198813f4ae57eb\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec2aa82451c5d9674cd665f7a0888ae62beaf3007867e70775198813f4ae57eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:12Z is after 2025-08-24T17:21:41Z"
Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.915504 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.915531 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.915540 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.915551 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.915560 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:12Z","lastTransitionTime":"2026-02-03T06:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
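
The NodeNotReady transition recorded above hinges on the CNI readiness probe: the kubelet keeps the node's NetworkReady condition false while no network config exists under the CNI conf directory. The Go sketch below approximates that directory scan; the path and accepted extensions mirror the log message, not the actual kubelet/CRI-O source, so treat it as illustrative only.

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	// Directory taken from the log line above; the real check lives in
	// the container runtime's CNI plugin manager, not in this sketch.
	confDir := "/etc/kubernetes/cni/net.d"
	entries, err := os.ReadDir(confDir)
	if err != nil {
		fmt.Println("cannot read conf dir:", err)
		return
	}
	var confs []string
	for _, e := range entries {
		// Extensions commonly accepted for CNI configs; an assumption here.
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			confs = append(confs, e.Name())
		}
	}
	if len(confs) == 0 {
		fmt.Println("NetworkReady=false: no CNI configuration file in", confDir)
		return
	}
	fmt.Println("CNI configs found:", confs)
}
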
Has your network provider started?"} Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.921856 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:12Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.936052 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e25d5a00-1315-4327-aadd-fd81e45fb023\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d0b49de3f605b693ae64a05177cab0247aa357fafaba47ec21c7982f17c47f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5qjsn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:12Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.953597 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7418b1d-9f7d-48cd-aac4-6a1b85967841\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfc28205a03a67a974a7963431c89c78a6f766b5736c413cd633730a2faca477\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T06:46:35Z\\\",\\\"message\\\":\\\"LB{services.LB{Name:\\\\\\\"Service_openshift-console-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.88\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0203 06:46:35.512722 6480 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-cluster-version/cluster-version-operator]} name:Service_openshift-cluster-version/cluster-version-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.182:9099:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0203 06:46:35.512931 6480 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:34Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T06:46:59Z\\\",\\\"message\\\":\\\"ubernetes/ovnkube-control-plane-749d76644c-g7trx openshift-image-registry/node-ca-crsvv openshift-multus/multus-additional-cni-plugins-5qjsn openshift-network-console/networking-console-plugin-85b44fc459-gdk6g openshift-network-diagnostics/network-check-source-55646444c4-trplf openshift-kube-apiserver/kube-apiserver-crc]\\\\nI0203 06:46:59.679203 6741 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI0203 06:46:59.679227 6741 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0203 06:46:59.679250 6741 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0203 06:46:59.679266 6741 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI0203 06:46:59.679282 6741 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI0203 06:46:59.679293 6741 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0203 06:46:59.679317 6741 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0203 06:46:59.679402 6741 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7b8d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:12Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.967889 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
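
Each of these failed status updates is a strategic merge patch against the pod's status subresource; the $setElementOrder/conditions directive visible in the payloads pins the merge order of the conditions list, which is keyed by "type". A minimal client-go sketch of issuing such a patch follows; the namespace, pod name, and kubeconfig path are placeholders, and a failing admission webhook would surface through the returned error exactly as in the entries above.

package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/types"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Placeholder kubeconfig location; the kubelet itself uses its own
	// client credentials rather than a user kubeconfig.
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	// A strategic merge patch shaped like the ones in the log:
	// $setElementOrder/conditions fixes the ordering of the merged list.
	patch := []byte(`{"status":{"$setElementOrder/conditions":[{"type":"Ready"}],` +
		`"conditions":[{"type":"Ready","status":"True"}]}}`)
	// "example-pod"/"default" are hypothetical; "status" targets the
	// status subresource, as status_manager does.
	pod, err := cs.CoreV1().Pods("default").Patch(context.TODO(),
		"example-pod", types.StrategicMergePatchType, patch,
		metav1.PatchOptions{}, "status")
	if err != nil {
		// A webhook rejection arrives here as an "Internal error occurred:
		// failed calling webhook ..." API error, matching the log.
		fmt.Println("patch failed:", err)
		return
	}
	fmt.Println("patched:", pod.Name)
}
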
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:12Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.981038 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981f8ad138cf566afc1dc984e1549e0dabb353e61469585343b399ef799f2b71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ab581feac753b27611d84de9a135dada6c09b508339e915247f806c6d69db1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v9x5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:12Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:12 crc kubenswrapper[4998]: I0203 06:47:12.994489 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-crsvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16316a01-0118-4b01-81cb-13e869b62484\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f9d449e60417a58eed750cbff1f8c1c97a740e68bbd349f3e8b98f64e749d12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfq5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-crsvv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:12Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.016913 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f202cc16-8c9d-4e03-bd1d-0716f02a8ee7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://413ae0620494cf4c5807c78ecdfda671fd63b7aaefa96b63564212f57b34f9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4741ef3fd48469b5eef0e17e68ef7b20a3929017ff3939ee975b8a3751cfe965\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b741c846b0b5468ec9b52e16778e89632de68f37c0bd5b1fc375cc019a8c959c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f5dcbfe7ff012e1060f79ff8f126f7f86d92d4cd113f390c41f3bdbb7e9baf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4248f5a0e550f2e4e6aa126875469ca31b11f40cf885b3d636c4d97925cb7fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:13Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.018111 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.018152 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.018164 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.018181 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.018193 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:13Z","lastTransitionTime":"2026-02-03T06:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.033981 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee532c209b30e3e2e21dc079ea01aa8bde392a9f42452a24ad5a0c34ca81632d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:13Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.046609 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d22be91149878dccbc8e538045eaf67301beefef286fd92c7e167337930f99b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0c3a3dd3295e8c1865539192e21489cd44235205b1b770db8a099907473d290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:13Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.058173 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d754bb9fdbc001ab9d9a1ce51b99180c387ebda84e0a5b4d1fe9af76c6c258fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:13Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.068892 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-plgnf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0226ecd8c9fa202b4c46cf30a396c7bd55814fe45ec536e4fff2c0f09d6aa371\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5z4jf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-plgnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:13Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.120381 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.120450 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.120464 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.120485 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.120499 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:13Z","lastTransitionTime":"2026-02-03T06:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.223202 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.223302 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.223335 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.223369 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.223388 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:13Z","lastTransitionTime":"2026-02-03T06:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.326120 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.326584 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.326750 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.326954 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.327101 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:13Z","lastTransitionTime":"2026-02-03T06:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.331217 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.332089 4998 scope.go:117] "RemoveContainer" containerID="ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d" Feb 03 06:47:13 crc kubenswrapper[4998]: E0203 06:47:13.332332 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-p7b8d_openshift-ovn-kubernetes(f7418b1d-9f7d-48cd-aac4-6a1b85967841)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.345270 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c8c93fe-ba86-4899-a018-d24fb324de5c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"
name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35ab5e2fc89d8e658f04c9a5ed234dad845dcf70d39e89bd52fed8f84b6c8c68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T06:46:16Z\\\",\\\"message\\\":\\\"W0203 06:46:05.531717 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 06:46:05.532068 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770101165 cert, and key in /tmp/serving-cert-4006352559/serving-signer.crt, /tmp/serving-cert-4006352559/serving-signer.key\\\\nI0203 06:46:05.809216 1 observer_polling.go:159] Starting file observer\\\\nW0203 06:46:05.812530 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 06:46:05.812748 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 06:46:05.815997 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4006352559/tls.crt::/tmp/serving-cert-4006352559/tls.key\\\\\\\"\\\\nF0203 06:46:16.112630 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:13Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.356383 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5102bfac-a880-4d5d-9b91-5660ef907547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479c3c16ba3884ad61ade6ebbeb9106ee755a65deb7b35d2f1b5f4286106cf6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://168b94ff897b67371c327429bf5be07500e7f0850442d898d18bd58b56d81378\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a620b1d9fc8b57fa7c51cf93b34dbe13149fba7a8ba0fa6f89a04704cf6bedc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2aa82451c5d9674cd665f7a0888ae62beaf3007867e70775198813f4ae57eb\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec2aa82451c5d9674cd665f7a0888ae62beaf3007867e70775198813f4ae57eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:13Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.370149 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:13Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.383583 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e25d5a00-1315-4327-aadd-fd81e45fb023\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d0b49de3f605b693ae64a05177cab0247aa357fafaba47ec21c7982f17c47f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5qjsn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:13Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.403097 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7418b1d-9f7d-48cd-aac4-6a1b85967841\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab345d6c4fe2e5b5bbfc286634ac14e24468d235
603c35cc3c573ad6b15d1b5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T06:46:59Z\\\",\\\"message\\\":\\\"ubernetes/ovnkube-control-plane-749d76644c-g7trx openshift-image-registry/node-ca-crsvv openshift-multus/multus-additional-cni-plugins-5qjsn openshift-network-console/networking-console-plugin-85b44fc459-gdk6g openshift-network-diagnostics/network-check-source-55646444c4-trplf openshift-kube-apiserver/kube-apiserver-crc]\\\\nI0203 06:46:59.679203 6741 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI0203 06:46:59.679227 6741 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0203 06:46:59.679250 6741 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0203 06:46:59.679266 6741 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI0203 06:46:59.679282 6741 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI0203 06:46:59.679293 6741 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0203 06:46:59.679317 6741 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0203 06:46:59.679402 6741 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:58Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-p7b8d_openshift-ovn-kubernetes(f7418b1d-9f7d-48cd-aac4-6a1b85967841)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7b8d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:13Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.414608 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:13Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.423986 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981f8ad138cf566afc1dc984e1549e0dabb353e61469585343b399ef799f2b71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ab581feac753b27611d84de9a135dada6c09b508339e915247f806c6d69db1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v9x5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:13Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.431744 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.431841 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.431859 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.431878 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.431919 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:13Z","lastTransitionTime":"2026-02-03T06:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.448306 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f202cc16-8c9d-4e03-bd1d-0716f02a8ee7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://413ae0620494cf4c5807c78ecdfda671fd63b7aaefa96b63564212f57b34f9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4741ef3fd48469b5eef0e17e68ef7b20a3929017ff3939ee975b8a3751cfe965\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b741c846b0b5468ec9b52e16778e89632de68f37c0bd5b1fc375cc019a8c959c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f5dcbfe7ff012e1060f79ff8f126f7f86d92d4cd113f390c41f3bdbb7e9baf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4248f5a0e550f2e4e6aa126875469ca31b11f40cf885b3d636c4d97925cb7fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:13Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.454268 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 21:47:50.479689222 +0000 UTC Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.461091 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee532c209b30e3e2e21dc079ea01aa8bde392a9f42452a24ad5a0c34ca81632d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:13Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.472179 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d22be91149878dccbc8e538045eaf67301beefef286fd92c7e167337930f99b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0c3a3dd3295e8c1865539192e21489cd44235205b1b770db8a099907473d290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:13Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.482072 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d754bb9fdbc001ab9d9a1ce51b99180c387ebda84e0a5b4d1fe9af76c6c258fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:13Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.491342 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-plgnf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0226ecd8c9fa202b4c46cf30a396c7bd55814fe45ec536e4fff2c0f09d6aa371\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5z4jf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-plgnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:13Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.501855 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-crsvv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16316a01-0118-4b01-81cb-13e869b62484\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f9d449e60417a58eed750cbff1f8c1c97a740e68bbd349f3e8b98f64e749d12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfq5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-crsvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:13Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.513429 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48b7853e-6cfe-4818-8635-2b12f2be8097\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://66250fee433e0a4da7f8a56ca4c5299eed2c156035640d6ef9e401c81a60abfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3631ce704525fba5e76870d8f108ed0e5d21e9b061d2ffc99b84c1c4755aa0e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d261e399c0e0ab21edf85442c34085baa35a4d89963fe29dfc815ec96172d14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4be2f3e13f93718e31ed1d5f95cc05eb5e2996825afabed9e742a839cf2949a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:13Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.524232 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:13Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.535589 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.535642 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.535667 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.535689 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.535704 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:13Z","lastTransitionTime":"2026-02-03T06:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.537143 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dm4vz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:47:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:47:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91ddb23bc0f787c0874522cb2f92e61f895a03595a116248d27fbc2030a1cf92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91ddb23bc0f787c0874522cb2f92e61f895a03595a116248d27fbc2030a1cf92\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T06:47:11Z\\\",\\\"message\\\":\\\"2026-02-03T06:46:26+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_6be5017b-98fb-4a0e-90b4-8f089f26b264\\\\n2026-02-03T06:46:26+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_6be5017b-98fb-4a0e-90b4-8f089f26b264 to /host/opt/cni/bin/\\\\n2026-02-03T06:46:26Z [verbose] multus-daemon started\\\\n2026-02-03T06:46:26Z [verbose] Readiness Indicator file check\\\\n2026-02-03T06:47:11Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t8ch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dm4vz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:13Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.547581 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-g7trx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"435a5c9b-684c-42ee-9519-13c14510718e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://994c5aef2744f3eed8f7fdbbe4836e8a63c096f5ec1ef330a75f626e54ac2dad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssscp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://58610e96b03ce7c48ff10de9c3655300c6d3a0ee834827f4d06a912b72693735\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssscp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-g7trx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:13Z is after 2025-08-24T17:21:41Z" Feb 03 
06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.559348 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-s5wml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df2ae20-0aeb-4b1e-a408-c1903e061833\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwcm6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwcm6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:36Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-s5wml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:13Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.638698 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.638746 4998 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.638759 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.638782 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.638846 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:13Z","lastTransitionTime":"2026-02-03T06:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.741112 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.741145 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.741154 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.741166 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.741174 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:13Z","lastTransitionTime":"2026-02-03T06:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.820681 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-dm4vz_2cba0dd3-b238-4ad4-9517-e2bf7d30b635/kube-multus/0.log" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.820768 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-dm4vz" event={"ID":"2cba0dd3-b238-4ad4-9517-e2bf7d30b635","Type":"ContainerStarted","Data":"d6afdba061c0dd83d8135146f145d9e6a1a20d03991ade7d3f9e9a925924de19"} Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.841847 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c8c93fe-ba86-4899-a018-d24fb324de5c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\
\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35ab5e2fc89d8e658f04c9a5ed234dad845dcf70d39e89bd52fed8f84b6c8c68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T06:46:16Z\\\",\\\"message\\\":\\\"W0203 06:46:05.531717 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 06:46:05.532068 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770101165 cert, and key in /tmp/serving-cert-4006352559/serving-signer.crt, /tmp/serving-cert-4006352559/serving-signer.key\\\\nI0203 06:46:05.809216 1 observer_polling.go:159] Starting file observer\\\\nW0203 06:46:05.812530 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 06:46:05.812748 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 06:46:05.815997 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4006352559/tls.crt::/tmp/serving-cert-4006352559/tls.key\\\\\\\"\\\\nF0203 06:46:16.112630 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:13Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.842984 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.843041 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.843064 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.843088 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.843116 4998 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:13Z","lastTransitionTime":"2026-02-03T06:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.857926 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5102bfac-a880-4d5d-9b91-5660ef907547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479c3c16ba3884ad61ade6ebbeb9106ee755a65deb7b35d2f1b5f4286106cf6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://168b94ff897b67371c327429bf5be07500e7f0850442d898d18bd58b56d81378\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a620b1d9fc8b57fa7c51cf93b34dbe13149fba7a8ba0fa6f89a04704cf6bedc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controlle
r\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2aa82451c5d9674cd665f7a0888ae62beaf3007867e70775198813f4ae57eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec2aa82451c5d9674cd665f7a0888ae62beaf3007867e70775198813f4ae57eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:13Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.872552 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:13Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.887883 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e25d5a00-1315-4327-aadd-fd81e45fb023\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d0b49de3f605b693ae64a05177cab0247aa357fafaba47ec21c7982f17c47f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5qjsn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:13Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.906718 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7418b1d-9f7d-48cd-aac4-6a1b85967841\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab345d6c4fe2e5b5bbfc286634ac14e24468d235
603c35cc3c573ad6b15d1b5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T06:46:59Z\\\",\\\"message\\\":\\\"ubernetes/ovnkube-control-plane-749d76644c-g7trx openshift-image-registry/node-ca-crsvv openshift-multus/multus-additional-cni-plugins-5qjsn openshift-network-console/networking-console-plugin-85b44fc459-gdk6g openshift-network-diagnostics/network-check-source-55646444c4-trplf openshift-kube-apiserver/kube-apiserver-crc]\\\\nI0203 06:46:59.679203 6741 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI0203 06:46:59.679227 6741 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0203 06:46:59.679250 6741 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0203 06:46:59.679266 6741 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI0203 06:46:59.679282 6741 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI0203 06:46:59.679293 6741 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0203 06:46:59.679317 6741 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0203 06:46:59.679402 6741 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:58Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-p7b8d_openshift-ovn-kubernetes(f7418b1d-9f7d-48cd-aac4-6a1b85967841)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7b8d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:13Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.921531 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:13Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.935363 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981f8ad138cf566afc1dc984e1549e0dabb353e61469585343b399ef799f2b71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ab581feac753b27611d84de9a135dada6c09b508339e915247f806c6d69db1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v9x5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:13Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.946308 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.946353 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.946365 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.946385 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.946396 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:13Z","lastTransitionTime":"2026-02-03T06:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.946970 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-crsvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16316a01-0118-4b01-81cb-13e869b62484\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f9d449e60417a58eed750cbff1f8c1c97a740e68bbd349f3e8b98f64e749d12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfq5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-crsvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:13Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.971556 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f202cc16-8c9d-4e03-bd1d-0716f02a8ee7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://413ae0620494cf4c5807c78ecdfda671fd63b7aaefa96b63564212f57b34f9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4741ef3fd48469b5eef0e17e68ef7b20a3929017ff3939ee975b8a3751cfe965\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b741c846b0b5468ec9b52e16778e89632de68f37c0bd5b1fc375cc019a8c959c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f5dcbfe7ff012e1060f79ff8f126f7f86d92d4
cd113f390c41f3bdbb7e9baf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4248f5a0e550f2e4e6aa126875469ca31b11f40cf885b3d636c4d97925cb7fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:13Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:13 crc kubenswrapper[4998]: I0203 06:47:13.986870 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee532c209b30e3e2e21dc079ea01aa8bde392a9f42452a24ad5a0c34ca81632d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:13Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.001877 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d22be91149878dccbc8e538045eaf67301beefef286fd92c7e167337930f99b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0c3a3dd3295e8c1865539192e21489cd44235205b1b770db8a099907473d290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-02-03T06:47:13Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.016036 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d754bb9fdbc001ab9d9a1ce51b99180c387ebda84e0a5b4d1fe9af76c6c258fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:14Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.025841 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-plgnf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0226ecd8c9fa202b4c46cf30a396c7bd55814fe45ec536e4fff2c0f09d6aa371\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5z4jf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-plgnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:14Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.039767 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48b7853e-6cfe-4818-8635-2b12f2be8097\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://66250fee433e0a4da7f8a56ca4c5299eed2c156035640d6ef9e401c81a60abfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3631ce704525fba5e76870d8f108ed0e5d21e9b061d2ffc99b84c1c4755aa0e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d261e399c0e0ab21edf85442c34085baa35a4d89963fe29dfc815ec96172d14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4be2f3e13f93718e31ed1d5f95cc05eb5e2996825afabed9e742a839cf2949a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:14Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.048638 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.048674 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.048683 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.048698 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.048707 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:14Z","lastTransitionTime":"2026-02-03T06:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.050664 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:14Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.062493 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dm4vz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:47:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:47:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6afdba061c0dd83d8135146f145d9e6a1a20d03991ade7d3f9e9a925924de19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91ddb23bc0f787c0874522cb2f92e61f895a03595a116248d27fbc2030a1cf92\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T06:47:11Z\\\",\\\"message\\\":\\\"2026-02-03T06:46:26+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_6be5017b-98fb-4a0e-90b4-8f089f26b264\\\\n2026-02-03T06:46:26+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_6be5017b-98fb-4a0e-90b4-8f089f26b264 to /host/opt/cni/bin/\\\\n2026-02-03T06:46:26Z [verbose] multus-daemon started\\\\n2026-02-03T06:46:26Z [verbose] Readiness Indicator file check\\\\n2026-02-03T06:47:11Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:47:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t8ch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dm4vz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:14Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.076064 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-g7trx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"435a5c9b-684c-42ee-9519-13c14510718e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://994c5aef2744f3eed8f7fdbbe4836e8a63c096f5ec1ef330a75f626e54ac2dad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssscp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://58610e96b03ce7c48ff10de9c3655300c6d3a0ee834827f4d06a912b72693735\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssscp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-g7trx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:14Z is after 2025-08-24T17:21:41Z" Feb 03 
06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.087375 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-s5wml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df2ae20-0aeb-4b1e-a408-c1903e061833\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwcm6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwcm6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:36Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-s5wml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:14Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.150613 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.150652 4998 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.150664 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.150680 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.150692 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:14Z","lastTransitionTime":"2026-02-03T06:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.253411 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.253467 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.253486 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.253508 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.253526 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:14Z","lastTransitionTime":"2026-02-03T06:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.356300 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.356353 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.356362 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.356374 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.356383 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:14Z","lastTransitionTime":"2026-02-03T06:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.426754 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-s5wml" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.426808 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.426833 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:47:14 crc kubenswrapper[4998]: E0203 06:47:14.427179 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 06:47:14 crc kubenswrapper[4998]: E0203 06:47:14.426992 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s5wml" podUID="7df2ae20-0aeb-4b1e-a408-c1903e061833" Feb 03 06:47:14 crc kubenswrapper[4998]: E0203 06:47:14.427296 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.426833 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:47:14 crc kubenswrapper[4998]: E0203 06:47:14.427516 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.454831 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 21:16:45.057285198 +0000 UTC Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.463128 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.463233 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.463279 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.463326 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.463338 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:14Z","lastTransitionTime":"2026-02-03T06:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.565087 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.565128 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.565140 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.565158 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.565171 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:14Z","lastTransitionTime":"2026-02-03T06:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.667938 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.667982 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.667998 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.668013 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.668023 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:14Z","lastTransitionTime":"2026-02-03T06:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.771030 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.771071 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.771079 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.771093 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.771101 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:14Z","lastTransitionTime":"2026-02-03T06:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.873068 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.873139 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.873156 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.873173 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.873185 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:14Z","lastTransitionTime":"2026-02-03T06:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.975709 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.975818 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.975832 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.975846 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:14 crc kubenswrapper[4998]: I0203 06:47:14.975858 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:14Z","lastTransitionTime":"2026-02-03T06:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.078366 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.078421 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.078438 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.078462 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.078479 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:15Z","lastTransitionTime":"2026-02-03T06:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.180922 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.180961 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.180971 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.180986 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.180997 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:15Z","lastTransitionTime":"2026-02-03T06:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.283266 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.283329 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.283342 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.283361 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.283373 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:15Z","lastTransitionTime":"2026-02-03T06:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.385968 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.386042 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.386057 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.386072 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.386083 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:15Z","lastTransitionTime":"2026-02-03T06:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.455746 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-29 20:46:36.952706372 +0000 UTC Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.489379 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.489471 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.489483 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.489503 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.489515 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:15Z","lastTransitionTime":"2026-02-03T06:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.592501 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.592565 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.592579 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.592626 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.592640 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:15Z","lastTransitionTime":"2026-02-03T06:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.695254 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.695305 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.695318 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.695374 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.695388 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:15Z","lastTransitionTime":"2026-02-03T06:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.798066 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.798116 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.798131 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.798190 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.798227 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:15Z","lastTransitionTime":"2026-02-03T06:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.900295 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.900345 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.900360 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.900384 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:15 crc kubenswrapper[4998]: I0203 06:47:15.900400 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:15Z","lastTransitionTime":"2026-02-03T06:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.003294 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.003333 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.003344 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.003360 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.003371 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:16Z","lastTransitionTime":"2026-02-03T06:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.106218 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.106266 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.106275 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.106290 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.106299 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:16Z","lastTransitionTime":"2026-02-03T06:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.208891 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.208949 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.208961 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.208983 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.208997 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:16Z","lastTransitionTime":"2026-02-03T06:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.311336 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.311380 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.311400 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.311424 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.311436 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:16Z","lastTransitionTime":"2026-02-03T06:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.413445 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.413495 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.413511 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.413527 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.413538 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:16Z","lastTransitionTime":"2026-02-03T06:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.426985 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.427011 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 06:47:16 crc kubenswrapper[4998]: E0203 06:47:16.427083 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.427207 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:47:16 crc kubenswrapper[4998]: E0203 06:47:16.427271 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.427316 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s5wml" Feb 03 06:47:16 crc kubenswrapper[4998]: E0203 06:47:16.427429 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 06:47:16 crc kubenswrapper[4998]: E0203 06:47:16.427583 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s5wml" podUID="7df2ae20-0aeb-4b1e-a408-c1903e061833" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.456193 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 16:17:25.971139862 +0000 UTC Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.516486 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.516531 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.516540 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.517139 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.517167 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:16Z","lastTransitionTime":"2026-02-03T06:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.620194 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.620270 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.620291 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.620318 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.620338 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:16Z","lastTransitionTime":"2026-02-03T06:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.722822 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.722866 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.722879 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.722894 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.722904 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:16Z","lastTransitionTime":"2026-02-03T06:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.825848 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.825911 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.825922 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.825950 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.825968 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:16Z","lastTransitionTime":"2026-02-03T06:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.929353 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.929430 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.929448 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.929473 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:16 crc kubenswrapper[4998]: I0203 06:47:16.929493 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:16Z","lastTransitionTime":"2026-02-03T06:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.032611 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.032682 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.032701 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.032727 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.032749 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:17Z","lastTransitionTime":"2026-02-03T06:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.135471 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.135512 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.135523 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.135538 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.135550 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:17Z","lastTransitionTime":"2026-02-03T06:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.238254 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.238310 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.238326 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.238350 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.238368 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:17Z","lastTransitionTime":"2026-02-03T06:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.341624 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.341674 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.341685 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.341704 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.341715 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:17Z","lastTransitionTime":"2026-02-03T06:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.443606 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.443646 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.443655 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.443670 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.443680 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:17Z","lastTransitionTime":"2026-02-03T06:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.456940 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 13:14:14.443285275 +0000 UTC Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.546133 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.546185 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.546197 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.546215 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.546229 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:17Z","lastTransitionTime":"2026-02-03T06:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.649157 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.649230 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.649255 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.649286 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.649328 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:17Z","lastTransitionTime":"2026-02-03T06:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.752057 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.752103 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.752114 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.752129 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.752139 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:17Z","lastTransitionTime":"2026-02-03T06:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.854853 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.854883 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.854892 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.854906 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.854918 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:17Z","lastTransitionTime":"2026-02-03T06:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.957812 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.957852 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.957864 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.957881 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:17 crc kubenswrapper[4998]: I0203 06:47:17.957892 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:17Z","lastTransitionTime":"2026-02-03T06:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.060287 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.060311 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.060320 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.060333 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.060341 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:18Z","lastTransitionTime":"2026-02-03T06:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.162838 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.162881 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.162894 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.162912 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.162923 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:18Z","lastTransitionTime":"2026-02-03T06:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.265519 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.265617 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.265645 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.265681 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.265706 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:18Z","lastTransitionTime":"2026-02-03T06:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.369186 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.369250 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.369268 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.369317 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.369335 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:18Z","lastTransitionTime":"2026-02-03T06:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.427219 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.427323 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.427417 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:47:18 crc kubenswrapper[4998]: E0203 06:47:18.427415 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.427249 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s5wml" Feb 03 06:47:18 crc kubenswrapper[4998]: E0203 06:47:18.427605 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 06:47:18 crc kubenswrapper[4998]: E0203 06:47:18.427736 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-s5wml" podUID="7df2ae20-0aeb-4b1e-a408-c1903e061833" Feb 03 06:47:18 crc kubenswrapper[4998]: E0203 06:47:18.427906 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.457557 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-03 19:25:22.539193639 +0000 UTC Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.472757 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.472876 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.472902 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.472933 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.472956 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:18Z","lastTransitionTime":"2026-02-03T06:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.575902 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.575968 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.575989 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.576017 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.576038 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:18Z","lastTransitionTime":"2026-02-03T06:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.678916 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.678967 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.678978 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.678993 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.679007 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:18Z","lastTransitionTime":"2026-02-03T06:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.782476 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.782531 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.782541 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.782559 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.782574 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:18Z","lastTransitionTime":"2026-02-03T06:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.886034 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.886087 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.886099 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.886117 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.886130 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:18Z","lastTransitionTime":"2026-02-03T06:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.989001 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.989092 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.989116 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.989145 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:18 crc kubenswrapper[4998]: I0203 06:47:18.989167 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:18Z","lastTransitionTime":"2026-02-03T06:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.092442 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.092512 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.092534 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.092561 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.092582 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:19Z","lastTransitionTime":"2026-02-03T06:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.195971 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.196041 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.196058 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.196082 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.196099 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:19Z","lastTransitionTime":"2026-02-03T06:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.299101 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.299152 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.299162 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.299178 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.299193 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:19Z","lastTransitionTime":"2026-02-03T06:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.378600 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.378638 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.378654 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.378670 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.378681 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:19Z","lastTransitionTime":"2026-02-03T06:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:19 crc kubenswrapper[4998]: E0203 06:47:19.393051 4998 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:47:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:47:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:47:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:47:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:47:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:47:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T06:47:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T06:47:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29c0cf50-7633-4e19-9ce0-3cf9f0cae181\\\",\\\"systemUUID\\\":\\\"2c417530-aeae-4e8e-ac67-d425307cb93c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:19Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.397480 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.397514 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.397524 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.397538 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.397549 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:19Z","lastTransitionTime":"2026-02-03T06:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:19 crc kubenswrapper[4998]: E0203 06:47:19.409975 4998 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [node-status patch payload omitted: byte-for-byte identical to the preceding attempt] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:19Z is after 2025-08-24T17:21:41Z"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.414393 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.414445 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
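[Editor's note, not part of the original log: each retry above serializes the same strategic-merge patch into the err string, with the JSON string-escaped. The patch carries the condition ordering directive ($setElementOrder/conditions), the allocatable/capacity figures, the node's full image list, and nodeInfo. A minimal Go sketch, under the assumption that a fragment of the payload has been copied out of the log with the escaping removed, that re-indents it for inspection:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"log"
)

func main() {
	// Hand-copied fragment of the patch body from the err string above,
	// with the log's string escaping removed; only the condition-ordering
	// directive is reproduced here.
	patch := `{"status":{"$setElementOrder/conditions":[{"type":"MemoryPressure"},{"type":"DiskPressure"},{"type":"PIDPressure"},{"type":"Ready"}]}}`

	var out bytes.Buffer
	if err := json.Indent(&out, []byte(patch), "", "  "); err != nil {
		log.Fatalf("fragment is not valid JSON: %v", err)
	}
	fmt.Println(out.String())
}

The same re-indentation applies to the whole payload; nothing in it changes between attempts, which is why the duplicates are elided below.]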
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.414459 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.414477 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.414488 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:19Z","lastTransitionTime":"2026-02-03T06:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:19 crc kubenswrapper[4998]: E0203 06:47:19.426931 4998 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [node-status patch payload omitted: byte-for-byte identical to the preceding attempt] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:19Z is after 2025-08-24T17:21:41Z"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.430347 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.430387 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.430398 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.430414 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.430424 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:19Z","lastTransitionTime":"2026-02-03T06:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:19 crc kubenswrapper[4998]: E0203 06:47:19.443107 4998 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [node-status patch payload omitted: byte-for-byte identical to the preceding attempt] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:19Z is after 2025-08-24T17:21:41Z"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.446054 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.446091 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.446103 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.446121 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.446133 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:19Z","lastTransitionTime":"2026-02-03T06:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:19 crc kubenswrapper[4998]: E0203 06:47:19.457185 4998 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [node-status patch payload omitted: byte-for-byte identical to the preceding attempt] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:19Z is after 2025-08-24T17:21:41Z"
Feb 03 06:47:19 crc kubenswrapper[4998]: E0203 06:47:19.457393 4998 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.457892 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 15:09:54.030452289 +0000 UTC
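[Editor's note, not part of the original log: all five patch attempts above fail identically because the serving certificate of the node.network-node-identity.openshift.io webhook expired on 2025-08-24T17:21:41Z, long before the node's current time of 2026-02-03T06:47:19Z, so the kubelet gives up once its retry budget is exhausted ("update node status exceeds retry count"). A minimal Go sketch for confirming this from the node, assuming the webhook is still listening on 127.0.0.1:9743 (the address taken from the Post URL in the errors above):

package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	// InsecureSkipVerify: the point is to read the expired certificate, so
	// skip the verification step that the kubelet's client correctly fails on.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		log.Fatalf("dial failed: %v", err)
	}
	defer conn.Close()

	now := time.Now()
	for _, cert := range conn.ConnectionState().PeerCertificates {
		fmt.Printf("subject=%q notBefore=%s notAfter=%s expired=%v\n",
			cert.Subject.String(),
			cert.NotBefore.Format(time.RFC3339),
			cert.NotAfter.Format(time.RFC3339),
			now.After(cert.NotAfter))
	}
}

Against this node the expected output would show notAfter=2025-08-24T17:21:41Z for the leaf certificate, matching the x509 verification failure logged in each retry.]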
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.459133 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.459160 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.459171 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.459187 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.459199 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:19Z","lastTransitionTime":"2026-02-03T06:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.562074 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.562134 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.562151 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.562219 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.562238 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:19Z","lastTransitionTime":"2026-02-03T06:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.665924 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.666072 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.666099 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.666130 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.666153 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:19Z","lastTransitionTime":"2026-02-03T06:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.769316 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.769379 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.769396 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.769420 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.769438 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:19Z","lastTransitionTime":"2026-02-03T06:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.872891 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.872976 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.873002 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.873033 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.873055 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:19Z","lastTransitionTime":"2026-02-03T06:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.975881 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.975956 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.975976 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.976003 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:19 crc kubenswrapper[4998]: I0203 06:47:19.976024 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:19Z","lastTransitionTime":"2026-02-03T06:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.078574 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.078619 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.078634 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.078652 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.078664 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:20Z","lastTransitionTime":"2026-02-03T06:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.181769 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.181836 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.181848 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.181867 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.181881 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:20Z","lastTransitionTime":"2026-02-03T06:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.284746 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.284839 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.284864 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.284892 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.284912 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:20Z","lastTransitionTime":"2026-02-03T06:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.388310 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.388360 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.388370 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.388387 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.388398 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:20Z","lastTransitionTime":"2026-02-03T06:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.426642 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.426642 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:47:20 crc kubenswrapper[4998]: E0203 06:47:20.426774 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 06:47:20 crc kubenswrapper[4998]: E0203 06:47:20.426881 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.426883 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s5wml" Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.426921 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:47:20 crc kubenswrapper[4998]: E0203 06:47:20.426962 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s5wml" podUID="7df2ae20-0aeb-4b1e-a408-c1903e061833" Feb 03 06:47:20 crc kubenswrapper[4998]: E0203 06:47:20.427137 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.458379 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 23:13:40.119741946 +0000 UTC Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.490904 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.490941 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.490954 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.491011 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.491025 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:20Z","lastTransitionTime":"2026-02-03T06:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.595364 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.595439 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.595458 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.595481 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.595501 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:20Z","lastTransitionTime":"2026-02-03T06:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.698645 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.698695 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.698709 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.698729 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.698742 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:20Z","lastTransitionTime":"2026-02-03T06:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.802021 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.802282 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.802391 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.802518 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.802597 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:20Z","lastTransitionTime":"2026-02-03T06:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.905387 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.905459 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.905485 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.905513 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:20 crc kubenswrapper[4998]: I0203 06:47:20.905531 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:20Z","lastTransitionTime":"2026-02-03T06:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.009054 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.009466 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.009631 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.009767 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.009953 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:21Z","lastTransitionTime":"2026-02-03T06:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.114013 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.114062 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.114078 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.114102 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.114119 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:21Z","lastTransitionTime":"2026-02-03T06:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.217650 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.217717 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.217734 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.217758 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.217776 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:21Z","lastTransitionTime":"2026-02-03T06:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.321540 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.321604 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.321622 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.321649 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.321666 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:21Z","lastTransitionTime":"2026-02-03T06:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.424576 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.424912 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.425095 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.425226 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.425359 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:21Z","lastTransitionTime":"2026-02-03T06:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.458477 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 11:32:52.909480004 +0000 UTC Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.528424 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.529212 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.529344 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.529472 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.529597 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:21Z","lastTransitionTime":"2026-02-03T06:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.632027 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.632063 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.632072 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.632088 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.632098 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:21Z","lastTransitionTime":"2026-02-03T06:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.735233 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.735449 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.735575 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.735690 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.735777 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:21Z","lastTransitionTime":"2026-02-03T06:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.838529 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.838579 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.838592 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.838613 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.838627 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:21Z","lastTransitionTime":"2026-02-03T06:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.941364 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.941728 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.941900 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.942051 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:21 crc kubenswrapper[4998]: I0203 06:47:21.942202 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:21Z","lastTransitionTime":"2026-02-03T06:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.045048 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.045104 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.045119 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.045142 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.045159 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:22Z","lastTransitionTime":"2026-02-03T06:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.147311 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.147351 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.147363 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.147379 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.147390 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:22Z","lastTransitionTime":"2026-02-03T06:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.250477 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.250507 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.250516 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.250530 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.250540 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:22Z","lastTransitionTime":"2026-02-03T06:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.352385 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.352427 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.352442 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.352455 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.352465 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:22Z","lastTransitionTime":"2026-02-03T06:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.427420 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s5wml" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.427420 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.427541 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.427591 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:47:22 crc kubenswrapper[4998]: E0203 06:47:22.428336 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s5wml" podUID="7df2ae20-0aeb-4b1e-a408-c1903e061833" Feb 03 06:47:22 crc kubenswrapper[4998]: E0203 06:47:22.428892 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 06:47:22 crc kubenswrapper[4998]: E0203 06:47:22.428978 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 06:47:22 crc kubenswrapper[4998]: E0203 06:47:22.428727 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.445002 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:22Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.454501 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.454566 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.454583 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.454605 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.454623 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:22Z","lastTransitionTime":"2026-02-03T06:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.458972 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-01 20:51:44.164362243 +0000 UTC Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.461442 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da1f1740-2fdd-4e7d-a740-039b8d39cfcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981f8ad138cf566afc1dc984e1549e0dabb353e61469585343b399ef799f2b71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ab581feac753b27611d84de9a135dada6c09b508339e915247f806c6d69db1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2hj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}
\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v9x5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:22Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.476016 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee532c209b30e3e2e21dc079ea01aa8bde392a9f42452a24ad5a0c34ca81632d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:22Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.493262 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d22be91149878dccbc8e538045eaf67301beefef286fd92c7e167337930f99b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0c3a3dd3295e8c1865539192e21489cd44235205b1b770db8a099907473d290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:22Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.505417 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d754bb9fdbc001ab9d9a1ce51b99180c387ebda84e0a5b4d1fe9af76c6c258fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:22Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.523037 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-plgnf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3ecb2ed-c8b2-4932-aa75-a45f010cd2d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0226ecd8c9fa202b4c46cf30a396c7bd55814fe45ec536e4fff2c0f09d6aa371\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5z4jf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-plgnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:22Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.534201 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-crsvv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16316a01-0118-4b01-81cb-13e869b62484\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f9d449e60417a58eed750cbff1f8c1c97a740e68bbd349f3e8b98f64e749d12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfq5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:22Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-crsvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:22Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.556958 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.557200 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.557279 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.557362 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.557505 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:22Z","lastTransitionTime":"2026-02-03T06:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.556873 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f202cc16-8c9d-4e03-bd1d-0716f02a8ee7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://413ae0620494cf4c5807c78ecdfda671fd63b7aaefa96b63564212f57b34f9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4741ef3fd48469b5eef0e17e68ef7b20a3929017ff3939ee975b8a3751cfe965\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b741c846b0b5468ec9b52e16778e89632de68f37c0bd5b1fc375cc019a8c959c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f5dcbfe7ff012e1060f79ff8f126f7f86d92d4cd113f390c41f3bdbb7e9baf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4248f5a0e550f2e4e6aa126875469ca31b11f40cf885b3d636c4d97925cb7fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d60ec0ba44cd62839518cbf7f3e509cae976f2e2479fe9b1df6f030428556de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://dc5eaef0b3cae83a33e7a98534f15da6d83ab5c91de41cf06e7cf714819dacbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35c2280213e1938b4674047c14b09df3120e06231b6395a3d536ac76a7bd100d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:22Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.569860 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dm4vz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2cba0dd3-b238-4ad4-9517-e2bf7d30b635\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:47:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:47:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6afdba061c0dd83d8135146f145d9e6a1a20d03991ade7d3f9e9a925924de19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91ddb23bc0f787c0874522cb2f92e61f895a03595a116248d27fbc2030a1cf92\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T06:47:11Z\\\",\\\"message\\\":\\\"2026-02-03T06:46:26+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_6be5017b-98fb-4a0e-90b4-8f089f26b264\\\\n2026-02-03T06:46:26+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_6be5017b-98fb-4a0e-90b4-8f089f26b264 to /host/opt/cni/bin/\\\\n2026-02-03T06:46:26Z [verbose] multus-daemon started\\\\n2026-02-03T06:46:26Z [verbose] Readiness Indicator file check\\\\n2026-02-03T06:47:11Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:47:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t8ch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dm4vz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:22Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.578823 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-g7trx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"435a5c9b-684c-42ee-9519-13c14510718e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://994c5aef2744f3eed8f7fdbbe4836e8a63c096f5ec1ef330a75f626e54ac2dad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssscp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://58610e96b03ce7c48ff10de9c3655300c6d3a0ee834827f4d06a912b72693735\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssscp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-g7trx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:22Z is after 2025-08-24T17:21:41Z" Feb 03 
06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.590052 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-s5wml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df2ae20-0aeb-4b1e-a408-c1903e061833\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwcm6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwcm6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:36Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-s5wml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:22Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.602919 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48b7853e-6cfe-4818-8635-2b12f2be8097\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://66250fee433e0a4da7f8a56ca4c5299eed2c156035640d6ef9e401c81a60abfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3631ce704525fba5e76870d8f108ed0e5d21e9b061d2ffc99b84c1c4755aa0e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d261e399c0e0ab21edf85442c34085baa35a4d89963fe29dfc815ec96172d14\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4be2f3e13f93718e31ed1d5f95cc05eb5e2996825afabed9e742a839cf2949a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:22Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.615765 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:22Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.627000 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:22Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.640678 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e25d5a00-1315-4327-aadd-fd81e45fb023\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d0b49de3f605b693ae64a05177cab0247aa357fafaba47ec21c7982f17c47f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c66df0a5def8d9147ebed381fa91242dea6208c34850a8214dad4a31191d7d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece71ebf17f81988459f6c9179f8fa593a51f546f8cd76e242ca7c81f4909ea7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd6007c3e9c1ef200c7bdf51593fc5ae902498bd3c2f37cec231f4eee98cc4a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457ac2907cc83b169513bc4a466b5cfa18ea74a4029e14093ab4a60a21c34546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8db384636cb8da6598e692883abc42a2a891b5205043982191777bbe1f09185f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f34bbade3adb7c58e64204297785b40b55d8fad166f03bd49a12d62a1db8f63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmgl6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5qjsn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:22Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.659958 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.659992 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.660002 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.660015 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.660024 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:22Z","lastTransitionTime":"2026-02-03T06:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.661843 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7418b1d-9f7d-48cd-aac4-6a1b85967841\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T06:46:59Z\\\",\\\"message\\\":\\\"ubernetes/ovnkube-control-plane-749d76644c-g7trx openshift-image-registry/node-ca-crsvv openshift-multus/multus-additional-cni-plugins-5qjsn openshift-network-console/networking-console-plugin-85b44fc459-gdk6g openshift-network-diagnostics/network-check-source-55646444c4-trplf openshift-kube-apiserver/kube-apiserver-crc]\\\\nI0203 06:46:59.679203 6741 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI0203 06:46:59.679227 6741 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0203 06:46:59.679250 6741 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0203 06:46:59.679266 6741 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI0203 06:46:59.679282 6741 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI0203 06:46:59.679293 6741 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0203 06:46:59.679317 6741 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0203 06:46:59.679402 6741 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:58Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-p7b8d_openshift-ovn-kubernetes(f7418b1d-9f7d-48cd-aac4-6a1b85967841)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9zmz2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7b8d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:22Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.674323 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c8c93fe-ba86-4899-a018-d24fb324de5c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35ab5e2fc89d8e658f04c9a5ed234dad845dcf70d39e89bd52fed8f84b6c8c68\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T06:46:16Z\\\",\\\"message\\\":\\\"W0203 06:46:05.531717 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 06:46:05.532068 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770101165 cert, and key in /tmp/serving-cert-4006352559/serving-signer.crt, /tmp/serving-cert-4006352559/serving-signer.key\\\\nI0203 06:46:05.809216 1 observer_polling.go:159] Starting file observer\\\\nW0203 06:46:05.812530 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 06:46:05.812748 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 06:46:05.815997 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4006352559/tls.crt::/tmp/serving-cert-4006352559/tls.key\\\\\\\"\\\\nF0203 06:46:16.112630 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:22Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.685981 4998 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5102bfac-a880-4d5d-9b91-5660ef907547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T06:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479c3c16ba3884ad61ade6ebbeb9106ee755a65deb7b35d2f1b5f4286106cf6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://168b94ff897b67371c327429bf5be07500e7f0850442d898d18bd58b56d81378\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a620b1d9fc8b57fa7c51cf93b34dbe13149fba7a8ba0fa6f89a04704cf6bedc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2aa82451c5d9674cd665f7a0888ae62beaf3007867e70775198813f4ae57eb\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec2aa82451c5d9674cd665f7a0888ae62beaf3007867e70775198813f4ae57eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T06:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T06:46:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T06:46:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T06:47:22Z is after 2025-08-24T17:21:41Z" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.763217 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.763276 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.763289 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.763309 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.763322 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:22Z","lastTransitionTime":"2026-02-03T06:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.866690 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.866745 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.866762 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.866821 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.866839 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:22Z","lastTransitionTime":"2026-02-03T06:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.968848 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.968881 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.968888 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.968901 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:22 crc kubenswrapper[4998]: I0203 06:47:22.968909 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:22Z","lastTransitionTime":"2026-02-03T06:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.072211 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.072273 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.072285 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.072306 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.072320 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:23Z","lastTransitionTime":"2026-02-03T06:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.175189 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.175257 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.175279 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.175309 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.175330 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:23Z","lastTransitionTime":"2026-02-03T06:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.281459 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.281496 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.281505 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.281518 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.281526 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:23Z","lastTransitionTime":"2026-02-03T06:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.384026 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.384075 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.384089 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.384105 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.384116 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:23Z","lastTransitionTime":"2026-02-03T06:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.459946 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-03 09:27:09.238279136 +0000 UTC
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.486819 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.486869 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.486883 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.486899 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.486908 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:23Z","lastTransitionTime":"2026-02-03T06:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.589225 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.589264 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.589274 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.589287 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.589297 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:23Z","lastTransitionTime":"2026-02-03T06:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.691275 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.691315 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.691325 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.691339 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.691349 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:23Z","lastTransitionTime":"2026-02-03T06:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.794142 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.794216 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.794228 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.794267 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.794281 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:23Z","lastTransitionTime":"2026-02-03T06:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.896267 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.896321 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.896335 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.896355 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.896370 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:23Z","lastTransitionTime":"2026-02-03T06:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.999438 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.999513 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.999537 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.999568 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:23 crc kubenswrapper[4998]: I0203 06:47:23.999591 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:23Z","lastTransitionTime":"2026-02-03T06:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.102085 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.102136 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.102164 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.102189 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.102205 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:24Z","lastTransitionTime":"2026-02-03T06:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.205077 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.205119 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.205130 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.205146 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.205158 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:24Z","lastTransitionTime":"2026-02-03T06:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.307122 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.307201 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.307226 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.307256 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.307276 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:24Z","lastTransitionTime":"2026-02-03T06:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.409993 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.410025 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.410033 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.410048 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.410057 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:24Z","lastTransitionTime":"2026-02-03T06:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.427143 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.427202 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s5wml"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.427228 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.427286 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 03 06:47:24 crc kubenswrapper[4998]: E0203 06:47:24.427319 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 03 06:47:24 crc kubenswrapper[4998]: E0203 06:47:24.427558 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 03 06:47:24 crc kubenswrapper[4998]: E0203 06:47:24.427743 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s5wml" podUID="7df2ae20-0aeb-4b1e-a408-c1903e061833"
Feb 03 06:47:24 crc kubenswrapper[4998]: E0203 06:47:24.427823 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.440364 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"]
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.460484 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 21:42:24.8425789 +0000 UTC
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.512501 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.512557 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.512575 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.512598 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.512617 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:24Z","lastTransitionTime":"2026-02-03T06:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.616305 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.616394 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.616412 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.616437 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.616454 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:24Z","lastTransitionTime":"2026-02-03T06:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.719224 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.719275 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.719288 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.719305 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.719321 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:24Z","lastTransitionTime":"2026-02-03T06:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.822314 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.822373 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.822388 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.822409 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.822423 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:24Z","lastTransitionTime":"2026-02-03T06:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.926431 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.926503 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.926524 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.926550 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:24 crc kubenswrapper[4998]: I0203 06:47:24.926569 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:24Z","lastTransitionTime":"2026-02-03T06:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.030147 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.030201 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.030217 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.030240 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.030257 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:25Z","lastTransitionTime":"2026-02-03T06:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.132585 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.132618 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.132626 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.132639 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.132648 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:25Z","lastTransitionTime":"2026-02-03T06:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.235389 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.235443 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.235460 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.235482 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.235498 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:25Z","lastTransitionTime":"2026-02-03T06:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.339215 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.339475 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.339559 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.339687 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.339817 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:25Z","lastTransitionTime":"2026-02-03T06:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.443484 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.443556 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.443577 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.443606 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.443629 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:25Z","lastTransitionTime":"2026-02-03T06:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.460860 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 23:01:38.112535795 +0000 UTC
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.546352 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.546742 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.546978 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.547120 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.547257 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:25Z","lastTransitionTime":"2026-02-03T06:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.650335 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.651002 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.651140 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.651247 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.651341 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:25Z","lastTransitionTime":"2026-02-03T06:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.754760 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.754814 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.754822 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.754836 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.754845 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:25Z","lastTransitionTime":"2026-02-03T06:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.858773 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.858885 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.858910 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.858938 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.858957 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:25Z","lastTransitionTime":"2026-02-03T06:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.962243 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.962323 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.962346 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.962376 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:25 crc kubenswrapper[4998]: I0203 06:47:25.962400 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:25Z","lastTransitionTime":"2026-02-03T06:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Has your network provider started?"}
Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.065334 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.065430 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.065449 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.065471 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.065486 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:26Z","lastTransitionTime":"2026-02-03T06:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.168204 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.168258 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.168275 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.168295 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.168311 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:26Z","lastTransitionTime":"2026-02-03T06:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.212975 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 06:47:26 crc kubenswrapper[4998]: E0203 06:47:26.213135 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:48:30.213112409 +0000 UTC m=+148.499806235 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.272135 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.272176 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.272187 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.272203 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.272214 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:26Z","lastTransitionTime":"2026-02-03T06:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.313866 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.313919 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.313955 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.313982 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 03 06:47:26 crc kubenswrapper[4998]: E0203 06:47:26.314081 4998 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
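The failed volume operations above are re-queued with durationBeforeRetry 1m4s, i.e. 64 s: a power-of-two multiple of a sub-second base, consistent with a doubling backoff on repeated failures. A minimal, self-contained sketch of that pattern follows; the base and cap values are assumptions for illustration, not constants taken from kubelet source.

    package main

    import (
    	"fmt"
    	"time"
    )

    // nextRetry doubles the delay after each failed attempt, capped at max.
    // base and max are illustrative assumptions, not kubelet constants.
    func nextRetry(attempt int, base, max time.Duration) time.Duration {
    	d := base
    	for i := 0; i < attempt; i++ {
    		d *= 2
    		if d >= max {
    			return max
    		}
    	}
    	return d
    }

    func main() {
    	for a := 0; a <= 8; a++ {
    		fmt.Printf("attempt %d -> %v\n", a, nextRetry(a, 500*time.Millisecond, 2*time.Minute))
    	}
    	// With a 500 ms base, attempt 7 yields 1m4s, matching the retry delay logged above.
    }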
"openshift-network-console"/"networking-console-plugin-cert" not registered Feb 03 06:47:26 crc kubenswrapper[4998]: E0203 06:47:26.314084 4998 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 03 06:47:26 crc kubenswrapper[4998]: E0203 06:47:26.314107 4998 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 03 06:47:26 crc kubenswrapper[4998]: E0203 06:47:26.314119 4998 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 06:47:26 crc kubenswrapper[4998]: E0203 06:47:26.314129 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-03 06:48:30.314116658 +0000 UTC m=+148.600810464 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 03 06:47:26 crc kubenswrapper[4998]: E0203 06:47:26.314147 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-03 06:48:30.314138218 +0000 UTC m=+148.600832024 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 06:47:26 crc kubenswrapper[4998]: E0203 06:47:26.314166 4998 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 03 06:47:26 crc kubenswrapper[4998]: E0203 06:47:26.314207 4998 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 03 06:47:26 crc kubenswrapper[4998]: E0203 06:47:26.314255 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-03 06:48:30.314226421 +0000 UTC m=+148.600920267 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 03 06:47:26 crc kubenswrapper[4998]: E0203 06:47:26.314261 4998 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 03 06:47:26 crc kubenswrapper[4998]: E0203 06:47:26.314284 4998 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 06:47:26 crc kubenswrapper[4998]: E0203 06:47:26.314359 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-03 06:48:30.314336054 +0000 UTC m=+148.601030020 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.375024 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.375070 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.375083 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.375099 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.375110 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:26Z","lastTransitionTime":"2026-02-03T06:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.427407 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.427525 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.427440 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-s5wml" Feb 03 06:47:26 crc kubenswrapper[4998]: E0203 06:47:26.427612 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 06:47:26 crc kubenswrapper[4998]: E0203 06:47:26.427685 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.428024 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 06:47:26 crc kubenswrapper[4998]: E0203 06:47:26.428473 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 06:47:26 crc kubenswrapper[4998]: E0203 06:47:26.428556 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-s5wml" podUID="7df2ae20-0aeb-4b1e-a408-c1903e061833" Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.429048 4998 scope.go:117] "RemoveContainer" containerID="ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d" Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.461163 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 03:08:05.850819933 +0000 UTC Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.478188 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.478233 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.478244 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.478260 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.478272 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:26Z","lastTransitionTime":"2026-02-03T06:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.581830 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.582242 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.582368 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.582445 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.582570 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:26Z","lastTransitionTime":"2026-02-03T06:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.685011 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.685083 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.685104 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.685135 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.685161 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:26Z","lastTransitionTime":"2026-02-03T06:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.788827 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.788883 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.788893 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.788910 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.788921 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:26Z","lastTransitionTime":"2026-02-03T06:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.871967 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7b8d_f7418b1d-9f7d-48cd-aac4-6a1b85967841/ovnkube-controller/2.log"
Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.877161 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" event={"ID":"f7418b1d-9f7d-48cd-aac4-6a1b85967841","Type":"ContainerStarted","Data":"0405b7789d47d71f97fe3697fd2132fe189ba35e6a1c87912ed75e8814a98284"}
Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.877927 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d"
Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.891152 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.891203 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.891214 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.891240 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.891254 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:26Z","lastTransitionTime":"2026-02-03T06:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.924054 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-plgnf" podStartSLOduration=64.924036339 podStartE2EDuration="1m4.924036339s" podCreationTimestamp="2026-02-03 06:46:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:26.923611926 +0000 UTC m=+85.210305732" watchObservedRunningTime="2026-02-03 06:47:26.924036339 +0000 UTC m=+85.210730145"
Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.946582 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-crsvv" podStartSLOduration=64.946394557 podStartE2EDuration="1m4.946394557s" podCreationTimestamp="2026-02-03 06:46:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:26.935317247 +0000 UTC m=+85.222011053" watchObservedRunningTime="2026-02-03 06:47:26.946394557 +0000 UTC m=+85.233088363"
Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.972677 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=2.9726617060000002 podStartE2EDuration="2.972661706s" podCreationTimestamp="2026-02-03 06:47:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:26.946918374 +0000 UTC m=+85.233612180" watchObservedRunningTime="2026-02-03 06:47:26.972661706 +0000 UTC m=+85.259355512"
Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.987025 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=64.987003817 podStartE2EDuration="1m4.987003817s" podCreationTimestamp="2026-02-03 06:46:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:26.973240624 +0000 UTC m=+85.259934430" watchObservedRunningTime="2026-02-03 06:47:26.987003817 +0000 UTC m=+85.273697623"
Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.993573 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.993621 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.993636 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.993656 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:26 crc kubenswrapper[4998]: I0203 06:47:26.993670 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:26Z","lastTransitionTime":"2026-02-03T06:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.032680 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=62.032660372 podStartE2EDuration="1m2.032660372s" podCreationTimestamp="2026-02-03 06:46:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:27.032158347 +0000 UTC m=+85.318852173" watchObservedRunningTime="2026-02-03 06:47:27.032660372 +0000 UTC m=+85.319354178"
Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.068735 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-dm4vz" podStartSLOduration=65.068713282 podStartE2EDuration="1m5.068713282s" podCreationTimestamp="2026-02-03 06:46:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:27.068398722 +0000 UTC m=+85.355092538" watchObservedRunningTime="2026-02-03 06:47:27.068713282 +0000 UTC m=+85.355407088"
Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.083800 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-g7trx" podStartSLOduration=64.083762555 podStartE2EDuration="1m4.083762555s" podCreationTimestamp="2026-02-03 06:46:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:27.083703963 +0000 UTC m=+85.370397779" watchObservedRunningTime="2026-02-03 06:47:27.083762555 +0000 UTC m=+85.370456361"
Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.095625 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.095678 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.095690 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.095707 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.095719 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:27Z","lastTransitionTime":"2026-02-03T06:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.111258 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" podStartSLOduration=65.111243431 podStartE2EDuration="1m5.111243431s" podCreationTimestamp="2026-02-03 06:46:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:27.110162218 +0000 UTC m=+85.396856044" watchObservedRunningTime="2026-02-03 06:47:27.111243431 +0000 UTC m=+85.397937237"
Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.140977 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=38.140953895 podStartE2EDuration="38.140953895s" podCreationTimestamp="2026-02-03 06:46:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:27.14076584 +0000 UTC m=+85.427459656" watchObservedRunningTime="2026-02-03 06:47:27.140953895 +0000 UTC m=+85.427647701"
Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.141768 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=66.14176081 podStartE2EDuration="1m6.14176081s" podCreationTimestamp="2026-02-03 06:46:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:27.125395617 +0000 UTC m=+85.412089453" watchObservedRunningTime="2026-02-03 06:47:27.14176081 +0000 UTC m=+85.428454616"
Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.170887 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-5qjsn" podStartSLOduration=65.170867616 podStartE2EDuration="1m5.170867616s" podCreationTimestamp="2026-02-03 06:46:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:27.170131753 +0000 UTC m=+85.456825569" watchObservedRunningTime="2026-02-03 06:47:27.170867616 +0000 UTC m=+85.457561422"
Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.196681 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podStartSLOduration=65.19666519 podStartE2EDuration="1m5.19666519s" podCreationTimestamp="2026-02-03 06:46:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:27.196383701 +0000 UTC m=+85.483077527" watchObservedRunningTime="2026-02-03 06:47:27.19666519 +0000 UTC m=+85.483358996"
Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.197698 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.197728 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.197740 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.197752 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
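podStartSLOduration in the entries above is the gap between podCreationTimestamp and watchObservedRunningTime; by the metric's definition it is meant to exclude image-pull time, but with the zero firstStartedPulling/lastFinishedPulling timestamps here it reduces to the plain difference. A quick check against the ovnkube-node-p7b8d numbers, using only values copied from the entry above:

    package main

    import (
    	"fmt"
    	"time"
    )

    func main() {
    	// Timestamps copied from the ovnkube-node-p7b8d entry above.
    	created, _ := time.Parse(time.RFC3339, "2026-02-03T06:46:22Z")
    	running, _ := time.Parse(time.RFC3339Nano, "2026-02-03T06:47:27.111243431Z")
    	fmt.Println(running.Sub(created)) // 1m5.111243431s, matching podStartSLOduration=65.111243431
    }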
"Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.197762 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:27Z","lastTransitionTime":"2026-02-03T06:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.300855 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-s5wml"] Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.301022 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s5wml" Feb 03 06:47:27 crc kubenswrapper[4998]: E0203 06:47:27.301173 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s5wml" podUID="7df2ae20-0aeb-4b1e-a408-c1903e061833" Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.302589 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.302623 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.302632 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.302646 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.302656 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:27Z","lastTransitionTime":"2026-02-03T06:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.405686 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.406042 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.406055 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.406073 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.406087 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:27Z","lastTransitionTime":"2026-02-03T06:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.461892 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-19 18:51:06.171543983 +0000 UTC Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.510455 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.510489 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.510499 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.510516 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.510527 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:27Z","lastTransitionTime":"2026-02-03T06:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
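The same serving certificate (expiring 2026-02-24 05:53:03 UTC) is logged with a different rotation deadline each time it is evaluated (2025-12-09 above, 2025-11-19 here, 2025-12-22 and 2025-12-12 below). That is consistent with client-go's certificate manager re-deriving the deadline as a jittered point late in the certificate's validity window, so each evaluation draws a fresh value. A sketch of that selection; the 70-90% window and the issuance time are assumptions for illustration, not the exact upstream constants:

    package main

    import (
    	"fmt"
    	"math/rand"
    	"time"
    )

    // rotationDeadline picks a random point in the tail of the certificate's
    // validity, in the spirit of client-go's certificate manager (illustrative,
    // not the upstream implementation).
    func rotationDeadline(notBefore, notAfter time.Time, rnd *rand.Rand) time.Time {
    	total := notAfter.Sub(notBefore)
    	frac := 0.7 + 0.2*rnd.Float64() // somewhere in [70%, 90%] of the lifetime (assumption)
    	return notBefore.Add(time.Duration(frac * float64(total)))
    }

    func main() {
    	notAfter, _ := time.Parse(time.RFC3339, "2026-02-24T05:53:03Z")
    	notBefore := notAfter.AddDate(-1, 0, 0) // issuance time is an assumption
    	rnd := rand.New(rand.NewSource(time.Now().UnixNano()))
    	for i := 0; i < 3; i++ {
    		fmt.Println(rotationDeadline(notBefore, notAfter, rnd)) // a new deadline per draw
    	}
    }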
Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.510455 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.510489 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.510499 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.510516 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.510527 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:27Z","lastTransitionTime":"2026-02-03T06:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.613992 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.614043 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.614060 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.614082 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.614101 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:27Z","lastTransitionTime":"2026-02-03T06:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.717075 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.717168 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.717200 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.717232 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.717256 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:27Z","lastTransitionTime":"2026-02-03T06:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.820072 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.820120 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.820132 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.820149 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.820171 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:27Z","lastTransitionTime":"2026-02-03T06:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.923557 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.923587 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.923595 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.923607 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:27 crc kubenswrapper[4998]: I0203 06:47:27.923618 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:27Z","lastTransitionTime":"2026-02-03T06:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.026323 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.026400 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.026422 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.026662 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.026685 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:28Z","lastTransitionTime":"2026-02-03T06:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.129814 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.129886 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.129896 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.129912 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.129923 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:28Z","lastTransitionTime":"2026-02-03T06:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.232085 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.232143 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.232159 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.232185 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.232203 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:28Z","lastTransitionTime":"2026-02-03T06:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.334847 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.334887 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.334896 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.334909 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.334917 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:28Z","lastTransitionTime":"2026-02-03T06:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.426467 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.426510 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.426467 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 03 06:47:28 crc kubenswrapper[4998]: E0203 06:47:28.426607 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 03 06:47:28 crc kubenswrapper[4998]: E0203 06:47:28.426726 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 03 06:47:28 crc kubenswrapper[4998]: E0203 06:47:28.426861 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.436964 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.436993 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.437001 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.437013 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.437024 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:28Z","lastTransitionTime":"2026-02-03T06:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.462566 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 22:09:25.081757876 +0000 UTC
Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.539450 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.539500 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.539515 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.539536 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.539552 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:28Z","lastTransitionTime":"2026-02-03T06:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.642382 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.642421 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.642432 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.642448 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.642459 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:28Z","lastTransitionTime":"2026-02-03T06:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.745009 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.745075 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.745092 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.745117 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.745137 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:28Z","lastTransitionTime":"2026-02-03T06:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.848940 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.849024 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.849049 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.849080 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.849104 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:28Z","lastTransitionTime":"2026-02-03T06:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.951924 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.951985 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.952006 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.952034 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:28 crc kubenswrapper[4998]: I0203 06:47:28.952059 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:28Z","lastTransitionTime":"2026-02-03T06:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.055131 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.055197 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.055223 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.055251 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.055273 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:29Z","lastTransitionTime":"2026-02-03T06:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.157442 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.157488 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.157503 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.157520 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.157533 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:29Z","lastTransitionTime":"2026-02-03T06:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.260557 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.260614 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.260629 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.260651 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.260666 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:29Z","lastTransitionTime":"2026-02-03T06:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.363354 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.363400 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.363482 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.363522 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.363536 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:29Z","lastTransitionTime":"2026-02-03T06:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.426569 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s5wml" Feb 03 06:47:29 crc kubenswrapper[4998]: E0203 06:47:29.426813 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s5wml" podUID="7df2ae20-0aeb-4b1e-a408-c1903e061833" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.463314 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 01:15:56.606259903 +0000 UTC Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.466459 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.466499 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.466512 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.466531 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.466546 4998 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T06:47:29Z","lastTransitionTime":"2026-02-03T06:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.569220 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.569312 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.569350 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.569388 4998 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.569588 4998 kubelet_node_status.go:538] "Fast updating node status as it just became ready" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.608144 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-hj4tg"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.608732 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-hj4tg" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.613594 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-f2w4s"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.614347 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-f2w4s" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.617116 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.617289 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.617388 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.617695 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.618086 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.618135 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.618737 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.618947 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-47pmn"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.619034 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.619389 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-47pmn" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.623051 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.623309 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.623874 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.623912 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-z9wjq"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.624701 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9wjq" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.627120 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.627124 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.628119 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.628332 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-2wmmh"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.628960 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-2wmmh" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.629485 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-w9qkw"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.630544 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-w4dbn"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.631237 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-w4dbn" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.632109 4998 util.go:30] "No sandbox for pod can be found. 
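The reflector.go:368 "Caches populated" entries interleaved above record client-go reflectors finishing their initial list/watch for each Secret and ConfigMap the newly added pods reference. A minimal sketch of the same mechanism with a shared informer follows, assuming in-cluster credentials and borrowing the openshift-apiserver namespace from the log; it is illustrative, not the kubelet's own wiring.

package main

import (
	"context"
	"fmt"
	"time"

	"k8s.io/client-go/informers"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/rest"
	"k8s.io/client-go/tools/cache"
)

func main() {
	// Assumes this runs inside a cluster; out-of-cluster setups would load a
	// kubeconfig instead.
	cfg, err := rest.InClusterConfig()
	if err != nil {
		panic(err)
	}
	clientset := kubernetes.NewForConfigOrDie(cfg)

	// Watch ConfigMaps in one namespace, analogous to the per-object
	// object-"<namespace>"/"<name>" reflectors in the log above.
	factory := informers.NewSharedInformerFactoryWithOptions(
		clientset, 30*time.Second, informers.WithNamespace("openshift-apiserver"))
	cmInformer := factory.Core().V1().ConfigMaps().Informer()

	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()

	factory.Start(ctx.Done())
	// "Caches populated" corresponds to this initial sync completing.
	if !cache.WaitForCacheSync(ctx.Done(), cmInformer.HasSynced) {
		panic("cache never synced")
	}
	fmt.Println("caches populated")
}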
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.632109 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w9qkw"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.642650 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.643042 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.643249 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.643511 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.643723 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.644912 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.645205 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.645328 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.645956 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.647900 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.648034 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.648300 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.648848 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2646589f-a725-4093-a8c6-0f5fed8105ac-service-ca\") pod \"cluster-version-operator-5c965bbfc6-47pmn\" (UID: \"2646589f-a725-4093-a8c6-0f5fed8105ac\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-47pmn"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.648899 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/94dd5306-7adc-46f9-839e-f40d15cb7d7e-audit-dir\") pod \"apiserver-7bbb656c7d-z9wjq\" (UID: \"94dd5306-7adc-46f9-839e-f40d15cb7d7e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9wjq"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.648935 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2efa0050-8a68-418e-9adc-1d9ecc563b11-auth-proxy-config\") pod \"machine-approver-56656f9798-w9qkw\" (UID: \"2efa0050-8a68-418e-9adc-1d9ecc563b11\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w9qkw"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.648967 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/2646589f-a725-4093-a8c6-0f5fed8105ac-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-47pmn\" (UID: \"2646589f-a725-4093-a8c6-0f5fed8105ac\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-47pmn"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.648995 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2646589f-a725-4093-a8c6-0f5fed8105ac-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-47pmn\" (UID: \"2646589f-a725-4093-a8c6-0f5fed8105ac\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-47pmn"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.649024 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5fe24ff5-9d08-432e-8888-ea97d44ec783-config\") pod \"authentication-operator-69f744f599-w4dbn\" (UID: \"5fe24ff5-9d08-432e-8888-ea97d44ec783\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-w4dbn"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.649052 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/94dd5306-7adc-46f9-839e-f40d15cb7d7e-encryption-config\") pod \"apiserver-7bbb656c7d-z9wjq\" (UID: \"94dd5306-7adc-46f9-839e-f40d15cb7d7e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9wjq"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.649080 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wkqfc\" (UniqueName: \"kubernetes.io/projected/94dd5306-7adc-46f9-839e-f40d15cb7d7e-kube-api-access-wkqfc\") pod \"apiserver-7bbb656c7d-z9wjq\" (UID: \"94dd5306-7adc-46f9-839e-f40d15cb7d7e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9wjq"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.649111 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/683b2874-b3e0-4844-a257-583438c3617c-config\") pod \"apiserver-76f77b778f-hj4tg\" (UID: \"683b2874-b3e0-4844-a257-583438c3617c\") " pod="openshift-apiserver/apiserver-76f77b778f-hj4tg"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.649138 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e43c1b92-4c7b-4db9-8363-472ecea3213f-console-oauth-config\") pod \"console-f9d7485db-2wmmh\" (UID: \"e43c1b92-4c7b-4db9-8363-472ecea3213f\") " pod="openshift-console/console-f9d7485db-2wmmh"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.649162 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e43c1b92-4c7b-4db9-8363-472ecea3213f-service-ca\") pod \"console-f9d7485db-2wmmh\" (UID: \"e43c1b92-4c7b-4db9-8363-472ecea3213f\") " pod="openshift-console/console-f9d7485db-2wmmh"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.649204 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5fe24ff5-9d08-432e-8888-ea97d44ec783-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-w4dbn\" (UID: \"5fe24ff5-9d08-432e-8888-ea97d44ec783\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-w4dbn"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.649229 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/683b2874-b3e0-4844-a257-583438c3617c-encryption-config\") pod \"apiserver-76f77b778f-hj4tg\" (UID: \"683b2874-b3e0-4844-a257-583438c3617c\") " pod="openshift-apiserver/apiserver-76f77b778f-hj4tg"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.649252 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.649255 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/94dd5306-7adc-46f9-839e-f40d15cb7d7e-etcd-client\") pod \"apiserver-7bbb656c7d-z9wjq\" (UID: \"94dd5306-7adc-46f9-839e-f40d15cb7d7e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9wjq"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.649356 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2f8c8aee-306f-4517-80a8-61eb2ee886d7-client-ca\") pod \"route-controller-manager-6576b87f9c-f2w4s\" (UID: \"2f8c8aee-306f-4517-80a8-61eb2ee886d7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-f2w4s"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.649375 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.649387 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2efa0050-8a68-418e-9adc-1d9ecc563b11-config\") pod \"machine-approver-56656f9798-w9qkw\" (UID: \"2efa0050-8a68-418e-9adc-1d9ecc563b11\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w9qkw"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.649418 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/2646589f-a725-4093-a8c6-0f5fed8105ac-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-47pmn\" (UID: \"2646589f-a725-4093-a8c6-0f5fed8105ac\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-47pmn"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.649440 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/683b2874-b3e0-4844-a257-583438c3617c-audit\") pod \"apiserver-76f77b778f-hj4tg\" (UID: \"683b2874-b3e0-4844-a257-583438c3617c\") " pod="openshift-apiserver/apiserver-76f77b778f-hj4tg"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.649442 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tpbvt"]
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.649464 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/94dd5306-7adc-46f9-839e-f40d15cb7d7e-audit-policies\") pod \"apiserver-7bbb656c7d-z9wjq\" (UID: \"94dd5306-7adc-46f9-839e-f40d15cb7d7e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9wjq"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.649483 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/e43c1b92-4c7b-4db9-8363-472ecea3213f-console-config\") pod \"console-f9d7485db-2wmmh\" (UID: \"e43c1b92-4c7b-4db9-8363-472ecea3213f\") " pod="openshift-console/console-f9d7485db-2wmmh"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.649508 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e43c1b92-4c7b-4db9-8363-472ecea3213f-console-serving-cert\") pod \"console-f9d7485db-2wmmh\" (UID: \"e43c1b92-4c7b-4db9-8363-472ecea3213f\") " pod="openshift-console/console-f9d7485db-2wmmh"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.649529 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5fe24ff5-9d08-432e-8888-ea97d44ec783-serving-cert\") pod \"authentication-operator-69f744f599-w4dbn\" (UID: \"5fe24ff5-9d08-432e-8888-ea97d44ec783\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-w4dbn"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.649554 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/683b2874-b3e0-4844-a257-583438c3617c-etcd-client\") pod \"apiserver-76f77b778f-hj4tg\" (UID: \"683b2874-b3e0-4844-a257-583438c3617c\") " pod="openshift-apiserver/apiserver-76f77b778f-hj4tg"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.649580 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/94dd5306-7adc-46f9-839e-f40d15cb7d7e-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-z9wjq\" (UID: \"94dd5306-7adc-46f9-839e-f40d15cb7d7e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9wjq"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.649595 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.649619 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e43c1b92-4c7b-4db9-8363-472ecea3213f-trusted-ca-bundle\") pod \"console-f9d7485db-2wmmh\" (UID: \"e43c1b92-4c7b-4db9-8363-472ecea3213f\") " pod="openshift-console/console-f9d7485db-2wmmh"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.649662 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/94dd5306-7adc-46f9-839e-f40d15cb7d7e-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-z9wjq\" (UID: \"94dd5306-7adc-46f9-839e-f40d15cb7d7e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9wjq"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.649693 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5fe24ff5-9d08-432e-8888-ea97d44ec783-service-ca-bundle\") pod \"authentication-operator-69f744f599-w4dbn\" (UID: \"5fe24ff5-9d08-432e-8888-ea97d44ec783\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-w4dbn"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.649720 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/683b2874-b3e0-4844-a257-583438c3617c-node-pullsecrets\") pod \"apiserver-76f77b778f-hj4tg\" (UID: \"683b2874-b3e0-4844-a257-583438c3617c\") " pod="openshift-apiserver/apiserver-76f77b778f-hj4tg"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.649747 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/683b2874-b3e0-4844-a257-583438c3617c-image-import-ca\") pod \"apiserver-76f77b778f-hj4tg\" (UID: \"683b2874-b3e0-4844-a257-583438c3617c\") " pod="openshift-apiserver/apiserver-76f77b778f-hj4tg"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.649809 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.649845 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.649774 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ptqw\" (UniqueName: \"kubernetes.io/projected/2f8c8aee-306f-4517-80a8-61eb2ee886d7-kube-api-access-5ptqw\") pod \"route-controller-manager-6576b87f9c-f2w4s\" (UID: \"2f8c8aee-306f-4517-80a8-61eb2ee886d7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-f2w4s"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.649955 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2646589f-a725-4093-a8c6-0f5fed8105ac-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-47pmn\" (UID: \"2646589f-a725-4093-a8c6-0f5fed8105ac\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-47pmn"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.649976 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.649986 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/683b2874-b3e0-4844-a257-583438c3617c-serving-cert\") pod \"apiserver-76f77b778f-hj4tg\" (UID: \"683b2874-b3e0-4844-a257-583438c3617c\") " pod="openshift-apiserver/apiserver-76f77b778f-hj4tg"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.650015 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/94dd5306-7adc-46f9-839e-f40d15cb7d7e-serving-cert\") pod \"apiserver-7bbb656c7d-z9wjq\" (UID: \"94dd5306-7adc-46f9-839e-f40d15cb7d7e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9wjq"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.650041 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/683b2874-b3e0-4844-a257-583438c3617c-trusted-ca-bundle\") pod \"apiserver-76f77b778f-hj4tg\" (UID: \"683b2874-b3e0-4844-a257-583438c3617c\") " pod="openshift-apiserver/apiserver-76f77b778f-hj4tg"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.650105 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/683b2874-b3e0-4844-a257-583438c3617c-audit-dir\") pod \"apiserver-76f77b778f-hj4tg\" (UID: \"683b2874-b3e0-4844-a257-583438c3617c\") " pod="openshift-apiserver/apiserver-76f77b778f-hj4tg"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.650131 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.650142 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f8c8aee-306f-4517-80a8-61eb2ee886d7-config\") pod \"route-controller-manager-6576b87f9c-f2w4s\" (UID: \"2f8c8aee-306f-4517-80a8-61eb2ee886d7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-f2w4s"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.660196 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.661500 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.650171 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2f8c8aee-306f-4517-80a8-61eb2ee886d7-serving-cert\") pod \"route-controller-manager-6576b87f9c-f2w4s\" (UID: \"2f8c8aee-306f-4517-80a8-61eb2ee886d7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-f2w4s"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.661695 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79kpk\" (UniqueName: \"kubernetes.io/projected/2efa0050-8a68-418e-9adc-1d9ecc563b11-kube-api-access-79kpk\") pod \"machine-approver-56656f9798-w9qkw\" (UID: \"2efa0050-8a68-418e-9adc-1d9ecc563b11\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w9qkw"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.661745 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8f8zk\" (UniqueName: \"kubernetes.io/projected/683b2874-b3e0-4844-a257-583438c3617c-kube-api-access-8f8zk\") pod \"apiserver-76f77b778f-hj4tg\" (UID: \"683b2874-b3e0-4844-a257-583438c3617c\") " pod="openshift-apiserver/apiserver-76f77b778f-hj4tg"
\"kubernetes.io/configmap/683b2874-b3e0-4844-a257-583438c3617c-etcd-serving-ca\") pod \"apiserver-76f77b778f-hj4tg\" (UID: \"683b2874-b3e0-4844-a257-583438c3617c\") " pod="openshift-apiserver/apiserver-76f77b778f-hj4tg" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.661836 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6s5x9\" (UniqueName: \"kubernetes.io/projected/5fe24ff5-9d08-432e-8888-ea97d44ec783-kube-api-access-6s5x9\") pod \"authentication-operator-69f744f599-w4dbn\" (UID: \"5fe24ff5-9d08-432e-8888-ea97d44ec783\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-w4dbn" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.661862 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/e43c1b92-4c7b-4db9-8363-472ecea3213f-oauth-serving-cert\") pod \"console-f9d7485db-2wmmh\" (UID: \"e43c1b92-4c7b-4db9-8363-472ecea3213f\") " pod="openshift-console/console-f9d7485db-2wmmh" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.661913 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/2efa0050-8a68-418e-9adc-1d9ecc563b11-machine-approver-tls\") pod \"machine-approver-56656f9798-w9qkw\" (UID: \"2efa0050-8a68-418e-9adc-1d9ecc563b11\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w9qkw" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.661935 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x5959\" (UniqueName: \"kubernetes.io/projected/e43c1b92-4c7b-4db9-8363-472ecea3213f-kube-api-access-x5959\") pod \"console-f9d7485db-2wmmh\" (UID: \"e43c1b92-4c7b-4db9-8363-472ecea3213f\") " pod="openshift-console/console-f9d7485db-2wmmh" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.666508 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.666889 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.669634 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.679011 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.679123 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.679798 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.680261 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.680544 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.681234 4998 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-machine-api/machine-api-operator-5694c8668f-hf97k"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.681992 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-hf97k" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.682175 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.682536 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.682741 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.682912 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tpbvt" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.683506 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.683680 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.684666 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mp7nc"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.684813 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.685164 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mp7nc" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.686950 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.687033 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vf25l"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.687381 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vf25l" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.688301 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.689419 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-zqbnn"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.689874 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-zqbnn" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.691085 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.699173 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-mqrkr"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.706054 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2jbwn"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.706447 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-nzzcb"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.706657 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-l249c"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.706869 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-zx8sw"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.707153 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.707367 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.707421 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-mqrkr" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.707602 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-nzzcb" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.707889 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-l249c" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.707958 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.708322 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.708598 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.708915 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.709050 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.710843 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6mmvm"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.711483 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6mmvm" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.712949 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.714063 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.714715 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.714950 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.715396 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.715582 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.715698 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.715870 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.715889 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.715977 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.716111 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.716245 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.716357 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.716485 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.716596 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.716700 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.716848 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.717139 4998 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-image-registry"/"installation-pull-secrets" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.717383 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.717576 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.717625 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.717740 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.717943 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.717954 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.719106 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kqkpx"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.719398 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.719763 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.719930 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kqkpx" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.720250 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.721665 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.720970 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-bwhwc"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.721836 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.722118 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.722339 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.722335 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bwhwc" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.737370 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-qshv4"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.720357 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.720400 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.720436 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.720947 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.721158 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.732752 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.734992 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.735754 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.736376 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.743999 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.748487 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.749475 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.752010 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.754409 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.754418 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.755668 4998 util.go:30] "No sandbox for pod can be found. 
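The "SyncLoop ADD" source="api" entries record pod specs arriving over the kubelet's API-server watch once the node went Ready. A minimal client-go sketch that observes the equivalent ADD stream for node "crc" follows; it is illustrative, since the kubelet consumes an internal config channel rather than a raw watch.

package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/rest"
)

func main() {
	cfg, err := rest.InClusterConfig() // assumes in-cluster credentials
	if err != nil {
		panic(err)
	}
	clientset := kubernetes.NewForConfigOrDie(cfg)

	// Watch pods bound to this node; the field selector mirrors how the
	// kubelet scopes its own pod watch to its node name.
	w, err := clientset.CoreV1().Pods(metav1.NamespaceAll).Watch(context.Background(),
		metav1.ListOptions{FieldSelector: "spec.nodeName=crc"})
	if err != nil {
		panic(err)
	}
	defer w.Stop()
	for ev := range w.ResultChan() {
		// ADDED events here correspond to the "SyncLoop ADD" lines above.
		fmt.Println("event:", ev.Type)
	}
}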
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.755668 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qshv4"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.755563 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lhs9k"]
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.757736 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-pbbqh"]
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.760581 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-l7cxx"]
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.761162 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lhs9k"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.761290 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-pbbqh"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.762217 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jppzk"]
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.762594 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jppzk"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.762871 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-l7cxx"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.764699 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766046 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4daf44c8-ba33-4d28-85a0-e8110bce8117-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-vf25l\" (UID: \"4daf44c8-ba33-4d28-85a0-e8110bce8117\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vf25l"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766098 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8f8zk\" (UniqueName: \"kubernetes.io/projected/683b2874-b3e0-4844-a257-583438c3617c-kube-api-access-8f8zk\") pod \"apiserver-76f77b778f-hj4tg\" (UID: \"683b2874-b3e0-4844-a257-583438c3617c\") " pod="openshift-apiserver/apiserver-76f77b778f-hj4tg"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766119 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/683b2874-b3e0-4844-a257-583438c3617c-etcd-serving-ca\") pod \"apiserver-76f77b778f-hj4tg\" (UID: \"683b2874-b3e0-4844-a257-583438c3617c\") " pod="openshift-apiserver/apiserver-76f77b778f-hj4tg"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766138 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4daf44c8-ba33-4d28-85a0-e8110bce8117-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-vf25l\" (UID: \"4daf44c8-ba33-4d28-85a0-e8110bce8117\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vf25l"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766163 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6s5x9\" (UniqueName: \"kubernetes.io/projected/5fe24ff5-9d08-432e-8888-ea97d44ec783-kube-api-access-6s5x9\") pod \"authentication-operator-69f744f599-w4dbn\" (UID: \"5fe24ff5-9d08-432e-8888-ea97d44ec783\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-w4dbn"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766186 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e7361bde-fabd-4ec0-a6cc-aae41dcf4d05-etcd-client\") pod \"etcd-operator-b45778765-nzzcb\" (UID: \"e7361bde-fabd-4ec0-a6cc-aae41dcf4d05\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nzzcb"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766206 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gg7gd\" (UniqueName: \"kubernetes.io/projected/8d9bf04a-afb1-49d2-990b-a094855aadd7-kube-api-access-gg7gd\") pod \"machine-config-operator-74547568cd-bwhwc\" (UID: \"8d9bf04a-afb1-49d2-990b-a094855aadd7\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bwhwc"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766228 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/e43c1b92-4c7b-4db9-8363-472ecea3213f-oauth-serving-cert\") pod \"console-f9d7485db-2wmmh\" (UID: \"e43c1b92-4c7b-4db9-8363-472ecea3213f\") " pod="openshift-console/console-f9d7485db-2wmmh"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766267 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/2efa0050-8a68-418e-9adc-1d9ecc563b11-machine-approver-tls\") pod \"machine-approver-56656f9798-w9qkw\" (UID: \"2efa0050-8a68-418e-9adc-1d9ecc563b11\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w9qkw"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766284 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x5959\" (UniqueName: \"kubernetes.io/projected/e43c1b92-4c7b-4db9-8363-472ecea3213f-kube-api-access-x5959\") pod \"console-f9d7485db-2wmmh\" (UID: \"e43c1b92-4c7b-4db9-8363-472ecea3213f\") " pod="openshift-console/console-f9d7485db-2wmmh"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766303 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/8d9bf04a-afb1-49d2-990b-a094855aadd7-proxy-tls\") pod \"machine-config-operator-74547568cd-bwhwc\" (UID: \"8d9bf04a-afb1-49d2-990b-a094855aadd7\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bwhwc"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766323 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1aaa1428-ac87-459e-bcda-7824f9569593-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-kqkpx\" (UID: \"1aaa1428-ac87-459e-bcda-7824f9569593\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kqkpx"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766344 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2646589f-a725-4093-a8c6-0f5fed8105ac-service-ca\") pod \"cluster-version-operator-5c965bbfc6-47pmn\" (UID: \"2646589f-a725-4093-a8c6-0f5fed8105ac\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-47pmn"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766364 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t7zzr\" (UniqueName: \"kubernetes.io/projected/40b4149a-acf4-4ccc-be3c-4471354e69d2-kube-api-access-t7zzr\") pod \"openshift-config-operator-7777fb866f-zqbnn\" (UID: \"40b4149a-acf4-4ccc-be3c-4471354e69d2\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-zqbnn"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766389 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/94dd5306-7adc-46f9-839e-f40d15cb7d7e-audit-dir\") pod \"apiserver-7bbb656c7d-z9wjq\" (UID: \"94dd5306-7adc-46f9-839e-f40d15cb7d7e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9wjq"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766406 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5fe24ff5-9d08-432e-8888-ea97d44ec783-config\") pod \"authentication-operator-69f744f599-w4dbn\" (UID: \"5fe24ff5-9d08-432e-8888-ea97d44ec783\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-w4dbn"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766424 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2efa0050-8a68-418e-9adc-1d9ecc563b11-auth-proxy-config\") pod \"machine-approver-56656f9798-w9qkw\" (UID: \"2efa0050-8a68-418e-9adc-1d9ecc563b11\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w9qkw"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766443 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/2646589f-a725-4093-a8c6-0f5fed8105ac-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-47pmn\" (UID: \"2646589f-a725-4093-a8c6-0f5fed8105ac\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-47pmn"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766460 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2646589f-a725-4093-a8c6-0f5fed8105ac-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-47pmn\" (UID: \"2646589f-a725-4093-a8c6-0f5fed8105ac\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-47pmn"
Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766476 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kpnk6\" (UniqueName: \"kubernetes.io/projected/e7361bde-fabd-4ec0-a6cc-aae41dcf4d05-kube-api-access-kpnk6\") pod \"etcd-operator-b45778765-nzzcb\" (UID: \"e7361bde-fabd-4ec0-a6cc-aae41dcf4d05\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nzzcb"
"operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/94dd5306-7adc-46f9-839e-f40d15cb7d7e-encryption-config\") pod \"apiserver-7bbb656c7d-z9wjq\" (UID: \"94dd5306-7adc-46f9-839e-f40d15cb7d7e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9wjq" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766512 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/e7361bde-fabd-4ec0-a6cc-aae41dcf4d05-etcd-ca\") pod \"etcd-operator-b45778765-nzzcb\" (UID: \"e7361bde-fabd-4ec0-a6cc-aae41dcf4d05\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nzzcb" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766534 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wkqfc\" (UniqueName: \"kubernetes.io/projected/94dd5306-7adc-46f9-839e-f40d15cb7d7e-kube-api-access-wkqfc\") pod \"apiserver-7bbb656c7d-z9wjq\" (UID: \"94dd5306-7adc-46f9-839e-f40d15cb7d7e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9wjq" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766554 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/683b2874-b3e0-4844-a257-583438c3617c-config\") pod \"apiserver-76f77b778f-hj4tg\" (UID: \"683b2874-b3e0-4844-a257-583438c3617c\") " pod="openshift-apiserver/apiserver-76f77b778f-hj4tg" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766575 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e43c1b92-4c7b-4db9-8363-472ecea3213f-console-oauth-config\") pod \"console-f9d7485db-2wmmh\" (UID: \"e43c1b92-4c7b-4db9-8363-472ecea3213f\") " pod="openshift-console/console-f9d7485db-2wmmh" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766592 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e43c1b92-4c7b-4db9-8363-472ecea3213f-service-ca\") pod \"console-f9d7485db-2wmmh\" (UID: \"e43c1b92-4c7b-4db9-8363-472ecea3213f\") " pod="openshift-console/console-f9d7485db-2wmmh" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766622 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5fe24ff5-9d08-432e-8888-ea97d44ec783-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-w4dbn\" (UID: \"5fe24ff5-9d08-432e-8888-ea97d44ec783\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-w4dbn" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766640 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/683b2874-b3e0-4844-a257-583438c3617c-encryption-config\") pod \"apiserver-76f77b778f-hj4tg\" (UID: \"683b2874-b3e0-4844-a257-583438c3617c\") " pod="openshift-apiserver/apiserver-76f77b778f-hj4tg" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766659 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/94dd5306-7adc-46f9-839e-f40d15cb7d7e-etcd-client\") pod \"apiserver-7bbb656c7d-z9wjq\" (UID: \"94dd5306-7adc-46f9-839e-f40d15cb7d7e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9wjq" Feb 03 06:47:29 crc 
kubenswrapper[4998]: I0203 06:47:29.766678 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2f8c8aee-306f-4517-80a8-61eb2ee886d7-client-ca\") pod \"route-controller-manager-6576b87f9c-f2w4s\" (UID: \"2f8c8aee-306f-4517-80a8-61eb2ee886d7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-f2w4s" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766698 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7361bde-fabd-4ec0-a6cc-aae41dcf4d05-config\") pod \"etcd-operator-b45778765-nzzcb\" (UID: \"e7361bde-fabd-4ec0-a6cc-aae41dcf4d05\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nzzcb" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766717 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/683b2874-b3e0-4844-a257-583438c3617c-audit\") pod \"apiserver-76f77b778f-hj4tg\" (UID: \"683b2874-b3e0-4844-a257-583438c3617c\") " pod="openshift-apiserver/apiserver-76f77b778f-hj4tg" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766736 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2efa0050-8a68-418e-9adc-1d9ecc563b11-config\") pod \"machine-approver-56656f9798-w9qkw\" (UID: \"2efa0050-8a68-418e-9adc-1d9ecc563b11\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w9qkw" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766753 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/2646589f-a725-4093-a8c6-0f5fed8105ac-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-47pmn\" (UID: \"2646589f-a725-4093-a8c6-0f5fed8105ac\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-47pmn" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766771 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/94dd5306-7adc-46f9-839e-f40d15cb7d7e-audit-policies\") pod \"apiserver-7bbb656c7d-z9wjq\" (UID: \"94dd5306-7adc-46f9-839e-f40d15cb7d7e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9wjq" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766803 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h672l\" (UniqueName: \"kubernetes.io/projected/9ed713f2-843a-4706-a6bc-2d59bd9d2c44-kube-api-access-h672l\") pod \"downloads-7954f5f757-l249c\" (UID: \"9ed713f2-843a-4706-a6bc-2d59bd9d2c44\") " pod="openshift-console/downloads-7954f5f757-l249c" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766837 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/e7361bde-fabd-4ec0-a6cc-aae41dcf4d05-etcd-service-ca\") pod \"etcd-operator-b45778765-nzzcb\" (UID: \"e7361bde-fabd-4ec0-a6cc-aae41dcf4d05\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nzzcb" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766854 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/e43c1b92-4c7b-4db9-8363-472ecea3213f-console-config\") pod 
\"console-f9d7485db-2wmmh\" (UID: \"e43c1b92-4c7b-4db9-8363-472ecea3213f\") " pod="openshift-console/console-f9d7485db-2wmmh" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766882 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e43c1b92-4c7b-4db9-8363-472ecea3213f-console-serving-cert\") pod \"console-f9d7485db-2wmmh\" (UID: \"e43c1b92-4c7b-4db9-8363-472ecea3213f\") " pod="openshift-console/console-f9d7485db-2wmmh" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766899 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5fe24ff5-9d08-432e-8888-ea97d44ec783-serving-cert\") pod \"authentication-operator-69f744f599-w4dbn\" (UID: \"5fe24ff5-9d08-432e-8888-ea97d44ec783\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-w4dbn" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766913 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/683b2874-b3e0-4844-a257-583438c3617c-etcd-client\") pod \"apiserver-76f77b778f-hj4tg\" (UID: \"683b2874-b3e0-4844-a257-583438c3617c\") " pod="openshift-apiserver/apiserver-76f77b778f-hj4tg" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766929 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/40b4149a-acf4-4ccc-be3c-4471354e69d2-serving-cert\") pod \"openshift-config-operator-7777fb866f-zqbnn\" (UID: \"40b4149a-acf4-4ccc-be3c-4471354e69d2\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-zqbnn" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766953 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/94dd5306-7adc-46f9-839e-f40d15cb7d7e-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-z9wjq\" (UID: \"94dd5306-7adc-46f9-839e-f40d15cb7d7e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9wjq" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.766979 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e43c1b92-4c7b-4db9-8363-472ecea3213f-trusted-ca-bundle\") pod \"console-f9d7485db-2wmmh\" (UID: \"e43c1b92-4c7b-4db9-8363-472ecea3213f\") " pod="openshift-console/console-f9d7485db-2wmmh" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.767002 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/94dd5306-7adc-46f9-839e-f40d15cb7d7e-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-z9wjq\" (UID: \"94dd5306-7adc-46f9-839e-f40d15cb7d7e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9wjq" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.767021 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5fe24ff5-9d08-432e-8888-ea97d44ec783-service-ca-bundle\") pod \"authentication-operator-69f744f599-w4dbn\" (UID: \"5fe24ff5-9d08-432e-8888-ea97d44ec783\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-w4dbn" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.767039 4998 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/683b2874-b3e0-4844-a257-583438c3617c-node-pullsecrets\") pod \"apiserver-76f77b778f-hj4tg\" (UID: \"683b2874-b3e0-4844-a257-583438c3617c\") " pod="openshift-apiserver/apiserver-76f77b778f-hj4tg" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.767058 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/683b2874-b3e0-4844-a257-583438c3617c-image-import-ca\") pod \"apiserver-76f77b778f-hj4tg\" (UID: \"683b2874-b3e0-4844-a257-583438c3617c\") " pod="openshift-apiserver/apiserver-76f77b778f-hj4tg" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.767074 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ptqw\" (UniqueName: \"kubernetes.io/projected/2f8c8aee-306f-4517-80a8-61eb2ee886d7-kube-api-access-5ptqw\") pod \"route-controller-manager-6576b87f9c-f2w4s\" (UID: \"2f8c8aee-306f-4517-80a8-61eb2ee886d7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-f2w4s" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.767092 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/4daf44c8-ba33-4d28-85a0-e8110bce8117-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-vf25l\" (UID: \"4daf44c8-ba33-4d28-85a0-e8110bce8117\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vf25l" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.767110 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1aaa1428-ac87-459e-bcda-7824f9569593-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-kqkpx\" (UID: \"1aaa1428-ac87-459e-bcda-7824f9569593\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kqkpx" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.767128 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2646589f-a725-4093-a8c6-0f5fed8105ac-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-47pmn\" (UID: \"2646589f-a725-4093-a8c6-0f5fed8105ac\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-47pmn" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.767146 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/683b2874-b3e0-4844-a257-583438c3617c-serving-cert\") pod \"apiserver-76f77b778f-hj4tg\" (UID: \"683b2874-b3e0-4844-a257-583438c3617c\") " pod="openshift-apiserver/apiserver-76f77b778f-hj4tg" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.767162 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1aaa1428-ac87-459e-bcda-7824f9569593-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-kqkpx\" (UID: \"1aaa1428-ac87-459e-bcda-7824f9569593\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kqkpx" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.767179 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/94dd5306-7adc-46f9-839e-f40d15cb7d7e-serving-cert\") pod \"apiserver-7bbb656c7d-z9wjq\" (UID: \"94dd5306-7adc-46f9-839e-f40d15cb7d7e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9wjq" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.767197 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/683b2874-b3e0-4844-a257-583438c3617c-trusted-ca-bundle\") pod \"apiserver-76f77b778f-hj4tg\" (UID: \"683b2874-b3e0-4844-a257-583438c3617c\") " pod="openshift-apiserver/apiserver-76f77b778f-hj4tg" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.767213 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/683b2874-b3e0-4844-a257-583438c3617c-audit-dir\") pod \"apiserver-76f77b778f-hj4tg\" (UID: \"683b2874-b3e0-4844-a257-583438c3617c\") " pod="openshift-apiserver/apiserver-76f77b778f-hj4tg" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.767230 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f8c8aee-306f-4517-80a8-61eb2ee886d7-config\") pod \"route-controller-manager-6576b87f9c-f2w4s\" (UID: \"2f8c8aee-306f-4517-80a8-61eb2ee886d7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-f2w4s" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.767246 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2f8c8aee-306f-4517-80a8-61eb2ee886d7-serving-cert\") pod \"route-controller-manager-6576b87f9c-f2w4s\" (UID: \"2f8c8aee-306f-4517-80a8-61eb2ee886d7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-f2w4s" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.767262 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7361bde-fabd-4ec0-a6cc-aae41dcf4d05-serving-cert\") pod \"etcd-operator-b45778765-nzzcb\" (UID: \"e7361bde-fabd-4ec0-a6cc-aae41dcf4d05\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nzzcb" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.767277 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6zt72\" (UniqueName: \"kubernetes.io/projected/4daf44c8-ba33-4d28-85a0-e8110bce8117-kube-api-access-6zt72\") pod \"cluster-image-registry-operator-dc59b4c8b-vf25l\" (UID: \"4daf44c8-ba33-4d28-85a0-e8110bce8117\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vf25l" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.767301 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79kpk\" (UniqueName: \"kubernetes.io/projected/2efa0050-8a68-418e-9adc-1d9ecc563b11-kube-api-access-79kpk\") pod \"machine-approver-56656f9798-w9qkw\" (UID: \"2efa0050-8a68-418e-9adc-1d9ecc563b11\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w9qkw" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.767320 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/8d9bf04a-afb1-49d2-990b-a094855aadd7-images\") pod \"machine-config-operator-74547568cd-bwhwc\" (UID: \"8d9bf04a-afb1-49d2-990b-a094855aadd7\") " 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bwhwc" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.767347 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8d9bf04a-afb1-49d2-990b-a094855aadd7-auth-proxy-config\") pod \"machine-config-operator-74547568cd-bwhwc\" (UID: \"8d9bf04a-afb1-49d2-990b-a094855aadd7\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bwhwc" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.767363 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/40b4149a-acf4-4ccc-be3c-4471354e69d2-available-featuregates\") pod \"openshift-config-operator-7777fb866f-zqbnn\" (UID: \"40b4149a-acf4-4ccc-be3c-4471354e69d2\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-zqbnn" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.767941 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/e43c1b92-4c7b-4db9-8363-472ecea3213f-oauth-serving-cert\") pod \"console-f9d7485db-2wmmh\" (UID: \"e43c1b92-4c7b-4db9-8363-472ecea3213f\") " pod="openshift-console/console-f9d7485db-2wmmh" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.769030 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5fe24ff5-9d08-432e-8888-ea97d44ec783-config\") pod \"authentication-operator-69f744f599-w4dbn\" (UID: \"5fe24ff5-9d08-432e-8888-ea97d44ec783\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-w4dbn" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.769370 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/683b2874-b3e0-4844-a257-583438c3617c-etcd-serving-ca\") pod \"apiserver-76f77b778f-hj4tg\" (UID: \"683b2874-b3e0-4844-a257-583438c3617c\") " pod="openshift-apiserver/apiserver-76f77b778f-hj4tg" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.769827 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2646589f-a725-4093-a8c6-0f5fed8105ac-service-ca\") pod \"cluster-version-operator-5c965bbfc6-47pmn\" (UID: \"2646589f-a725-4093-a8c6-0f5fed8105ac\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-47pmn" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.769885 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/94dd5306-7adc-46f9-839e-f40d15cb7d7e-audit-dir\") pod \"apiserver-7bbb656c7d-z9wjq\" (UID: \"94dd5306-7adc-46f9-839e-f40d15cb7d7e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9wjq" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.769915 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/2646589f-a725-4093-a8c6-0f5fed8105ac-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-47pmn\" (UID: \"2646589f-a725-4093-a8c6-0f5fed8105ac\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-47pmn" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.770581 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e43c1b92-4c7b-4db9-8363-472ecea3213f-trusted-ca-bundle\") pod \"console-f9d7485db-2wmmh\" (UID: \"e43c1b92-4c7b-4db9-8363-472ecea3213f\") " pod="openshift-console/console-f9d7485db-2wmmh" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.772072 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/94dd5306-7adc-46f9-839e-f40d15cb7d7e-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-z9wjq\" (UID: \"94dd5306-7adc-46f9-839e-f40d15cb7d7e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9wjq" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.773260 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/94dd5306-7adc-46f9-839e-f40d15cb7d7e-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-z9wjq\" (UID: \"94dd5306-7adc-46f9-839e-f40d15cb7d7e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9wjq" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.773347 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.773851 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5fe24ff5-9d08-432e-8888-ea97d44ec783-service-ca-bundle\") pod \"authentication-operator-69f744f599-w4dbn\" (UID: \"5fe24ff5-9d08-432e-8888-ea97d44ec783\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-w4dbn" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.773915 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/683b2874-b3e0-4844-a257-583438c3617c-node-pullsecrets\") pod \"apiserver-76f77b778f-hj4tg\" (UID: \"683b2874-b3e0-4844-a257-583438c3617c\") " pod="openshift-apiserver/apiserver-76f77b778f-hj4tg" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.774628 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/683b2874-b3e0-4844-a257-583438c3617c-image-import-ca\") pod \"apiserver-76f77b778f-hj4tg\" (UID: \"683b2874-b3e0-4844-a257-583438c3617c\") " pod="openshift-apiserver/apiserver-76f77b778f-hj4tg" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.774999 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/94dd5306-7adc-46f9-839e-f40d15cb7d7e-audit-policies\") pod \"apiserver-7bbb656c7d-z9wjq\" (UID: \"94dd5306-7adc-46f9-839e-f40d15cb7d7e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9wjq" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.776003 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/683b2874-b3e0-4844-a257-583438c3617c-etcd-client\") pod \"apiserver-76f77b778f-hj4tg\" (UID: \"683b2874-b3e0-4844-a257-583438c3617c\") " pod="openshift-apiserver/apiserver-76f77b778f-hj4tg" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.776687 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/683b2874-b3e0-4844-a257-583438c3617c-trusted-ca-bundle\") pod \"apiserver-76f77b778f-hj4tg\" (UID: 
\"683b2874-b3e0-4844-a257-583438c3617c\") " pod="openshift-apiserver/apiserver-76f77b778f-hj4tg" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.777969 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2f8c8aee-306f-4517-80a8-61eb2ee886d7-client-ca\") pod \"route-controller-manager-6576b87f9c-f2w4s\" (UID: \"2f8c8aee-306f-4517-80a8-61eb2ee886d7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-f2w4s" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.778055 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/683b2874-b3e0-4844-a257-583438c3617c-audit\") pod \"apiserver-76f77b778f-hj4tg\" (UID: \"683b2874-b3e0-4844-a257-583438c3617c\") " pod="openshift-apiserver/apiserver-76f77b778f-hj4tg" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.778242 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-cv476"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.778598 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2efa0050-8a68-418e-9adc-1d9ecc563b11-auth-proxy-config\") pod \"machine-approver-56656f9798-w9qkw\" (UID: \"2efa0050-8a68-418e-9adc-1d9ecc563b11\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w9qkw" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.778631 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2efa0050-8a68-418e-9adc-1d9ecc563b11-config\") pod \"machine-approver-56656f9798-w9qkw\" (UID: \"2efa0050-8a68-418e-9adc-1d9ecc563b11\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w9qkw" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.778654 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/2646589f-a725-4093-a8c6-0f5fed8105ac-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-47pmn\" (UID: \"2646589f-a725-4093-a8c6-0f5fed8105ac\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-47pmn" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.778935 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-cv476" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.779310 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e43c1b92-4c7b-4db9-8363-472ecea3213f-console-oauth-config\") pod \"console-f9d7485db-2wmmh\" (UID: \"e43c1b92-4c7b-4db9-8363-472ecea3213f\") " pod="openshift-console/console-f9d7485db-2wmmh" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.779447 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/683b2874-b3e0-4844-a257-583438c3617c-audit-dir\") pod \"apiserver-76f77b778f-hj4tg\" (UID: \"683b2874-b3e0-4844-a257-583438c3617c\") " pod="openshift-apiserver/apiserver-76f77b778f-hj4tg" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.778633 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e43c1b92-4c7b-4db9-8363-472ecea3213f-service-ca\") pod \"console-f9d7485db-2wmmh\" (UID: \"e43c1b92-4c7b-4db9-8363-472ecea3213f\") " pod="openshift-console/console-f9d7485db-2wmmh" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.779552 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5fe24ff5-9d08-432e-8888-ea97d44ec783-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-w4dbn\" (UID: \"5fe24ff5-9d08-432e-8888-ea97d44ec783\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-w4dbn" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.779610 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.779636 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-57hdh"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.779964 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/2efa0050-8a68-418e-9adc-1d9ecc563b11-machine-approver-tls\") pod \"machine-approver-56656f9798-w9qkw\" (UID: \"2efa0050-8a68-418e-9adc-1d9ecc563b11\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w9qkw" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.780240 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/94dd5306-7adc-46f9-839e-f40d15cb7d7e-serving-cert\") pod \"apiserver-7bbb656c7d-z9wjq\" (UID: \"94dd5306-7adc-46f9-839e-f40d15cb7d7e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9wjq" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.781177 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/e43c1b92-4c7b-4db9-8363-472ecea3213f-console-config\") pod \"console-f9d7485db-2wmmh\" (UID: \"e43c1b92-4c7b-4db9-8363-472ecea3213f\") " pod="openshift-console/console-f9d7485db-2wmmh" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.781514 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f8c8aee-306f-4517-80a8-61eb2ee886d7-config\") pod \"route-controller-manager-6576b87f9c-f2w4s\" (UID: 
\"2f8c8aee-306f-4517-80a8-61eb2ee886d7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-f2w4s" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.781848 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-57hdh" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.782175 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2f8c8aee-306f-4517-80a8-61eb2ee886d7-serving-cert\") pod \"route-controller-manager-6576b87f9c-f2w4s\" (UID: \"2f8c8aee-306f-4517-80a8-61eb2ee886d7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-f2w4s" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.782622 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5fe24ff5-9d08-432e-8888-ea97d44ec783-serving-cert\") pod \"authentication-operator-69f744f599-w4dbn\" (UID: \"5fe24ff5-9d08-432e-8888-ea97d44ec783\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-w4dbn" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.782974 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xwdnv"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.783424 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xwdnv" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.783841 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-ksr8m"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.784472 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ksr8m" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.784709 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kchsz"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.785261 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kchsz" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.786666 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/683b2874-b3e0-4844-a257-583438c3617c-serving-cert\") pod \"apiserver-76f77b778f-hj4tg\" (UID: \"683b2874-b3e0-4844-a257-583438c3617c\") " pod="openshift-apiserver/apiserver-76f77b778f-hj4tg" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.786673 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/94dd5306-7adc-46f9-839e-f40d15cb7d7e-etcd-client\") pod \"apiserver-7bbb656c7d-z9wjq\" (UID: \"94dd5306-7adc-46f9-839e-f40d15cb7d7e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9wjq" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.786868 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/683b2874-b3e0-4844-a257-583438c3617c-encryption-config\") pod \"apiserver-76f77b778f-hj4tg\" (UID: \"683b2874-b3e0-4844-a257-583438c3617c\") " pod="openshift-apiserver/apiserver-76f77b778f-hj4tg" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.787121 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e43c1b92-4c7b-4db9-8363-472ecea3213f-console-serving-cert\") pod \"console-f9d7485db-2wmmh\" (UID: \"e43c1b92-4c7b-4db9-8363-472ecea3213f\") " pod="openshift-console/console-f9d7485db-2wmmh" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.787737 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2646589f-a725-4093-a8c6-0f5fed8105ac-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-47pmn\" (UID: \"2646589f-a725-4093-a8c6-0f5fed8105ac\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-47pmn" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.791605 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.793157 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/94dd5306-7adc-46f9-839e-f40d15cb7d7e-encryption-config\") pod \"apiserver-7bbb656c7d-z9wjq\" (UID: \"94dd5306-7adc-46f9-839e-f40d15cb7d7e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9wjq" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.796203 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/683b2874-b3e0-4844-a257-583438c3617c-config\") pod \"apiserver-76f77b778f-hj4tg\" (UID: \"683b2874-b3e0-4844-a257-583438c3617c\") " pod="openshift-apiserver/apiserver-76f77b778f-hj4tg" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.797396 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-hgcng"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.798061 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-hgcng" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.798277 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-t6spv"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.798608 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-t6spv" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.799740 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-r2np7"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.805337 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-r2np7" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.811724 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.812146 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-ffwz5"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.815764 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6d7sj"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.816364 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fx6gz"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.816902 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fx6gz" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.817314 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ffwz5" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.817514 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-6d7sj" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.818381 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501685-vmrhm"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.819145 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501685-vmrhm" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.819441 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-6dxmx"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.821194 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-6dxmx" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.826422 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-hj4tg"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.831095 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-w4dbn"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.831494 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-sxhrq"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.832502 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-sxhrq" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.832493 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.832924 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-6dmdn"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.834934 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-f2w4s"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.835043 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-6dmdn" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.835344 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tpbvt"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.836474 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-hf97k"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.837887 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-cp5t2"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.838734 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-z9wjq"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.838858 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-cp5t2" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.840827 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2jbwn"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.842211 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-nzzcb"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.843095 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mp7nc"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.844129 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jppzk"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.846507 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-bwhwc"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.847641 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-57hdh"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.851151 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.857234 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-l249c"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.858417 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-2wmmh"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.859821 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kchsz"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.860405 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-t6spv"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.861722 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-sxhrq"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.862694 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vf25l"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.863770 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-qshv4"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.864948 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-zx8sw"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.865878 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-mqrkr"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.866838 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-pbbqh"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.867707 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kqkpx"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.868231 4998 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7361bde-fabd-4ec0-a6cc-aae41dcf4d05-config\") pod \"etcd-operator-b45778765-nzzcb\" (UID: \"e7361bde-fabd-4ec0-a6cc-aae41dcf4d05\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nzzcb" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.868264 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h672l\" (UniqueName: \"kubernetes.io/projected/9ed713f2-843a-4706-a6bc-2d59bd9d2c44-kube-api-access-h672l\") pod \"downloads-7954f5f757-l249c\" (UID: \"9ed713f2-843a-4706-a6bc-2d59bd9d2c44\") " pod="openshift-console/downloads-7954f5f757-l249c" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.868290 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/e7361bde-fabd-4ec0-a6cc-aae41dcf4d05-etcd-service-ca\") pod \"etcd-operator-b45778765-nzzcb\" (UID: \"e7361bde-fabd-4ec0-a6cc-aae41dcf4d05\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nzzcb" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.868310 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/40b4149a-acf4-4ccc-be3c-4471354e69d2-serving-cert\") pod \"openshift-config-operator-7777fb866f-zqbnn\" (UID: \"40b4149a-acf4-4ccc-be3c-4471354e69d2\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-zqbnn" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.868352 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/4daf44c8-ba33-4d28-85a0-e8110bce8117-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-vf25l\" (UID: \"4daf44c8-ba33-4d28-85a0-e8110bce8117\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vf25l" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.868379 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1aaa1428-ac87-459e-bcda-7824f9569593-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-kqkpx\" (UID: \"1aaa1428-ac87-459e-bcda-7824f9569593\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kqkpx" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.868396 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1aaa1428-ac87-459e-bcda-7824f9569593-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-kqkpx\" (UID: \"1aaa1428-ac87-459e-bcda-7824f9569593\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kqkpx" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.868415 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7361bde-fabd-4ec0-a6cc-aae41dcf4d05-serving-cert\") pod \"etcd-operator-b45778765-nzzcb\" (UID: \"e7361bde-fabd-4ec0-a6cc-aae41dcf4d05\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nzzcb" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.868445 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: 
\"kubernetes.io/configmap/8d9bf04a-afb1-49d2-990b-a094855aadd7-images\") pod \"machine-config-operator-74547568cd-bwhwc\" (UID: \"8d9bf04a-afb1-49d2-990b-a094855aadd7\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bwhwc" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.868463 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8d9bf04a-afb1-49d2-990b-a094855aadd7-auth-proxy-config\") pod \"machine-config-operator-74547568cd-bwhwc\" (UID: \"8d9bf04a-afb1-49d2-990b-a094855aadd7\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bwhwc" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.868480 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6zt72\" (UniqueName: \"kubernetes.io/projected/4daf44c8-ba33-4d28-85a0-e8110bce8117-kube-api-access-6zt72\") pod \"cluster-image-registry-operator-dc59b4c8b-vf25l\" (UID: \"4daf44c8-ba33-4d28-85a0-e8110bce8117\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vf25l" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.868500 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/40b4149a-acf4-4ccc-be3c-4471354e69d2-available-featuregates\") pod \"openshift-config-operator-7777fb866f-zqbnn\" (UID: \"40b4149a-acf4-4ccc-be3c-4471354e69d2\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-zqbnn" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.868526 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4daf44c8-ba33-4d28-85a0-e8110bce8117-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-vf25l\" (UID: \"4daf44c8-ba33-4d28-85a0-e8110bce8117\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vf25l" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.868544 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4daf44c8-ba33-4d28-85a0-e8110bce8117-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-vf25l\" (UID: \"4daf44c8-ba33-4d28-85a0-e8110bce8117\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vf25l" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.868561 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e7361bde-fabd-4ec0-a6cc-aae41dcf4d05-etcd-client\") pod \"etcd-operator-b45778765-nzzcb\" (UID: \"e7361bde-fabd-4ec0-a6cc-aae41dcf4d05\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nzzcb" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.868575 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gg7gd\" (UniqueName: \"kubernetes.io/projected/8d9bf04a-afb1-49d2-990b-a094855aadd7-kube-api-access-gg7gd\") pod \"machine-config-operator-74547568cd-bwhwc\" (UID: \"8d9bf04a-afb1-49d2-990b-a094855aadd7\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bwhwc" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.868602 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/8d9bf04a-afb1-49d2-990b-a094855aadd7-proxy-tls\") pod \"machine-config-operator-74547568cd-bwhwc\" (UID: \"8d9bf04a-afb1-49d2-990b-a094855aadd7\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bwhwc" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.868617 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1aaa1428-ac87-459e-bcda-7824f9569593-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-kqkpx\" (UID: \"1aaa1428-ac87-459e-bcda-7824f9569593\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kqkpx" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.868642 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t7zzr\" (UniqueName: \"kubernetes.io/projected/40b4149a-acf4-4ccc-be3c-4471354e69d2-kube-api-access-t7zzr\") pod \"openshift-config-operator-7777fb866f-zqbnn\" (UID: \"40b4149a-acf4-4ccc-be3c-4471354e69d2\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-zqbnn" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.868660 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpnk6\" (UniqueName: \"kubernetes.io/projected/e7361bde-fabd-4ec0-a6cc-aae41dcf4d05-kube-api-access-kpnk6\") pod \"etcd-operator-b45778765-nzzcb\" (UID: \"e7361bde-fabd-4ec0-a6cc-aae41dcf4d05\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nzzcb" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.868684 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/e7361bde-fabd-4ec0-a6cc-aae41dcf4d05-etcd-ca\") pod \"etcd-operator-b45778765-nzzcb\" (UID: \"e7361bde-fabd-4ec0-a6cc-aae41dcf4d05\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nzzcb" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.869047 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-zqbnn"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.869156 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/e7361bde-fabd-4ec0-a6cc-aae41dcf4d05-etcd-service-ca\") pod \"etcd-operator-b45778765-nzzcb\" (UID: \"e7361bde-fabd-4ec0-a6cc-aae41dcf4d05\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nzzcb" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.869298 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/e7361bde-fabd-4ec0-a6cc-aae41dcf4d05-etcd-ca\") pod \"etcd-operator-b45778765-nzzcb\" (UID: \"e7361bde-fabd-4ec0-a6cc-aae41dcf4d05\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nzzcb" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.869458 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7361bde-fabd-4ec0-a6cc-aae41dcf4d05-config\") pod \"etcd-operator-b45778765-nzzcb\" (UID: \"e7361bde-fabd-4ec0-a6cc-aae41dcf4d05\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nzzcb" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.869565 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: 
\"kubernetes.io/empty-dir/40b4149a-acf4-4ccc-be3c-4471354e69d2-available-featuregates\") pod \"openshift-config-operator-7777fb866f-zqbnn\" (UID: \"40b4149a-acf4-4ccc-be3c-4471354e69d2\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-zqbnn" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.870061 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8d9bf04a-afb1-49d2-990b-a094855aadd7-auth-proxy-config\") pod \"machine-config-operator-74547568cd-bwhwc\" (UID: \"8d9bf04a-afb1-49d2-990b-a094855aadd7\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bwhwc" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.870237 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4daf44c8-ba33-4d28-85a0-e8110bce8117-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-vf25l\" (UID: \"4daf44c8-ba33-4d28-85a0-e8110bce8117\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vf25l" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.870382 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-ksr8m"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.871258 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.871274 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-p5fkm"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.871478 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/40b4149a-acf4-4ccc-be3c-4471354e69d2-serving-cert\") pod \"openshift-config-operator-7777fb866f-zqbnn\" (UID: \"40b4149a-acf4-4ccc-be3c-4471354e69d2\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-zqbnn" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.871816 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/4daf44c8-ba33-4d28-85a0-e8110bce8117-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-vf25l\" (UID: \"4daf44c8-ba33-4d28-85a0-e8110bce8117\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vf25l" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.872193 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e7361bde-fabd-4ec0-a6cc-aae41dcf4d05-etcd-client\") pod \"etcd-operator-b45778765-nzzcb\" (UID: \"e7361bde-fabd-4ec0-a6cc-aae41dcf4d05\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nzzcb" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.873165 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lhs9k"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.873259 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-p5fkm" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.873509 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6mmvm"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.873673 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7361bde-fabd-4ec0-a6cc-aae41dcf4d05-serving-cert\") pod \"etcd-operator-b45778765-nzzcb\" (UID: \"e7361bde-fabd-4ec0-a6cc-aae41dcf4d05\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nzzcb" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.874462 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-l7cxx"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.875415 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-cv476"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.876350 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xwdnv"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.877270 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6d7sj"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.878220 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-ffwz5"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.880457 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-cp5t2"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.881388 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501685-vmrhm"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.882312 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fx6gz"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.883264 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-r2np7"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.884427 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-hgcng"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.886148 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-p5fkm"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.887108 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-6g4qm"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.888503 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-6g4qm"] Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.888619 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-6g4qm" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.896800 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.911904 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.921356 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1aaa1428-ac87-459e-bcda-7824f9569593-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-kqkpx\" (UID: \"1aaa1428-ac87-459e-bcda-7824f9569593\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kqkpx" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.932553 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.939892 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1aaa1428-ac87-459e-bcda-7824f9569593-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-kqkpx\" (UID: \"1aaa1428-ac87-459e-bcda-7824f9569593\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kqkpx" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.951057 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.960033 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/8d9bf04a-afb1-49d2-990b-a094855aadd7-images\") pod \"machine-config-operator-74547568cd-bwhwc\" (UID: \"8d9bf04a-afb1-49d2-990b-a094855aadd7\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bwhwc" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.971520 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.982120 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/8d9bf04a-afb1-49d2-990b-a094855aadd7-proxy-tls\") pod \"machine-config-operator-74547568cd-bwhwc\" (UID: \"8d9bf04a-afb1-49d2-990b-a094855aadd7\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bwhwc" Feb 03 06:47:29 crc kubenswrapper[4998]: I0203 06:47:29.991170 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.050896 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.076741 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.092388 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Feb 03 06:47:30 
crc kubenswrapper[4998]: I0203 06:47:30.112188 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.131470 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.151274 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.171506 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.191386 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.210980 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.232093 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.250624 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.271569 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.291669 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.311278 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.337830 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.351458 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.371565 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.391418 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.410605 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.427034 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.427542 4998 util.go:30] "No sandbox for pod can be found. 
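The repeating "Caches populated for *v1.Secret/*v1.ConfigMap from object-..." lines are client-go reflectors finishing their initial LIST+WATCH for each Secret and ConfigMap that pods on this node reference. The same cache-sync moment can be reproduced with a shared informer; the namespace below is taken from the log for illustration, and the kubeconfig path is an assumption for running outside the cluster:

package main

import (
	"fmt"
	"time"

	"k8s.io/client-go/informers"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/cache"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs := kubernetes.NewForConfigOrDie(cfg)

	// Watch ConfigMaps in one namespace, as the kubelet's reflectors do
	// per referenced object namespace.
	factory := informers.NewSharedInformerFactoryWithOptions(
		cs, 10*time.Minute, informers.WithNamespace("openshift-ingress-operator"))
	inf := factory.Core().V1().ConfigMaps().Informer()

	stop := make(chan struct{})
	defer close(stop)
	factory.Start(stop)

	// This returning true is the equivalent of the log's
	// "Caches populated for *v1.ConfigMap ..." message.
	if !cache.WaitForCacheSync(stop, inf.HasSynced) {
		panic("cache never synced")
	}
	fmt.Println("ConfigMap cache populated")
}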
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.427559 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.431163 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.452146 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.464197 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 00:42:59.881351818 +0000 UTC Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.464265 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Rotating certificates Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.471765 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.514353 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wkqfc\" (UniqueName: \"kubernetes.io/projected/94dd5306-7adc-46f9-839e-f40d15cb7d7e-kube-api-access-wkqfc\") pod \"apiserver-7bbb656c7d-z9wjq\" (UID: \"94dd5306-7adc-46f9-839e-f40d15cb7d7e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9wjq" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.530946 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6s5x9\" (UniqueName: \"kubernetes.io/projected/5fe24ff5-9d08-432e-8888-ea97d44ec783-kube-api-access-6s5x9\") pod \"authentication-operator-69f744f599-w4dbn\" (UID: \"5fe24ff5-9d08-432e-8888-ea97d44ec783\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-w4dbn" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.551506 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x5959\" (UniqueName: \"kubernetes.io/projected/e43c1b92-4c7b-4db9-8363-472ecea3213f-kube-api-access-x5959\") pod \"console-f9d7485db-2wmmh\" (UID: \"e43c1b92-4c7b-4db9-8363-472ecea3213f\") " pod="openshift-console/console-f9d7485db-2wmmh" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.566282 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8f8zk\" (UniqueName: \"kubernetes.io/projected/683b2874-b3e0-4844-a257-583438c3617c-kube-api-access-8f8zk\") pod \"apiserver-76f77b778f-hj4tg\" (UID: \"683b2874-b3e0-4844-a257-583438c3617c\") " pod="openshift-apiserver/apiserver-76f77b778f-hj4tg" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.588418 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ptqw\" (UniqueName: \"kubernetes.io/projected/2f8c8aee-306f-4517-80a8-61eb2ee886d7-kube-api-access-5ptqw\") pod \"route-controller-manager-6576b87f9c-f2w4s\" (UID: \"2f8c8aee-306f-4517-80a8-61eb2ee886d7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-f2w4s" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.591894 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9wjq" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.613281 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-2wmmh" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.616754 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79kpk\" (UniqueName: \"kubernetes.io/projected/2efa0050-8a68-418e-9adc-1d9ecc563b11-kube-api-access-79kpk\") pod \"machine-approver-56656f9798-w9qkw\" (UID: \"2efa0050-8a68-418e-9adc-1d9ecc563b11\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w9qkw" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.630566 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2646589f-a725-4093-a8c6-0f5fed8105ac-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-47pmn\" (UID: \"2646589f-a725-4093-a8c6-0f5fed8105ac\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-47pmn" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.631579 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.650882 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-w4dbn" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.651142 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.664922 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w9qkw" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.671510 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.691859 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.710941 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.741932 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.751505 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.771881 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.790147 4998 request.go:700] Waited for 1.007992174s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-api/secrets?fieldSelector=metadata.name%3Dcontrol-plane-machine-set-operator-dockercfg-k9rxt&limit=500&resourceVersion=0 Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.791795 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.811319 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.830331 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-hj4tg" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.833365 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.846262 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-f2w4s" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.851263 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.871267 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.879022 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-w4dbn"] Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.884475 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-47pmn" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.888880 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w9qkw" event={"ID":"2efa0050-8a68-418e-9adc-1d9ecc563b11","Type":"ContainerStarted","Data":"4b63131da60a5841f4bdddb4a278be5cb573c85d001d3ff7e8583069e6507dd9"} Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.892170 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Feb 03 06:47:30 crc kubenswrapper[4998]: W0203 06:47:30.897982 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe24ff5_9d08_432e_8888_ea97d44ec783.slice/crio-5437aaaca408503583b0e83762aa03765f9a3a80f70482efb4d2757a3b505509 WatchSource:0}: Error finding container 5437aaaca408503583b0e83762aa03765f9a3a80f70482efb4d2757a3b505509: Status 404 returned error can't find the container with id 5437aaaca408503583b0e83762aa03765f9a3a80f70482efb4d2757a3b505509 Feb 03 06:47:30 crc kubenswrapper[4998]: W0203 06:47:30.900014 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2646589f_a725_4093_a8c6_0f5fed8105ac.slice/crio-3094314ac771ab6cfc470caddd5111a3f2fd14457f743d0fd56b85a1219c8742 WatchSource:0}: Error finding container 3094314ac771ab6cfc470caddd5111a3f2fd14457f743d0fd56b85a1219c8742: Status 404 returned error can't find the container with id 3094314ac771ab6cfc470caddd5111a3f2fd14457f743d0fd56b85a1219c8742 Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.914004 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.932214 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.952529 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.971822 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Feb 03 06:47:30 crc kubenswrapper[4998]: I0203 06:47:30.992409 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.018035 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.018073 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-2wmmh"] Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.030600 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-z9wjq"] Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.031726 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.033619 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-hj4tg"] Feb 03 
Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.053173 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw"
Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.062192 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-f2w4s"]
Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.071704 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config"
Feb 03 06:47:31 crc kubenswrapper[4998]: W0203 06:47:31.083680 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2f8c8aee_306f_4517_80a8_61eb2ee886d7.slice/crio-965237083f7e5a2433ab438390949660413195fe93bc11e0e83a7053287d1a56 WatchSource:0}: Error finding container 965237083f7e5a2433ab438390949660413195fe93bc11e0e83a7053287d1a56: Status 404 returned error can't find the container with id 965237083f7e5a2433ab438390949660413195fe93bc11e0e83a7053287d1a56
Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.091432 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt"
Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.114432 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.131066 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt"
Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.151511 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d"
Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.172537 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert"
Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.192347 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config"
Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.211064 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt"
Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.231730 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg"
Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.250999 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt"
Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.271749 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.290800 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics"
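The W-level "Failed to process watch event ... Status 404" entries come from cAdvisor inside the kubelet: a cgroup-creation watch event for a brand-new CRI-O container arrives before the container is queryable, so the lookup 404s; the same container IDs show up healthy in ContainerStarted events moments later. The underlying pattern is a watch event racing the registry it refers to; one way to tolerate it, purely illustrative and not cAdvisor's code:

package main

import (
	"errors"
	"fmt"
	"time"
)

var errNotFound = errors.New("Status 404: can't find the container")

// lookup stands in for querying the runtime about a container named in a
// cgroup watch event; it may not be registered yet when the event fires.
func lookup(id string, ready map[string]bool) error {
	if !ready[id] {
		return errNotFound
	}
	return nil
}

func main() {
	ready := map[string]bool{}
	id := "5437aaaca408503583b0e83762aa03765f9a3a80f70482efb4d2757a3b505509"

	for attempt := 1; attempt <= 5; attempt++ {
		if attempt == 3 {
			ready[id] = true // container registration catches up
		}
		if err := lookup(id, ready); err != nil {
			fmt.Printf("attempt %d: %v (retrying)\n", attempt, err)
			time.Sleep(30 * time.Millisecond)
			continue
		}
		fmt.Printf("attempt %d: container found\n", attempt)
		return
	}
}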
object-"openshift-marketplace"/"marketplace-trusted-ca" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.331915 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.351690 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.372487 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.392417 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.412115 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.427263 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s5wml" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.431705 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.451581 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.471963 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.491581 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.512068 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.531847 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.551006 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.572279 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.591114 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.611943 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.631537 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.651202 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.671628 4998 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-ingress"/"openshift-service-ca.crt" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.690731 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.711721 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.731701 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.768685 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h672l\" (UniqueName: \"kubernetes.io/projected/9ed713f2-843a-4706-a6bc-2d59bd9d2c44-kube-api-access-h672l\") pod \"downloads-7954f5f757-l249c\" (UID: \"9ed713f2-843a-4706-a6bc-2d59bd9d2c44\") " pod="openshift-console/downloads-7954f5f757-l249c" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.786390 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1aaa1428-ac87-459e-bcda-7824f9569593-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-kqkpx\" (UID: \"1aaa1428-ac87-459e-bcda-7824f9569593\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kqkpx" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.807980 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6zt72\" (UniqueName: \"kubernetes.io/projected/4daf44c8-ba33-4d28-85a0-e8110bce8117-kube-api-access-6zt72\") pod \"cluster-image-registry-operator-dc59b4c8b-vf25l\" (UID: \"4daf44c8-ba33-4d28-85a0-e8110bce8117\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vf25l" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.809219 4998 request.go:700] Waited for 1.939924435s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/serviceaccounts/cluster-image-registry-operator/token Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.825489 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4daf44c8-ba33-4d28-85a0-e8110bce8117-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-vf25l\" (UID: \"4daf44c8-ba33-4d28-85a0-e8110bce8117\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vf25l" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.846742 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t7zzr\" (UniqueName: \"kubernetes.io/projected/40b4149a-acf4-4ccc-be3c-4471354e69d2-kube-api-access-t7zzr\") pod \"openshift-config-operator-7777fb866f-zqbnn\" (UID: \"40b4149a-acf4-4ccc-be3c-4471354e69d2\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-zqbnn" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.866243 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpnk6\" (UniqueName: \"kubernetes.io/projected/e7361bde-fabd-4ec0-a6cc-aae41dcf4d05-kube-api-access-kpnk6\") pod \"etcd-operator-b45778765-nzzcb\" (UID: \"e7361bde-fabd-4ec0-a6cc-aae41dcf4d05\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nzzcb" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.887512 4998 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-gg7gd\" (UniqueName: \"kubernetes.io/projected/8d9bf04a-afb1-49d2-990b-a094855aadd7-kube-api-access-gg7gd\") pod \"machine-config-operator-74547568cd-bwhwc\" (UID: \"8d9bf04a-afb1-49d2-990b-a094855aadd7\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bwhwc" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.891087 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.895042 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-f2w4s" event={"ID":"2f8c8aee-306f-4517-80a8-61eb2ee886d7","Type":"ContainerStarted","Data":"05d7aa1fc95a78195cb2dcb1006ad4356f8fe5455330ffee2e473975f9597125"} Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.895101 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-f2w4s" event={"ID":"2f8c8aee-306f-4517-80a8-61eb2ee886d7","Type":"ContainerStarted","Data":"965237083f7e5a2433ab438390949660413195fe93bc11e0e83a7053287d1a56"} Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.895337 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-f2w4s" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.896941 4998 generic.go:334] "Generic (PLEG): container finished" podID="683b2874-b3e0-4844-a257-583438c3617c" containerID="c9fb33f405c43585ad154a34727e2701d065a652f807503b0e3818c28d59c475" exitCode=0 Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.897055 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-hj4tg" event={"ID":"683b2874-b3e0-4844-a257-583438c3617c","Type":"ContainerDied","Data":"c9fb33f405c43585ad154a34727e2701d065a652f807503b0e3818c28d59c475"} Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.897084 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-hj4tg" event={"ID":"683b2874-b3e0-4844-a257-583438c3617c","Type":"ContainerStarted","Data":"deef2ee26694eeb97488157b2d75144df8744d112b556fdfc70180f5a5019b62"} Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.898117 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-47pmn" event={"ID":"2646589f-a725-4093-a8c6-0f5fed8105ac","Type":"ContainerStarted","Data":"5ac679a79089edb592c609b3eea1ac92ef60cc4dd2ee8ad937fe4cabf2a5a6cc"} Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.898154 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-47pmn" event={"ID":"2646589f-a725-4093-a8c6-0f5fed8105ac","Type":"ContainerStarted","Data":"3094314ac771ab6cfc470caddd5111a3f2fd14457f743d0fd56b85a1219c8742"} Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.898170 4998 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-f2w4s container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.898227 4998 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-f2w4s" podUID="2f8c8aee-306f-4517-80a8-61eb2ee886d7" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.902817 4998 generic.go:334] "Generic (PLEG): container finished" podID="94dd5306-7adc-46f9-839e-f40d15cb7d7e" containerID="e2bb9b3d2e16a5ad7e4138f677e6f0c5989b361b81e97252a5d0eb831949b579" exitCode=0 Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.902888 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9wjq" event={"ID":"94dd5306-7adc-46f9-839e-f40d15cb7d7e","Type":"ContainerDied","Data":"e2bb9b3d2e16a5ad7e4138f677e6f0c5989b361b81e97252a5d0eb831949b579"} Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.902914 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9wjq" event={"ID":"94dd5306-7adc-46f9-839e-f40d15cb7d7e","Type":"ContainerStarted","Data":"e2554fdb31faa7404df3027e8d2c57bf9bb99ed7c347eabbc94f7efdfd3eb2eb"} Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.908081 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-w4dbn" event={"ID":"5fe24ff5-9d08-432e-8888-ea97d44ec783","Type":"ContainerStarted","Data":"7456680eda9666b5cd9ba7955b15c947736ef956803cf6ba1d4511d64e609938"} Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.908136 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-w4dbn" event={"ID":"5fe24ff5-9d08-432e-8888-ea97d44ec783","Type":"ContainerStarted","Data":"5437aaaca408503583b0e83762aa03765f9a3a80f70482efb4d2757a3b505509"} Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.910906 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-2wmmh" event={"ID":"e43c1b92-4c7b-4db9-8363-472ecea3213f","Type":"ContainerStarted","Data":"cd828f58e5175060a4690c17068da1153d84f5f33c1bf286043180d38674c6dc"} Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.911911 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-2wmmh" event={"ID":"e43c1b92-4c7b-4db9-8363-472ecea3213f","Type":"ContainerStarted","Data":"a6e2803bda9d077bfa7c1f6b79103dddac142d579ca236c772369b068b13ec9e"} Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.913711 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.914135 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w9qkw" event={"ID":"2efa0050-8a68-418e-9adc-1d9ecc563b11","Type":"ContainerStarted","Data":"901c0cb5f313d16491bbf76ab0730f29a5f4624ed41eb276d596c5a5e987292b"} Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.914174 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w9qkw" event={"ID":"2efa0050-8a68-418e-9adc-1d9ecc563b11","Type":"ContainerStarted","Data":"aa916de88186765375f3fc6fcd2c310f3d8a7b7f14ef46f2a34f43878d950400"} Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.931505 4998 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-ingress-canary"/"openshift-service-ca.crt" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.951630 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.952449 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vf25l" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.960446 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-zqbnn" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.971426 4998 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Feb 03 06:47:31 crc kubenswrapper[4998]: I0203 06:47:31.991602 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.012043 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.019405 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-nzzcb" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.045570 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-l249c" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.059141 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kqkpx" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.065821 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bwhwc" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.073505 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.100337 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3fe2dd6d-16f7-4170-b830-b4d7406f9f30-metrics-tls\") pod \"dns-operator-744455d44c-mqrkr\" (UID: \"3fe2dd6d-16f7-4170-b830-b4d7406f9f30\") " pod="openshift-dns-operator/dns-operator-744455d44c-mqrkr" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.100378 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67pfd\" (UniqueName: \"kubernetes.io/projected/584d6f48-0415-4b3f-813c-a22af4a339cb-kube-api-access-67pfd\") pod \"machine-api-operator-5694c8668f-hf97k\" (UID: \"584d6f48-0415-4b3f-813c-a22af4a339cb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-hf97k" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.100423 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/584d6f48-0415-4b3f-813c-a22af4a339cb-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-hf97k\" (UID: \"584d6f48-0415-4b3f-813c-a22af4a339cb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-hf97k" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.100449 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.100503 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvxgm\" (UniqueName: \"kubernetes.io/projected/68ed3797-dc9f-4abe-8508-aa1780a90d7e-kube-api-access-fvxgm\") pod \"openshift-apiserver-operator-796bbdcf4f-mp7nc\" (UID: \"68ed3797-dc9f-4abe-8508-aa1780a90d7e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mp7nc" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.100569 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b088ff98-5a5a-465d-ad48-9017fdebc2bf-registry-tls\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.100592 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-grxgf\" (UniqueName: \"kubernetes.io/projected/c15ee4dd-48e2-423d-bbd4-0e136da1d313-kube-api-access-grxgf\") pod \"cluster-samples-operator-665b6dd947-tpbvt\" (UID: \"c15ee4dd-48e2-423d-bbd4-0e136da1d313\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tpbvt" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.100614 4998 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/88a9a581-8243-41b1-8054-94ab0168c25e-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-6mmvm\" (UID: \"88a9a581-8243-41b1-8054-94ab0168c25e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6mmvm" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.100641 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.100673 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.100698 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jjbxk\" (UniqueName: \"kubernetes.io/projected/263a6d98-1027-4782-8d2b-1b7274f389ea-kube-api-access-jjbxk\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.100725 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/68ed3797-dc9f-4abe-8508-aa1780a90d7e-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-mp7nc\" (UID: \"68ed3797-dc9f-4abe-8508-aa1780a90d7e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mp7nc" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.100762 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.100804 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bg268\" (UniqueName: \"kubernetes.io/projected/b088ff98-5a5a-465d-ad48-9017fdebc2bf-kube-api-access-bg268\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.100828 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b088ff98-5a5a-465d-ad48-9017fdebc2bf-ca-trust-extracted\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" 
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.100849 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88a9a581-8243-41b1-8054-94ab0168c25e-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-6mmvm\" (UID: \"88a9a581-8243-41b1-8054-94ab0168c25e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6mmvm" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.100882 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/263a6d98-1027-4782-8d2b-1b7274f389ea-audit-policies\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.100905 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.100938 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.100960 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b088ff98-5a5a-465d-ad48-9017fdebc2bf-installation-pull-secrets\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.101023 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b088ff98-5a5a-465d-ad48-9017fdebc2bf-registry-certificates\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.101061 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/263a6d98-1027-4782-8d2b-1b7274f389ea-audit-dir\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.101134 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/584d6f48-0415-4b3f-813c-a22af4a339cb-images\") pod \"machine-api-operator-5694c8668f-hf97k\" (UID: \"584d6f48-0415-4b3f-813c-a22af4a339cb\") " 
pod="openshift-machine-api/machine-api-operator-5694c8668f-hf97k" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.101161 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.101183 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.101225 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bj5rj\" (UniqueName: \"kubernetes.io/projected/88a9a581-8243-41b1-8054-94ab0168c25e-kube-api-access-bj5rj\") pod \"openshift-controller-manager-operator-756b6f6bc6-6mmvm\" (UID: \"88a9a581-8243-41b1-8054-94ab0168c25e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6mmvm" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.101249 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/584d6f48-0415-4b3f-813c-a22af4a339cb-config\") pod \"machine-api-operator-5694c8668f-hf97k\" (UID: \"584d6f48-0415-4b3f-813c-a22af4a339cb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-hf97k" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.101283 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/68ed3797-dc9f-4abe-8508-aa1780a90d7e-config\") pod \"openshift-apiserver-operator-796bbdcf4f-mp7nc\" (UID: \"68ed3797-dc9f-4abe-8508-aa1780a90d7e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mp7nc" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.101306 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.101333 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.101358 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8zpz\" (UniqueName: 
\"kubernetes.io/projected/3fe2dd6d-16f7-4170-b830-b4d7406f9f30-kube-api-access-g8zpz\") pod \"dns-operator-744455d44c-mqrkr\" (UID: \"3fe2dd6d-16f7-4170-b830-b4d7406f9f30\") " pod="openshift-dns-operator/dns-operator-744455d44c-mqrkr" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.101382 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b088ff98-5a5a-465d-ad48-9017fdebc2bf-trusted-ca\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.101408 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.101432 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/c15ee4dd-48e2-423d-bbd4-0e136da1d313-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-tpbvt\" (UID: \"c15ee4dd-48e2-423d-bbd4-0e136da1d313\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tpbvt" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.101454 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b088ff98-5a5a-465d-ad48-9017fdebc2bf-bound-sa-token\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.101505 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw" Feb 03 06:47:32 crc kubenswrapper[4998]: E0203 06:47:32.102160 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:32.602141734 +0000 UTC m=+90.888835540 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.109636 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.111272 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.132016 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.180482 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.196253 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.203020 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:47:32 crc kubenswrapper[4998]: E0203 06:47:32.203237 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:32.703194384 +0000 UTC m=+90.989888190 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.203303 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/18114187-6fca-483d-a5e6-e7bcb8c4d254-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-l7cxx\" (UID: \"18114187-6fca-483d-a5e6-e7bcb8c4d254\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-l7cxx" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.203336 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4552ae6c-059f-4f4a-9f20-d6468bc275bc-trusted-ca\") pod \"ingress-operator-5b745b69d9-qshv4\" (UID: \"4552ae6c-059f-4f4a-9f20-d6468bc275bc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qshv4" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.203361 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fce530e9-fb75-4bb1-af35-292bcdf4bc59-metrics-certs\") pod \"router-default-5444994796-6dmdn\" (UID: \"fce530e9-fb75-4bb1-af35-292bcdf4bc59\") " pod="openshift-ingress/router-default-5444994796-6dmdn" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.203391 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/ff1b3af9-22f7-4ac6-93a5-5794c7022f68-srv-cert\") pod \"olm-operator-6b444d44fb-jppzk\" (UID: \"ff1b3af9-22f7-4ac6-93a5-5794c7022f68\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jppzk" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.203434 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3fe2dd6d-16f7-4170-b830-b4d7406f9f30-metrics-tls\") pod \"dns-operator-744455d44c-mqrkr\" (UID: \"3fe2dd6d-16f7-4170-b830-b4d7406f9f30\") " pod="openshift-dns-operator/dns-operator-744455d44c-mqrkr" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.203475 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67pfd\" (UniqueName: \"kubernetes.io/projected/584d6f48-0415-4b3f-813c-a22af4a339cb-kube-api-access-67pfd\") pod \"machine-api-operator-5694c8668f-hf97k\" (UID: \"584d6f48-0415-4b3f-813c-a22af4a339cb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-hf97k" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.203499 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/f29b800f-24cd-4615-8692-3fd6e84ad338-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-57hdh\" (UID: \"f29b800f-24cd-4615-8692-3fd6e84ad338\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-57hdh" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 
06:47:32.203530 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/bfeeb90f-4d86-4455-baca-e23a1c85a4c5-certs\") pod \"machine-config-server-6dxmx\" (UID: \"bfeeb90f-4d86-4455-baca-e23a1c85a4c5\") " pod="openshift-machine-config-operator/machine-config-server-6dxmx" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.203570 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/584d6f48-0415-4b3f-813c-a22af4a339cb-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-hf97k\" (UID: \"584d6f48-0415-4b3f-813c-a22af4a339cb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-hf97k" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.203603 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/16dec508-e534-422d-8c43-75e9301ef843-csi-data-dir\") pod \"csi-hostpathplugin-6g4qm\" (UID: \"16dec508-e534-422d-8c43-75e9301ef843\") " pod="hostpath-provisioner/csi-hostpathplugin-6g4qm" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.203635 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4552ae6c-059f-4f4a-9f20-d6468bc275bc-bound-sa-token\") pod \"ingress-operator-5b745b69d9-qshv4\" (UID: \"4552ae6c-059f-4f4a-9f20-d6468bc275bc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qshv4" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.203665 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvxgm\" (UniqueName: \"kubernetes.io/projected/68ed3797-dc9f-4abe-8508-aa1780a90d7e-kube-api-access-fvxgm\") pod \"openshift-apiserver-operator-796bbdcf4f-mp7nc\" (UID: \"68ed3797-dc9f-4abe-8508-aa1780a90d7e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mp7nc" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.203684 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5a571eff-c1d3-4fc0-84e4-43cadf0c7979-config-volume\") pod \"collect-profiles-29501685-vmrhm\" (UID: \"5a571eff-c1d3-4fc0-84e4-43cadf0c7979\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501685-vmrhm" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.203714 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/bc17a6bb-32b1-4f5c-bbe5-24d5d0d379d5-webhook-cert\") pod \"packageserver-d55dfcdfc-kchsz\" (UID: \"bc17a6bb-32b1-4f5c-bbe5-24d5d0d379d5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kchsz" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.203735 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8d0e557a-d9eb-4b35-a6a7-e6ab993d5464-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-6d7sj\" (UID: \"8d0e557a-d9eb-4b35-a6a7-e6ab993d5464\") " pod="openshift-marketplace/marketplace-operator-79b997595-6d7sj" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.203754 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b088ff98-5a5a-465d-ad48-9017fdebc2bf-registry-tls\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.203805 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/88a9a581-8243-41b1-8054-94ab0168c25e-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-6mmvm\" (UID: \"88a9a581-8243-41b1-8054-94ab0168c25e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6mmvm" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.203840 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.203865 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/847a84bb-a664-4310-82af-79849df6df67-serving-cert\") pod \"service-ca-operator-777779d784-sxhrq\" (UID: \"847a84bb-a664-4310-82af-79849df6df67\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-sxhrq" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.203918 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.203941 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jjbxk\" (UniqueName: \"kubernetes.io/projected/263a6d98-1027-4782-8d2b-1b7274f389ea-kube-api-access-jjbxk\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.203962 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kcfsh\" (UniqueName: \"kubernetes.io/projected/f29b800f-24cd-4615-8692-3fd6e84ad338-kube-api-access-kcfsh\") pod \"control-plane-machine-set-operator-78cbb6b69f-57hdh\" (UID: \"f29b800f-24cd-4615-8692-3fd6e84ad338\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-57hdh" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.203988 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b088ff98-5a5a-465d-ad48-9017fdebc2bf-ca-trust-extracted\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.204007 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6ad430e2-233b-464e-873e-aa2fc29e22e1-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-r2np7\" (UID: \"6ad430e2-233b-464e-873e-aa2fc29e22e1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-r2np7" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.204029 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/943dc95b-dbe5-4561-bbe2-b2bf1680eb60-client-ca\") pod \"controller-manager-879f6c89f-cv476\" (UID: \"943dc95b-dbe5-4561-bbe2-b2bf1680eb60\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cv476" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.204073 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/263a6d98-1027-4782-8d2b-1b7274f389ea-audit-policies\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.204097 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.204118 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88a9a581-8243-41b1-8054-94ab0168c25e-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-6mmvm\" (UID: \"88a9a581-8243-41b1-8054-94ab0168c25e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6mmvm" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.204169 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lv6kz\" (UniqueName: \"kubernetes.io/projected/c23f4fd7-0bd7-4b4c-9697-b6bb583dba72-kube-api-access-lv6kz\") pod \"kube-storage-version-migrator-operator-b67b599dd-fx6gz\" (UID: \"c23f4fd7-0bd7-4b4c-9697-b6bb583dba72\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fx6gz" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.204190 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-psscq\" (UniqueName: \"kubernetes.io/projected/c39b4ff4-9f98-4732-af70-115c9bb38cb6-kube-api-access-psscq\") pod \"service-ca-9c57cc56f-t6spv\" (UID: \"c39b4ff4-9f98-4732-af70-115c9bb38cb6\") " pod="openshift-service-ca/service-ca-9c57cc56f-t6spv" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.204213 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b088ff98-5a5a-465d-ad48-9017fdebc2bf-installation-pull-secrets\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.204235 4998 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/69e0bd0e-3395-4a06-92ab-ae2fa932902b-metrics-tls\") pod \"dns-default-cp5t2\" (UID: \"69e0bd0e-3395-4a06-92ab-ae2fa932902b\") " pod="openshift-dns/dns-default-cp5t2" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.204255 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfxf4\" (UniqueName: \"kubernetes.io/projected/86f0eb38-79ac-40a4-9000-caadc16a11a7-kube-api-access-lfxf4\") pod \"console-operator-58897d9998-pbbqh\" (UID: \"86f0eb38-79ac-40a4-9000-caadc16a11a7\") " pod="openshift-console-operator/console-operator-58897d9998-pbbqh" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.204307 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/943dc95b-dbe5-4561-bbe2-b2bf1680eb60-config\") pod \"controller-manager-879f6c89f-cv476\" (UID: \"943dc95b-dbe5-4561-bbe2-b2bf1680eb60\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cv476" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.204328 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8rjj\" (UniqueName: \"kubernetes.io/projected/69e0bd0e-3395-4a06-92ab-ae2fa932902b-kube-api-access-x8rjj\") pod \"dns-default-cp5t2\" (UID: \"69e0bd0e-3395-4a06-92ab-ae2fa932902b\") " pod="openshift-dns/dns-default-cp5t2" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.204351 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/ac2f1df9-6ba2-4684-9bd8-a7134f454945-profile-collector-cert\") pod \"catalog-operator-68c6474976-xwdnv\" (UID: \"ac2f1df9-6ba2-4684-9bd8-a7134f454945\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xwdnv" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.204373 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ad430e2-233b-464e-873e-aa2fc29e22e1-config\") pod \"kube-controller-manager-operator-78b949d7b-r2np7\" (UID: \"6ad430e2-233b-464e-873e-aa2fc29e22e1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-r2np7" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.204393 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/69e0bd0e-3395-4a06-92ab-ae2fa932902b-config-volume\") pod \"dns-default-cp5t2\" (UID: \"69e0bd0e-3395-4a06-92ab-ae2fa932902b\") " pod="openshift-dns/dns-default-cp5t2" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.204439 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/584d6f48-0415-4b3f-813c-a22af4a339cb-images\") pod \"machine-api-operator-5694c8668f-hf97k\" (UID: \"584d6f48-0415-4b3f-813c-a22af4a339cb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-hf97k" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.204479 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8bh2\" (UniqueName: 
\"kubernetes.io/projected/5a571eff-c1d3-4fc0-84e4-43cadf0c7979-kube-api-access-d8bh2\") pod \"collect-profiles-29501685-vmrhm\" (UID: \"5a571eff-c1d3-4fc0-84e4-43cadf0c7979\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501685-vmrhm" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.204504 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/943dc95b-dbe5-4561-bbe2-b2bf1680eb60-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-cv476\" (UID: \"943dc95b-dbe5-4561-bbe2-b2bf1680eb60\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cv476" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.204529 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bj5rj\" (UniqueName: \"kubernetes.io/projected/88a9a581-8243-41b1-8054-94ab0168c25e-kube-api-access-bj5rj\") pod \"openshift-controller-manager-operator-756b6f6bc6-6mmvm\" (UID: \"88a9a581-8243-41b1-8054-94ab0168c25e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6mmvm" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.204552 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/16dec508-e534-422d-8c43-75e9301ef843-mountpoint-dir\") pod \"csi-hostpathplugin-6g4qm\" (UID: \"16dec508-e534-422d-8c43-75e9301ef843\") " pod="hostpath-provisioner/csi-hostpathplugin-6g4qm" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.204576 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9j6lk\" (UniqueName: \"kubernetes.io/projected/fce530e9-fb75-4bb1-af35-292bcdf4bc59-kube-api-access-9j6lk\") pod \"router-default-5444994796-6dmdn\" (UID: \"fce530e9-fb75-4bb1-af35-292bcdf4bc59\") " pod="openshift-ingress/router-default-5444994796-6dmdn" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.204598 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/943dc95b-dbe5-4561-bbe2-b2bf1680eb60-serving-cert\") pod \"controller-manager-879f6c89f-cv476\" (UID: \"943dc95b-dbe5-4561-bbe2-b2bf1680eb60\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cv476" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.204618 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g9zx4\" (UniqueName: \"kubernetes.io/projected/bfeeb90f-4d86-4455-baca-e23a1c85a4c5-kube-api-access-g9zx4\") pod \"machine-config-server-6dxmx\" (UID: \"bfeeb90f-4d86-4455-baca-e23a1c85a4c5\") " pod="openshift-machine-config-operator/machine-config-server-6dxmx" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.204639 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/8aa1c556-e15a-44dc-a37e-d00eb1717bf2-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-hgcng\" (UID: \"8aa1c556-e15a-44dc-a37e-d00eb1717bf2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-hgcng" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.204677 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.204703 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/68ed3797-dc9f-4abe-8508-aa1780a90d7e-config\") pod \"openshift-apiserver-operator-796bbdcf4f-mp7nc\" (UID: \"68ed3797-dc9f-4abe-8508-aa1780a90d7e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mp7nc" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.204728 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8d0e557a-d9eb-4b35-a6a7-e6ab993d5464-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-6d7sj\" (UID: \"8d0e557a-d9eb-4b35-a6a7-e6ab993d5464\") " pod="openshift-marketplace/marketplace-operator-79b997595-6d7sj" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.204755 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6ad430e2-233b-464e-873e-aa2fc29e22e1-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-r2np7\" (UID: \"6ad430e2-233b-464e-873e-aa2fc29e22e1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-r2np7" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.204791 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2lsx8\" (UniqueName: \"kubernetes.io/projected/18114187-6fca-483d-a5e6-e7bcb8c4d254-kube-api-access-2lsx8\") pod \"multus-admission-controller-857f4d67dd-l7cxx\" (UID: \"18114187-6fca-483d-a5e6-e7bcb8c4d254\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-l7cxx" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.204816 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8ckx\" (UniqueName: \"kubernetes.io/projected/16dec508-e534-422d-8c43-75e9301ef843-kube-api-access-t8ckx\") pod \"csi-hostpathplugin-6g4qm\" (UID: \"16dec508-e534-422d-8c43-75e9301ef843\") " pod="hostpath-provisioner/csi-hostpathplugin-6g4qm" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.204859 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b088ff98-5a5a-465d-ad48-9017fdebc2bf-trusted-ca\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.204880 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86f0eb38-79ac-40a4-9000-caadc16a11a7-config\") pod \"console-operator-58897d9998-pbbqh\" (UID: \"86f0eb38-79ac-40a4-9000-caadc16a11a7\") " pod="openshift-console-operator/console-operator-58897d9998-pbbqh" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.204931 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/c15ee4dd-48e2-423d-bbd4-0e136da1d313-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-tpbvt\" (UID: \"c15ee4dd-48e2-423d-bbd4-0e136da1d313\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tpbvt" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.204958 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xb46g\" (UniqueName: \"kubernetes.io/projected/4552ae6c-059f-4f4a-9f20-d6468bc275bc-kube-api-access-xb46g\") pod \"ingress-operator-5b745b69d9-qshv4\" (UID: \"4552ae6c-059f-4f4a-9f20-d6468bc275bc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qshv4" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.204981 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/86766b51-749e-474c-9bb5-f51af2ec045a-proxy-tls\") pod \"machine-config-controller-84d6567774-ksr8m\" (UID: \"86766b51-749e-474c-9bb5-f51af2ec045a\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ksr8m" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.205045 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b088ff98-5a5a-465d-ad48-9017fdebc2bf-bound-sa-token\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.205067 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/16dec508-e534-422d-8c43-75e9301ef843-socket-dir\") pod \"csi-hostpathplugin-6g4qm\" (UID: \"16dec508-e534-422d-8c43-75e9301ef843\") " pod="hostpath-provisioner/csi-hostpathplugin-6g4qm" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.205091 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2p58n\" (UniqueName: \"kubernetes.io/projected/6579289f-8354-47bf-bcb2-50c252008c11-kube-api-access-2p58n\") pod \"ingress-canary-p5fkm\" (UID: \"6579289f-8354-47bf-bcb2-50c252008c11\") " pod="openshift-ingress-canary/ingress-canary-p5fkm" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.205117 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.205141 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c366332a-636c-4b25-9b54-89672576b476-config\") pod \"kube-apiserver-operator-766d6c64bb-lhs9k\" (UID: \"c366332a-636c-4b25-9b54-89672576b476\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lhs9k" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.205177 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: 
\"kubernetes.io/host-path/16dec508-e534-422d-8c43-75e9301ef843-registration-dir\") pod \"csi-hostpathplugin-6g4qm\" (UID: \"16dec508-e534-422d-8c43-75e9301ef843\") " pod="hostpath-provisioner/csi-hostpathplugin-6g4qm" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.205234 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c23f4fd7-0bd7-4b4c-9697-b6bb583dba72-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-fx6gz\" (UID: \"c23f4fd7-0bd7-4b4c-9697-b6bb583dba72\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fx6gz" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.205257 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zql7\" (UniqueName: \"kubernetes.io/projected/847a84bb-a664-4310-82af-79849df6df67-kube-api-access-5zql7\") pod \"service-ca-operator-777779d784-sxhrq\" (UID: \"847a84bb-a664-4310-82af-79849df6df67\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-sxhrq" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.205294 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.205313 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/86f0eb38-79ac-40a4-9000-caadc16a11a7-serving-cert\") pod \"console-operator-58897d9998-pbbqh\" (UID: \"86f0eb38-79ac-40a4-9000-caadc16a11a7\") " pod="openshift-console-operator/console-operator-58897d9998-pbbqh" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.205345 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzrrc\" (UniqueName: \"kubernetes.io/projected/8d0e557a-d9eb-4b35-a6a7-e6ab993d5464-kube-api-access-dzrrc\") pod \"marketplace-operator-79b997595-6d7sj\" (UID: \"8d0e557a-d9eb-4b35-a6a7-e6ab993d5464\") " pod="openshift-marketplace/marketplace-operator-79b997595-6d7sj" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.205371 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c23f4fd7-0bd7-4b4c-9697-b6bb583dba72-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-fx6gz\" (UID: \"c23f4fd7-0bd7-4b4c-9697-b6bb583dba72\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fx6gz" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.205404 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-grxgf\" (UniqueName: \"kubernetes.io/projected/c15ee4dd-48e2-423d-bbd4-0e136da1d313-kube-api-access-grxgf\") pod \"cluster-samples-operator-665b6dd947-tpbvt\" (UID: \"c15ee4dd-48e2-423d-bbd4-0e136da1d313\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tpbvt" Feb 03 06:47:32 crc kubenswrapper[4998]: E0203 06:47:32.205440 4998 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:32.705431133 +0000 UTC m=+90.992124939 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.205510 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvj7x\" (UniqueName: \"kubernetes.io/projected/ac2f1df9-6ba2-4684-9bd8-a7134f454945-kube-api-access-kvj7x\") pod \"catalog-operator-68c6474976-xwdnv\" (UID: \"ac2f1df9-6ba2-4684-9bd8-a7134f454945\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xwdnv" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.205544 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/fce530e9-fb75-4bb1-af35-292bcdf4bc59-stats-auth\") pod \"router-default-5444994796-6dmdn\" (UID: \"fce530e9-fb75-4bb1-af35-292bcdf4bc59\") " pod="openshift-ingress/router-default-5444994796-6dmdn" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.205570 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/ff1b3af9-22f7-4ac6-93a5-5794c7022f68-profile-collector-cert\") pod \"olm-operator-6b444d44fb-jppzk\" (UID: \"ff1b3af9-22f7-4ac6-93a5-5794c7022f68\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jppzk" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.205612 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/68ed3797-dc9f-4abe-8508-aa1780a90d7e-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-mp7nc\" (UID: \"68ed3797-dc9f-4abe-8508-aa1780a90d7e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mp7nc" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.205631 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/c39b4ff4-9f98-4732-af70-115c9bb38cb6-signing-cabundle\") pod \"service-ca-9c57cc56f-t6spv\" (UID: \"c39b4ff4-9f98-4732-af70-115c9bb38cb6\") " pod="openshift-service-ca/service-ca-9c57cc56f-t6spv" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.205668 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bg268\" (UniqueName: \"kubernetes.io/projected/b088ff98-5a5a-465d-ad48-9017fdebc2bf-kube-api-access-bg268\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.205687 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: 
\"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.205707 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzngw\" (UniqueName: \"kubernetes.io/projected/96c6de38-070f-49ec-83b2-5b0a239c4922-kube-api-access-pzngw\") pod \"migrator-59844c95c7-ffwz5\" (UID: \"96c6de38-070f-49ec-83b2-5b0a239c4922\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ffwz5" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.205727 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/bc17a6bb-32b1-4f5c-bbe5-24d5d0d379d5-apiservice-cert\") pod \"packageserver-d55dfcdfc-kchsz\" (UID: \"bc17a6bb-32b1-4f5c-bbe5-24d5d0d379d5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kchsz" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.205748 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/c39b4ff4-9f98-4732-af70-115c9bb38cb6-signing-key\") pod \"service-ca-9c57cc56f-t6spv\" (UID: \"c39b4ff4-9f98-4732-af70-115c9bb38cb6\") " pod="openshift-service-ca/service-ca-9c57cc56f-t6spv" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.205801 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.205830 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5a571eff-c1d3-4fc0-84e4-43cadf0c7979-secret-volume\") pod \"collect-profiles-29501685-vmrhm\" (UID: \"5a571eff-c1d3-4fc0-84e4-43cadf0c7979\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501685-vmrhm" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.205885 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b088ff98-5a5a-465d-ad48-9017fdebc2bf-registry-certificates\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.205903 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/86f0eb38-79ac-40a4-9000-caadc16a11a7-trusted-ca\") pod \"console-operator-58897d9998-pbbqh\" (UID: \"86f0eb38-79ac-40a4-9000-caadc16a11a7\") " pod="openshift-console-operator/console-operator-58897d9998-pbbqh" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.205946 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/263a6d98-1027-4782-8d2b-1b7274f389ea-audit-dir\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: 
\"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.205998 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.206017 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.206034 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/bfeeb90f-4d86-4455-baca-e23a1c85a4c5-node-bootstrap-token\") pod \"machine-config-server-6dxmx\" (UID: \"bfeeb90f-4d86-4455-baca-e23a1c85a4c5\") " pod="openshift-machine-config-operator/machine-config-server-6dxmx" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.206055 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9m6dq\" (UniqueName: \"kubernetes.io/projected/943dc95b-dbe5-4561-bbe2-b2bf1680eb60-kube-api-access-9m6dq\") pod \"controller-manager-879f6c89f-cv476\" (UID: \"943dc95b-dbe5-4561-bbe2-b2bf1680eb60\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cv476" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.206071 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/bc17a6bb-32b1-4f5c-bbe5-24d5d0d379d5-tmpfs\") pod \"packageserver-d55dfcdfc-kchsz\" (UID: \"bc17a6bb-32b1-4f5c-bbe5-24d5d0d379d5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kchsz" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.206088 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vr4v\" (UniqueName: \"kubernetes.io/projected/86766b51-749e-474c-9bb5-f51af2ec045a-kube-api-access-5vr4v\") pod \"machine-config-controller-84d6567774-ksr8m\" (UID: \"86766b51-749e-474c-9bb5-f51af2ec045a\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ksr8m" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.206111 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/584d6f48-0415-4b3f-813c-a22af4a339cb-config\") pod \"machine-api-operator-5694c8668f-hf97k\" (UID: \"584d6f48-0415-4b3f-813c-a22af4a339cb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-hf97k" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.206128 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vmld\" (UniqueName: \"kubernetes.io/projected/ff1b3af9-22f7-4ac6-93a5-5794c7022f68-kube-api-access-9vmld\") pod \"olm-operator-6b444d44fb-jppzk\" (UID: \"ff1b3af9-22f7-4ac6-93a5-5794c7022f68\") " 
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jppzk" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.206163 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/ac2f1df9-6ba2-4684-9bd8-a7134f454945-srv-cert\") pod \"catalog-operator-68c6474976-xwdnv\" (UID: \"ac2f1df9-6ba2-4684-9bd8-a7134f454945\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xwdnv" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.206182 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vfh2\" (UniqueName: \"kubernetes.io/projected/8aa1c556-e15a-44dc-a37e-d00eb1717bf2-kube-api-access-9vfh2\") pod \"package-server-manager-789f6589d5-hgcng\" (UID: \"8aa1c556-e15a-44dc-a37e-d00eb1717bf2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-hgcng" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.206222 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/86766b51-749e-474c-9bb5-f51af2ec045a-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-ksr8m\" (UID: \"86766b51-749e-474c-9bb5-f51af2ec045a\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ksr8m" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.206243 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/16dec508-e534-422d-8c43-75e9301ef843-plugins-dir\") pod \"csi-hostpathplugin-6g4qm\" (UID: \"16dec508-e534-422d-8c43-75e9301ef843\") " pod="hostpath-provisioner/csi-hostpathplugin-6g4qm" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.206265 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fce530e9-fb75-4bb1-af35-292bcdf4bc59-service-ca-bundle\") pod \"router-default-5444994796-6dmdn\" (UID: \"fce530e9-fb75-4bb1-af35-292bcdf4bc59\") " pod="openshift-ingress/router-default-5444994796-6dmdn" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.206283 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/4552ae6c-059f-4f4a-9f20-d6468bc275bc-metrics-tls\") pod \"ingress-operator-5b745b69d9-qshv4\" (UID: \"4552ae6c-059f-4f4a-9f20-d6468bc275bc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qshv4" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.206300 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/847a84bb-a664-4310-82af-79849df6df67-config\") pod \"service-ca-operator-777779d784-sxhrq\" (UID: \"847a84bb-a664-4310-82af-79849df6df67\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-sxhrq" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.206317 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6579289f-8354-47bf-bcb2-50c252008c11-cert\") pod \"ingress-canary-p5fkm\" (UID: \"6579289f-8354-47bf-bcb2-50c252008c11\") " pod="openshift-ingress-canary/ingress-canary-p5fkm" Feb 03 06:47:32 crc 
kubenswrapper[4998]: I0203 06:47:32.206340 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c366332a-636c-4b25-9b54-89672576b476-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-lhs9k\" (UID: \"c366332a-636c-4b25-9b54-89672576b476\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lhs9k"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.206359 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c366332a-636c-4b25-9b54-89672576b476-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-lhs9k\" (UID: \"c366332a-636c-4b25-9b54-89672576b476\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lhs9k"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.206379 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.206399 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8zpz\" (UniqueName: \"kubernetes.io/projected/3fe2dd6d-16f7-4170-b830-b4d7406f9f30-kube-api-access-g8zpz\") pod \"dns-operator-744455d44c-mqrkr\" (UID: \"3fe2dd6d-16f7-4170-b830-b4d7406f9f30\") " pod="openshift-dns-operator/dns-operator-744455d44c-mqrkr"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.206418 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqb5t\" (UniqueName: \"kubernetes.io/projected/bc17a6bb-32b1-4f5c-bbe5-24d5d0d379d5-kube-api-access-dqb5t\") pod \"packageserver-d55dfcdfc-kchsz\" (UID: \"bc17a6bb-32b1-4f5c-bbe5-24d5d0d379d5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kchsz"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.206453 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.206471 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/fce530e9-fb75-4bb1-af35-292bcdf4bc59-default-certificate\") pod \"router-default-5444994796-6dmdn\" (UID: \"fce530e9-fb75-4bb1-af35-292bcdf4bc59\") " pod="openshift-ingress/router-default-5444994796-6dmdn"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.208029 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b088ff98-5a5a-465d-ad48-9017fdebc2bf-registry-certificates\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.208462 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b088ff98-5a5a-465d-ad48-9017fdebc2bf-ca-trust-extracted\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.208521 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/263a6d98-1027-4782-8d2b-1b7274f389ea-audit-dir\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.209312 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/263a6d98-1027-4782-8d2b-1b7274f389ea-audit-policies\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.210870 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/584d6f48-0415-4b3f-813c-a22af4a339cb-images\") pod \"machine-api-operator-5694c8668f-hf97k\" (UID: \"584d6f48-0415-4b3f-813c-a22af4a339cb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-hf97k"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.211876 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.215300 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b088ff98-5a5a-465d-ad48-9017fdebc2bf-trusted-ca\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.215663 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.215903 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.216647 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/68ed3797-dc9f-4abe-8508-aa1780a90d7e-config\") pod \"openshift-apiserver-operator-796bbdcf4f-mp7nc\" (UID: \"68ed3797-dc9f-4abe-8508-aa1780a90d7e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mp7nc"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.218240 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.218481 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88a9a581-8243-41b1-8054-94ab0168c25e-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-6mmvm\" (UID: \"88a9a581-8243-41b1-8054-94ab0168c25e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6mmvm"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.219069 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b088ff98-5a5a-465d-ad48-9017fdebc2bf-installation-pull-secrets\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.223933 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.226164 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/584d6f48-0415-4b3f-813c-a22af4a339cb-config\") pod \"machine-api-operator-5694c8668f-hf97k\" (UID: \"584d6f48-0415-4b3f-813c-a22af4a339cb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-hf97k"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.226438 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/68ed3797-dc9f-4abe-8508-aa1780a90d7e-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-mp7nc\" (UID: \"68ed3797-dc9f-4abe-8508-aa1780a90d7e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mp7nc"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.229753 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.230195 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3fe2dd6d-16f7-4170-b830-b4d7406f9f30-metrics-tls\") pod \"dns-operator-744455d44c-mqrkr\" (UID: \"3fe2dd6d-16f7-4170-b830-b4d7406f9f30\") " pod="openshift-dns-operator/dns-operator-744455d44c-mqrkr"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.232584 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.233075 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.233293 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b088ff98-5a5a-465d-ad48-9017fdebc2bf-registry-tls\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.233562 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.234152 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/584d6f48-0415-4b3f-813c-a22af4a339cb-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-hf97k\" (UID: \"584d6f48-0415-4b3f-813c-a22af4a339cb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-hf97k"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.235038 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.240335 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/88a9a581-8243-41b1-8054-94ab0168c25e-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-6mmvm\" (UID: \"88a9a581-8243-41b1-8054-94ab0168c25e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6mmvm"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.240816 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.250333 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/c15ee4dd-48e2-423d-bbd4-0e136da1d313-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-tpbvt\" (UID: \"c15ee4dd-48e2-423d-bbd4-0e136da1d313\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tpbvt"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.259449 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-zqbnn"]
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.270239 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jjbxk\" (UniqueName: \"kubernetes.io/projected/263a6d98-1027-4782-8d2b-1b7274f389ea-kube-api-access-jjbxk\") pod \"oauth-openshift-558db77b4-zx8sw\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.270489 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.298759 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67pfd\" (UniqueName: \"kubernetes.io/projected/584d6f48-0415-4b3f-813c-a22af4a339cb-kube-api-access-67pfd\") pod \"machine-api-operator-5694c8668f-hf97k\" (UID: \"584d6f48-0415-4b3f-813c-a22af4a339cb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-hf97k"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.308597 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.309221 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5a571eff-c1d3-4fc0-84e4-43cadf0c7979-secret-volume\") pod \"collect-profiles-29501685-vmrhm\" (UID: \"5a571eff-c1d3-4fc0-84e4-43cadf0c7979\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501685-vmrhm"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.309260 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/86f0eb38-79ac-40a4-9000-caadc16a11a7-trusted-ca\") pod \"console-operator-58897d9998-pbbqh\" (UID: \"86f0eb38-79ac-40a4-9000-caadc16a11a7\") " pod="openshift-console-operator/console-operator-58897d9998-pbbqh"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.309287 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/bfeeb90f-4d86-4455-baca-e23a1c85a4c5-node-bootstrap-token\") pod \"machine-config-server-6dxmx\" (UID: \"bfeeb90f-4d86-4455-baca-e23a1c85a4c5\") " pod="openshift-machine-config-operator/machine-config-server-6dxmx"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.309308 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9m6dq\" (UniqueName: \"kubernetes.io/projected/943dc95b-dbe5-4561-bbe2-b2bf1680eb60-kube-api-access-9m6dq\") pod \"controller-manager-879f6c89f-cv476\" (UID: \"943dc95b-dbe5-4561-bbe2-b2bf1680eb60\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cv476"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.309328 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/bc17a6bb-32b1-4f5c-bbe5-24d5d0d379d5-tmpfs\") pod \"packageserver-d55dfcdfc-kchsz\" (UID: \"bc17a6bb-32b1-4f5c-bbe5-24d5d0d379d5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kchsz"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.309352 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vr4v\" (UniqueName: \"kubernetes.io/projected/86766b51-749e-474c-9bb5-f51af2ec045a-kube-api-access-5vr4v\") pod \"machine-config-controller-84d6567774-ksr8m\" (UID: \"86766b51-749e-474c-9bb5-f51af2ec045a\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ksr8m"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.309370 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vmld\" (UniqueName: \"kubernetes.io/projected/ff1b3af9-22f7-4ac6-93a5-5794c7022f68-kube-api-access-9vmld\") pod \"olm-operator-6b444d44fb-jppzk\" (UID: \"ff1b3af9-22f7-4ac6-93a5-5794c7022f68\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jppzk"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.309393 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/ac2f1df9-6ba2-4684-9bd8-a7134f454945-srv-cert\") pod \"catalog-operator-68c6474976-xwdnv\" (UID: \"ac2f1df9-6ba2-4684-9bd8-a7134f454945\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xwdnv"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.309411 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vfh2\" (UniqueName: \"kubernetes.io/projected/8aa1c556-e15a-44dc-a37e-d00eb1717bf2-kube-api-access-9vfh2\") pod \"package-server-manager-789f6589d5-hgcng\" (UID: \"8aa1c556-e15a-44dc-a37e-d00eb1717bf2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-hgcng"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.309430 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/86766b51-749e-474c-9bb5-f51af2ec045a-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-ksr8m\" (UID: \"86766b51-749e-474c-9bb5-f51af2ec045a\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ksr8m"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.309450 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/16dec508-e534-422d-8c43-75e9301ef843-plugins-dir\") pod \"csi-hostpathplugin-6g4qm\" (UID: \"16dec508-e534-422d-8c43-75e9301ef843\") " pod="hostpath-provisioner/csi-hostpathplugin-6g4qm"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.309466 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6579289f-8354-47bf-bcb2-50c252008c11-cert\") pod \"ingress-canary-p5fkm\" (UID: \"6579289f-8354-47bf-bcb2-50c252008c11\") " pod="openshift-ingress-canary/ingress-canary-p5fkm"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.309487 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fce530e9-fb75-4bb1-af35-292bcdf4bc59-service-ca-bundle\") pod \"router-default-5444994796-6dmdn\" (UID: \"fce530e9-fb75-4bb1-af35-292bcdf4bc59\") " pod="openshift-ingress/router-default-5444994796-6dmdn"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.309507 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/4552ae6c-059f-4f4a-9f20-d6468bc275bc-metrics-tls\") pod \"ingress-operator-5b745b69d9-qshv4\" (UID: \"4552ae6c-059f-4f4a-9f20-d6468bc275bc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qshv4"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.309526 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/847a84bb-a664-4310-82af-79849df6df67-config\") pod \"service-ca-operator-777779d784-sxhrq\" (UID: \"847a84bb-a664-4310-82af-79849df6df67\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-sxhrq"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.309543 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c366332a-636c-4b25-9b54-89672576b476-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-lhs9k\" (UID: \"c366332a-636c-4b25-9b54-89672576b476\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lhs9k"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.309562 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c366332a-636c-4b25-9b54-89672576b476-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-lhs9k\" (UID: \"c366332a-636c-4b25-9b54-89672576b476\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lhs9k"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.309589 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqb5t\" (UniqueName: \"kubernetes.io/projected/bc17a6bb-32b1-4f5c-bbe5-24d5d0d379d5-kube-api-access-dqb5t\") pod \"packageserver-d55dfcdfc-kchsz\" (UID: \"bc17a6bb-32b1-4f5c-bbe5-24d5d0d379d5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kchsz"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.309612 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/fce530e9-fb75-4bb1-af35-292bcdf4bc59-default-certificate\") pod \"router-default-5444994796-6dmdn\" (UID: \"fce530e9-fb75-4bb1-af35-292bcdf4bc59\") " pod="openshift-ingress/router-default-5444994796-6dmdn"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.309639 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/18114187-6fca-483d-a5e6-e7bcb8c4d254-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-l7cxx\" (UID: \"18114187-6fca-483d-a5e6-e7bcb8c4d254\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-l7cxx"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.309659 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/ff1b3af9-22f7-4ac6-93a5-5794c7022f68-srv-cert\") pod \"olm-operator-6b444d44fb-jppzk\" (UID: \"ff1b3af9-22f7-4ac6-93a5-5794c7022f68\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jppzk"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.309677 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4552ae6c-059f-4f4a-9f20-d6468bc275bc-trusted-ca\") pod \"ingress-operator-5b745b69d9-qshv4\" (UID: \"4552ae6c-059f-4f4a-9f20-d6468bc275bc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qshv4"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.309696 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fce530e9-fb75-4bb1-af35-292bcdf4bc59-metrics-certs\") pod \"router-default-5444994796-6dmdn\" (UID: \"fce530e9-fb75-4bb1-af35-292bcdf4bc59\") " pod="openshift-ingress/router-default-5444994796-6dmdn"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.309720 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/f29b800f-24cd-4615-8692-3fd6e84ad338-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-57hdh\" (UID: \"f29b800f-24cd-4615-8692-3fd6e84ad338\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-57hdh"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.309750 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/bfeeb90f-4d86-4455-baca-e23a1c85a4c5-certs\") pod \"machine-config-server-6dxmx\" (UID: \"bfeeb90f-4d86-4455-baca-e23a1c85a4c5\") " pod="openshift-machine-config-operator/machine-config-server-6dxmx"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.309773 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/16dec508-e534-422d-8c43-75e9301ef843-csi-data-dir\") pod \"csi-hostpathplugin-6g4qm\" (UID: \"16dec508-e534-422d-8c43-75e9301ef843\") " pod="hostpath-provisioner/csi-hostpathplugin-6g4qm"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.309808 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5a571eff-c1d3-4fc0-84e4-43cadf0c7979-config-volume\") pod \"collect-profiles-29501685-vmrhm\" (UID: \"5a571eff-c1d3-4fc0-84e4-43cadf0c7979\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501685-vmrhm"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.309829 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4552ae6c-059f-4f4a-9f20-d6468bc275bc-bound-sa-token\") pod \"ingress-operator-5b745b69d9-qshv4\" (UID: \"4552ae6c-059f-4f4a-9f20-d6468bc275bc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qshv4"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.310279 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvxgm\" (UniqueName: \"kubernetes.io/projected/68ed3797-dc9f-4abe-8508-aa1780a90d7e-kube-api-access-fvxgm\") pod \"openshift-apiserver-operator-796bbdcf4f-mp7nc\" (UID: \"68ed3797-dc9f-4abe-8508-aa1780a90d7e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mp7nc"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.314080 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fce530e9-fb75-4bb1-af35-292bcdf4bc59-service-ca-bundle\") pod \"router-default-5444994796-6dmdn\" (UID: \"fce530e9-fb75-4bb1-af35-292bcdf4bc59\") " pod="openshift-ingress/router-default-5444994796-6dmdn"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.314404 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/16dec508-e534-422d-8c43-75e9301ef843-csi-data-dir\") pod \"csi-hostpathplugin-6g4qm\" (UID: \"16dec508-e534-422d-8c43-75e9301ef843\") " pod="hostpath-provisioner/csi-hostpathplugin-6g4qm"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.315888 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4552ae6c-059f-4f4a-9f20-d6468bc275bc-trusted-ca\") pod \"ingress-operator-5b745b69d9-qshv4\" (UID: \"4552ae6c-059f-4f4a-9f20-d6468bc275bc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qshv4"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.316020 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5a571eff-c1d3-4fc0-84e4-43cadf0c7979-config-volume\") pod \"collect-profiles-29501685-vmrhm\" (UID: \"5a571eff-c1d3-4fc0-84e4-43cadf0c7979\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501685-vmrhm"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.316121 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/bc17a6bb-32b1-4f5c-bbe5-24d5d0d379d5-webhook-cert\") pod \"packageserver-d55dfcdfc-kchsz\" (UID: \"bc17a6bb-32b1-4f5c-bbe5-24d5d0d379d5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kchsz"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.316196 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8d0e557a-d9eb-4b35-a6a7-e6ab993d5464-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-6d7sj\" (UID: \"8d0e557a-d9eb-4b35-a6a7-e6ab993d5464\") " pod="openshift-marketplace/marketplace-operator-79b997595-6d7sj"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.316244 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kcfsh\" (UniqueName: \"kubernetes.io/projected/f29b800f-24cd-4615-8692-3fd6e84ad338-kube-api-access-kcfsh\") pod \"control-plane-machine-set-operator-78cbb6b69f-57hdh\" (UID: \"f29b800f-24cd-4615-8692-3fd6e84ad338\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-57hdh"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.316272 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/847a84bb-a664-4310-82af-79849df6df67-serving-cert\") pod \"service-ca-operator-777779d784-sxhrq\" (UID: \"847a84bb-a664-4310-82af-79849df6df67\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-sxhrq"
Feb 03 06:47:32 crc kubenswrapper[4998]: E0203 06:47:32.316542 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:32.816513212 +0000 UTC m=+91.103207018 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.316645 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.316694 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/943dc95b-dbe5-4561-bbe2-b2bf1680eb60-client-ca\") pod \"controller-manager-879f6c89f-cv476\" (UID: \"943dc95b-dbe5-4561-bbe2-b2bf1680eb60\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cv476"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.316729 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6ad430e2-233b-464e-873e-aa2fc29e22e1-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-r2np7\" (UID: \"6ad430e2-233b-464e-873e-aa2fc29e22e1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-r2np7"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.319198 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/86766b51-749e-474c-9bb5-f51af2ec045a-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-ksr8m\" (UID: \"86766b51-749e-474c-9bb5-f51af2ec045a\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ksr8m"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.320466 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/847a84bb-a664-4310-82af-79849df6df67-config\") pod \"service-ca-operator-777779d784-sxhrq\" (UID: \"847a84bb-a664-4310-82af-79849df6df67\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-sxhrq"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.320804 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fce530e9-fb75-4bb1-af35-292bcdf4bc59-metrics-certs\") pod \"router-default-5444994796-6dmdn\" (UID: \"fce530e9-fb75-4bb1-af35-292bcdf4bc59\") " pod="openshift-ingress/router-default-5444994796-6dmdn"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.321913 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/86f0eb38-79ac-40a4-9000-caadc16a11a7-trusted-ca\") pod \"console-operator-58897d9998-pbbqh\" (UID: \"86f0eb38-79ac-40a4-9000-caadc16a11a7\") " pod="openshift-console-operator/console-operator-58897d9998-pbbqh"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.323208 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/f29b800f-24cd-4615-8692-3fd6e84ad338-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-57hdh\" (UID: \"f29b800f-24cd-4615-8692-3fd6e84ad338\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-57hdh"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.323627 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/bc17a6bb-32b1-4f5c-bbe5-24d5d0d379d5-tmpfs\") pod \"packageserver-d55dfcdfc-kchsz\" (UID: \"bc17a6bb-32b1-4f5c-bbe5-24d5d0d379d5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kchsz"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.324619 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c366332a-636c-4b25-9b54-89672576b476-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-lhs9k\" (UID: \"c366332a-636c-4b25-9b54-89672576b476\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lhs9k"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.325140 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/16dec508-e534-422d-8c43-75e9301ef843-plugins-dir\") pod \"csi-hostpathplugin-6g4qm\" (UID: \"16dec508-e534-422d-8c43-75e9301ef843\") " pod="hostpath-provisioner/csi-hostpathplugin-6g4qm"
Feb 03 06:47:32 crc kubenswrapper[4998]: E0203 06:47:32.326403 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:32.826376466 +0000 UTC m=+91.113070262 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.326527 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8d0e557a-d9eb-4b35-a6a7-e6ab993d5464-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-6d7sj\" (UID: \"8d0e557a-d9eb-4b35-a6a7-e6ab993d5464\") " pod="openshift-marketplace/marketplace-operator-79b997595-6d7sj"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.326904 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lv6kz\" (UniqueName: \"kubernetes.io/projected/c23f4fd7-0bd7-4b4c-9697-b6bb583dba72-kube-api-access-lv6kz\") pod \"kube-storage-version-migrator-operator-b67b599dd-fx6gz\" (UID: \"c23f4fd7-0bd7-4b4c-9697-b6bb583dba72\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fx6gz"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.327044 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-psscq\" (UniqueName: \"kubernetes.io/projected/c39b4ff4-9f98-4732-af70-115c9bb38cb6-kube-api-access-psscq\") pod \"service-ca-9c57cc56f-t6spv\" (UID: \"c39b4ff4-9f98-4732-af70-115c9bb38cb6\") " pod="openshift-service-ca/service-ca-9c57cc56f-t6spv"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.327261 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/69e0bd0e-3395-4a06-92ab-ae2fa932902b-metrics-tls\") pod \"dns-default-cp5t2\" (UID: \"69e0bd0e-3395-4a06-92ab-ae2fa932902b\") " pod="openshift-dns/dns-default-cp5t2"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.327318 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfxf4\" (UniqueName: \"kubernetes.io/projected/86f0eb38-79ac-40a4-9000-caadc16a11a7-kube-api-access-lfxf4\") pod \"console-operator-58897d9998-pbbqh\" (UID: \"86f0eb38-79ac-40a4-9000-caadc16a11a7\") " pod="openshift-console-operator/console-operator-58897d9998-pbbqh"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.327339 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8rjj\" (UniqueName: \"kubernetes.io/projected/69e0bd0e-3395-4a06-92ab-ae2fa932902b-kube-api-access-x8rjj\") pod \"dns-default-cp5t2\" (UID: \"69e0bd0e-3395-4a06-92ab-ae2fa932902b\") " pod="openshift-dns/dns-default-cp5t2"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.327794 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/943dc95b-dbe5-4561-bbe2-b2bf1680eb60-config\") pod \"controller-manager-879f6c89f-cv476\" (UID: \"943dc95b-dbe5-4561-bbe2-b2bf1680eb60\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cv476"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.327865 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/ac2f1df9-6ba2-4684-9bd8-a7134f454945-profile-collector-cert\") pod \"catalog-operator-68c6474976-xwdnv\" (UID: \"ac2f1df9-6ba2-4684-9bd8-a7134f454945\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xwdnv"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.327890 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ad430e2-233b-464e-873e-aa2fc29e22e1-config\") pod \"kube-controller-manager-operator-78b949d7b-r2np7\" (UID: \"6ad430e2-233b-464e-873e-aa2fc29e22e1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-r2np7"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.327909 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/69e0bd0e-3395-4a06-92ab-ae2fa932902b-config-volume\") pod \"dns-default-cp5t2\" (UID: \"69e0bd0e-3395-4a06-92ab-ae2fa932902b\") " pod="openshift-dns/dns-default-cp5t2"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.327946 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8bh2\" (UniqueName: \"kubernetes.io/projected/5a571eff-c1d3-4fc0-84e4-43cadf0c7979-kube-api-access-d8bh2\") pod \"collect-profiles-29501685-vmrhm\" (UID: \"5a571eff-c1d3-4fc0-84e4-43cadf0c7979\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501685-vmrhm"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.327967 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/943dc95b-dbe5-4561-bbe2-b2bf1680eb60-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-cv476\" (UID: \"943dc95b-dbe5-4561-bbe2-b2bf1680eb60\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cv476"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.327999 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/16dec508-e534-422d-8c43-75e9301ef843-mountpoint-dir\") pod \"csi-hostpathplugin-6g4qm\" (UID: \"16dec508-e534-422d-8c43-75e9301ef843\") " pod="hostpath-provisioner/csi-hostpathplugin-6g4qm"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.328022 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9j6lk\" (UniqueName: \"kubernetes.io/projected/fce530e9-fb75-4bb1-af35-292bcdf4bc59-kube-api-access-9j6lk\") pod \"router-default-5444994796-6dmdn\" (UID: \"fce530e9-fb75-4bb1-af35-292bcdf4bc59\") " pod="openshift-ingress/router-default-5444994796-6dmdn"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.328042 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/943dc95b-dbe5-4561-bbe2-b2bf1680eb60-serving-cert\") pod \"controller-manager-879f6c89f-cv476\" (UID: \"943dc95b-dbe5-4561-bbe2-b2bf1680eb60\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cv476"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.328062 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g9zx4\" (UniqueName: \"kubernetes.io/projected/bfeeb90f-4d86-4455-baca-e23a1c85a4c5-kube-api-access-g9zx4\") pod \"machine-config-server-6dxmx\" (UID: \"bfeeb90f-4d86-4455-baca-e23a1c85a4c5\") " pod="openshift-machine-config-operator/machine-config-server-6dxmx"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.328091 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/8aa1c556-e15a-44dc-a37e-d00eb1717bf2-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-hgcng\" (UID: \"8aa1c556-e15a-44dc-a37e-d00eb1717bf2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-hgcng"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.328381 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8d0e557a-d9eb-4b35-a6a7-e6ab993d5464-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-6d7sj\" (UID: \"8d0e557a-d9eb-4b35-a6a7-e6ab993d5464\") " pod="openshift-marketplace/marketplace-operator-79b997595-6d7sj"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.328436 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6ad430e2-233b-464e-873e-aa2fc29e22e1-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-r2np7\" (UID: \"6ad430e2-233b-464e-873e-aa2fc29e22e1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-r2np7"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.328461 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2lsx8\" (UniqueName: \"kubernetes.io/projected/18114187-6fca-483d-a5e6-e7bcb8c4d254-kube-api-access-2lsx8\") pod \"multus-admission-controller-857f4d67dd-l7cxx\" (UID: \"18114187-6fca-483d-a5e6-e7bcb8c4d254\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-l7cxx"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.328496 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8ckx\" (UniqueName: \"kubernetes.io/projected/16dec508-e534-422d-8c43-75e9301ef843-kube-api-access-t8ckx\") pod \"csi-hostpathplugin-6g4qm\" (UID: \"16dec508-e534-422d-8c43-75e9301ef843\") " pod="hostpath-provisioner/csi-hostpathplugin-6g4qm"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.328529 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86f0eb38-79ac-40a4-9000-caadc16a11a7-config\") pod \"console-operator-58897d9998-pbbqh\" (UID: \"86f0eb38-79ac-40a4-9000-caadc16a11a7\") " pod="openshift-console-operator/console-operator-58897d9998-pbbqh"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.328565 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xb46g\" (UniqueName: \"kubernetes.io/projected/4552ae6c-059f-4f4a-9f20-d6468bc275bc-kube-api-access-xb46g\") pod \"ingress-operator-5b745b69d9-qshv4\" (UID: \"4552ae6c-059f-4f4a-9f20-d6468bc275bc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qshv4"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.328895 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/86766b51-749e-474c-9bb5-f51af2ec045a-proxy-tls\") pod \"machine-config-controller-84d6567774-ksr8m\" (UID: \"86766b51-749e-474c-9bb5-f51af2ec045a\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ksr8m"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.328941 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2p58n\" (UniqueName: \"kubernetes.io/projected/6579289f-8354-47bf-bcb2-50c252008c11-kube-api-access-2p58n\") pod \"ingress-canary-p5fkm\" (UID: \"6579289f-8354-47bf-bcb2-50c252008c11\") " pod="openshift-ingress-canary/ingress-canary-p5fkm"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.329009 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/16dec508-e534-422d-8c43-75e9301ef843-socket-dir\") pod \"csi-hostpathplugin-6g4qm\" (UID: \"16dec508-e534-422d-8c43-75e9301ef843\") " pod="hostpath-provisioner/csi-hostpathplugin-6g4qm"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.329046 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c366332a-636c-4b25-9b54-89672576b476-config\") pod \"kube-apiserver-operator-766d6c64bb-lhs9k\" (UID: \"c366332a-636c-4b25-9b54-89672576b476\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lhs9k"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.329082 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/16dec508-e534-422d-8c43-75e9301ef843-registration-dir\") pod \"csi-hostpathplugin-6g4qm\" (UID: \"16dec508-e534-422d-8c43-75e9301ef843\") " pod="hostpath-provisioner/csi-hostpathplugin-6g4qm"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.329122 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c23f4fd7-0bd7-4b4c-9697-b6bb583dba72-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-fx6gz\" (UID: \"c23f4fd7-0bd7-4b4c-9697-b6bb583dba72\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fx6gz"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.329155 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zql7\" (UniqueName: \"kubernetes.io/projected/847a84bb-a664-4310-82af-79849df6df67-kube-api-access-5zql7\") pod \"service-ca-operator-777779d784-sxhrq\" (UID: \"847a84bb-a664-4310-82af-79849df6df67\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-sxhrq"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.329184 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/86f0eb38-79ac-40a4-9000-caadc16a11a7-serving-cert\") pod \"console-operator-58897d9998-pbbqh\" (UID: \"86f0eb38-79ac-40a4-9000-caadc16a11a7\") " pod="openshift-console-operator/console-operator-58897d9998-pbbqh"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.329233 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzrrc\" (UniqueName: \"kubernetes.io/projected/8d0e557a-d9eb-4b35-a6a7-e6ab993d5464-kube-api-access-dzrrc\") pod \"marketplace-operator-79b997595-6d7sj\" (UID: \"8d0e557a-d9eb-4b35-a6a7-e6ab993d5464\") " pod="openshift-marketplace/marketplace-operator-79b997595-6d7sj"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.329282 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c23f4fd7-0bd7-4b4c-9697-b6bb583dba72-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-fx6gz\" (UID: \"c23f4fd7-0bd7-4b4c-9697-b6bb583dba72\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fx6gz"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.329345 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvj7x\" (UniqueName: \"kubernetes.io/projected/ac2f1df9-6ba2-4684-9bd8-a7134f454945-kube-api-access-kvj7x\") pod \"catalog-operator-68c6474976-xwdnv\" (UID: \"ac2f1df9-6ba2-4684-9bd8-a7134f454945\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xwdnv"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.329396 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/fce530e9-fb75-4bb1-af35-292bcdf4bc59-stats-auth\") pod \"router-default-5444994796-6dmdn\" (UID: \"fce530e9-fb75-4bb1-af35-292bcdf4bc59\") " pod="openshift-ingress/router-default-5444994796-6dmdn"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.329437 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/ff1b3af9-22f7-4ac6-93a5-5794c7022f68-profile-collector-cert\") pod \"olm-operator-6b444d44fb-jppzk\" (UID: \"ff1b3af9-22f7-4ac6-93a5-5794c7022f68\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jppzk"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.329865 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/ac2f1df9-6ba2-4684-9bd8-a7134f454945-srv-cert\") pod \"catalog-operator-68c6474976-xwdnv\" (UID: \"ac2f1df9-6ba2-4684-9bd8-a7134f454945\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xwdnv"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.332108 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/bc17a6bb-32b1-4f5c-bbe5-24d5d0d379d5-webhook-cert\") pod \"packageserver-d55dfcdfc-kchsz\" (UID: \"bc17a6bb-32b1-4f5c-bbe5-24d5d0d379d5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kchsz"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.332480 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/943dc95b-dbe5-4561-bbe2-b2bf1680eb60-client-ca\") pod \"controller-manager-879f6c89f-cv476\" (UID: \"943dc95b-dbe5-4561-bbe2-b2bf1680eb60\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cv476"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.332599 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/c39b4ff4-9f98-4732-af70-115c9bb38cb6-signing-cabundle\") pod \"service-ca-9c57cc56f-t6spv\" (UID: \"c39b4ff4-9f98-4732-af70-115c9bb38cb6\") " pod="openshift-service-ca/service-ca-9c57cc56f-t6spv"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.333068 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/bfeeb90f-4d86-4455-baca-e23a1c85a4c5-node-bootstrap-token\") pod \"machine-config-server-6dxmx\" (UID: \"bfeeb90f-4d86-4455-baca-e23a1c85a4c5\") " pod="openshift-machine-config-operator/machine-config-server-6dxmx"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.333739 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/c39b4ff4-9f98-4732-af70-115c9bb38cb6-signing-cabundle\") pod \"service-ca-9c57cc56f-t6spv\" (UID: \"c39b4ff4-9f98-4732-af70-115c9bb38cb6\") " pod="openshift-service-ca/service-ca-9c57cc56f-t6spv"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.333911 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzngw\" (UniqueName: \"kubernetes.io/projected/96c6de38-070f-49ec-83b2-5b0a239c4922-kube-api-access-pzngw\") pod \"migrator-59844c95c7-ffwz5\" (UID: \"96c6de38-070f-49ec-83b2-5b0a239c4922\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ffwz5"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.333954 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/bc17a6bb-32b1-4f5c-bbe5-24d5d0d379d5-apiservice-cert\") pod \"packageserver-d55dfcdfc-kchsz\" (UID: \"bc17a6bb-32b1-4f5c-bbe5-24d5d0d379d5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kchsz"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.334011 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/c39b4ff4-9f98-4732-af70-115c9bb38cb6-signing-key\") pod \"service-ca-9c57cc56f-t6spv\" (UID: \"c39b4ff4-9f98-4732-af70-115c9bb38cb6\") " pod="openshift-service-ca/service-ca-9c57cc56f-t6spv"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.334519 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86f0eb38-79ac-40a4-9000-caadc16a11a7-config\") pod \"console-operator-58897d9998-pbbqh\" (UID: \"86f0eb38-79ac-40a4-9000-caadc16a11a7\") " pod="openshift-console-operator/console-operator-58897d9998-pbbqh"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.334706 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/18114187-6fca-483d-a5e6-e7bcb8c4d254-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-l7cxx\" (UID: \"18114187-6fca-483d-a5e6-e7bcb8c4d254\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-l7cxx"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.334723 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/69e0bd0e-3395-4a06-92ab-ae2fa932902b-config-volume\") pod \"dns-default-cp5t2\" (UID: \"69e0bd0e-3395-4a06-92ab-ae2fa932902b\") " pod="openshift-dns/dns-default-cp5t2"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.335009 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/943dc95b-dbe5-4561-bbe2-b2bf1680eb60-config\") pod \"controller-manager-879f6c89f-cv476\" (UID: \"943dc95b-dbe5-4561-bbe2-b2bf1680eb60\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cv476"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.335253 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/847a84bb-a664-4310-82af-79849df6df67-serving-cert\") pod \"service-ca-operator-777779d784-sxhrq\" (UID: \"847a84bb-a664-4310-82af-79849df6df67\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-sxhrq"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.335658 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/16dec508-e534-422d-8c43-75e9301ef843-mountpoint-dir\") pod \"csi-hostpathplugin-6g4qm\" (UID: \"16dec508-e534-422d-8c43-75e9301ef843\") " pod="hostpath-provisioner/csi-hostpathplugin-6g4qm"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.335936 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/bfeeb90f-4d86-4455-baca-e23a1c85a4c5-certs\") pod \"machine-config-server-6dxmx\" (UID: \"bfeeb90f-4d86-4455-baca-e23a1c85a4c5\") " pod="openshift-machine-config-operator/machine-config-server-6dxmx"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.336189 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/69e0bd0e-3395-4a06-92ab-ae2fa932902b-metrics-tls\") pod \"dns-default-cp5t2\" (UID: \"69e0bd0e-3395-4a06-92ab-ae2fa932902b\") " pod="openshift-dns/dns-default-cp5t2"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.336925 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/943dc95b-dbe5-4561-bbe2-b2bf1680eb60-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-cv476\" (UID: \"943dc95b-dbe5-4561-bbe2-b2bf1680eb60\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cv476"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.336979 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6579289f-8354-47bf-bcb2-50c252008c11-cert\") pod \"ingress-canary-p5fkm\" (UID: \"6579289f-8354-47bf-bcb2-50c252008c11\") " pod="openshift-ingress-canary/ingress-canary-p5fkm"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.337407 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bj5rj\" (UniqueName: \"kubernetes.io/projected/88a9a581-8243-41b1-8054-94ab0168c25e-kube-api-access-bj5rj\") pod \"openshift-controller-manager-operator-756b6f6bc6-6mmvm\" (UID: \"88a9a581-8243-41b1-8054-94ab0168c25e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6mmvm"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.337919 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/16dec508-e534-422d-8c43-75e9301ef843-socket-dir\") pod \"csi-hostpathplugin-6g4qm\" (UID: \"16dec508-e534-422d-8c43-75e9301ef843\") " pod="hostpath-provisioner/csi-hostpathplugin-6g4qm"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.338388 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c366332a-636c-4b25-9b54-89672576b476-config\") pod \"kube-apiserver-operator-766d6c64bb-lhs9k\" (UID: \"c366332a-636c-4b25-9b54-89672576b476\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lhs9k"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.338445 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/16dec508-e534-422d-8c43-75e9301ef843-registration-dir\") pod \"csi-hostpathplugin-6g4qm\" (UID: \"16dec508-e534-422d-8c43-75e9301ef843\") " pod="hostpath-provisioner/csi-hostpathplugin-6g4qm"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.338936 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c23f4fd7-0bd7-4b4c-9697-b6bb583dba72-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-fx6gz\" (UID: \"c23f4fd7-0bd7-4b4c-9697-b6bb583dba72\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fx6gz"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.339291 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/fce530e9-fb75-4bb1-af35-292bcdf4bc59-default-certificate\") pod \"router-default-5444994796-6dmdn\" (UID: \"fce530e9-fb75-4bb1-af35-292bcdf4bc59\") " pod="openshift-ingress/router-default-5444994796-6dmdn"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.339503 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ad430e2-233b-464e-873e-aa2fc29e22e1-config\") pod \"kube-controller-manager-operator-78b949d7b-r2np7\" (UID: \"6ad430e2-233b-464e-873e-aa2fc29e22e1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-r2np7"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.344030 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/ac2f1df9-6ba2-4684-9bd8-a7134f454945-profile-collector-cert\") pod \"catalog-operator-68c6474976-xwdnv\" (UID: \"ac2f1df9-6ba2-4684-9bd8-a7134f454945\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xwdnv"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.344068 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5a571eff-c1d3-4fc0-84e4-43cadf0c7979-secret-volume\") pod \"collect-profiles-29501685-vmrhm\" (UID: \"5a571eff-c1d3-4fc0-84e4-43cadf0c7979\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501685-vmrhm"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.344590 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/943dc95b-dbe5-4561-bbe2-b2bf1680eb60-serving-cert\") pod \"controller-manager-879f6c89f-cv476\" (UID: \"943dc95b-dbe5-4561-bbe2-b2bf1680eb60\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cv476"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.344734 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6ad430e2-233b-464e-873e-aa2fc29e22e1-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-r2np7\" (UID: \"6ad430e2-233b-464e-873e-aa2fc29e22e1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-r2np7"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.345245 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/4552ae6c-059f-4f4a-9f20-d6468bc275bc-metrics-tls\") pod \"ingress-operator-5b745b69d9-qshv4\" (UID: \"4552ae6c-059f-4f4a-9f20-d6468bc275bc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qshv4"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.348536 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/fce530e9-fb75-4bb1-af35-292bcdf4bc59-stats-auth\") pod \"router-default-5444994796-6dmdn\" (UID: \"fce530e9-fb75-4bb1-af35-292bcdf4bc59\") " pod="openshift-ingress/router-default-5444994796-6dmdn"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.349337 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/86766b51-749e-474c-9bb5-f51af2ec045a-proxy-tls\") pod \"machine-config-controller-84d6567774-ksr8m\" (UID: \"86766b51-749e-474c-9bb5-f51af2ec045a\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ksr8m"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.349817 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/ff1b3af9-22f7-4ac6-93a5-5794c7022f68-srv-cert\") pod \"olm-operator-6b444d44fb-jppzk\" (UID: \"ff1b3af9-22f7-4ac6-93a5-5794c7022f68\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jppzk"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.351018 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c23f4fd7-0bd7-4b4c-9697-b6bb583dba72-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-fx6gz\" (UID: \"c23f4fd7-0bd7-4b4c-9697-b6bb583dba72\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fx6gz"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.351039 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/c39b4ff4-9f98-4732-af70-115c9bb38cb6-signing-key\") pod \"service-ca-9c57cc56f-t6spv\" (UID: \"c39b4ff4-9f98-4732-af70-115c9bb38cb6\") " pod="openshift-service-ca/service-ca-9c57cc56f-t6spv"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.351158 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/ff1b3af9-22f7-4ac6-93a5-5794c7022f68-profile-collector-cert\") pod \"olm-operator-6b444d44fb-jppzk\" (UID: \"ff1b3af9-22f7-4ac6-93a5-5794c7022f68\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jppzk"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.352582 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6mmvm"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.359405 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/8aa1c556-e15a-44dc-a37e-d00eb1717bf2-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-hgcng\" (UID: \"8aa1c556-e15a-44dc-a37e-d00eb1717bf2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-hgcng"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.360195 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8d0e557a-d9eb-4b35-a6a7-e6ab993d5464-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-6d7sj\" (UID: \"8d0e557a-d9eb-4b35-a6a7-e6ab993d5464\") " pod="openshift-marketplace/marketplace-operator-79b997595-6d7sj"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.368331 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/86f0eb38-79ac-40a4-9000-caadc16a11a7-serving-cert\") pod \"console-operator-58897d9998-pbbqh\" (UID: \"86f0eb38-79ac-40a4-9000-caadc16a11a7\") " pod="openshift-console-operator/console-operator-58897d9998-pbbqh"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.378196 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/bc17a6bb-32b1-4f5c-bbe5-24d5d0d379d5-apiservice-cert\") pod \"packageserver-d55dfcdfc-kchsz\" (UID: \"bc17a6bb-32b1-4f5c-bbe5-24d5d0d379d5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kchsz"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.378843 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8zpz\" (UniqueName: \"kubernetes.io/projected/3fe2dd6d-16f7-4170-b830-b4d7406f9f30-kube-api-access-g8zpz\") pod \"dns-operator-744455d44c-mqrkr\" (UID: \"3fe2dd6d-16f7-4170-b830-b4d7406f9f30\") " pod="openshift-dns-operator/dns-operator-744455d44c-mqrkr"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.397616 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-grxgf\" (UniqueName: \"kubernetes.io/projected/c15ee4dd-48e2-423d-bbd4-0e136da1d313-kube-api-access-grxgf\") pod \"cluster-samples-operator-665b6dd947-tpbvt\" (UID: \"c15ee4dd-48e2-423d-bbd4-0e136da1d313\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tpbvt"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.398676 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vf25l"]
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.419563 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bg268\" (UniqueName: \"kubernetes.io/projected/b088ff98-5a5a-465d-ad48-9017fdebc2bf-kube-api-access-bg268\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.430548 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName:
\"kubernetes.io/projected/b088ff98-5a5a-465d-ad48-9017fdebc2bf-bound-sa-token\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.432143 4998 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.432507 4998 csr.go:257] certificate signing request csr-gxp74 is issued Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.434632 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:47:32 crc kubenswrapper[4998]: E0203 06:47:32.435051 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:32.93503592 +0000 UTC m=+91.221729726 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.447033 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-l249c"] Feb 03 06:47:32 crc kubenswrapper[4998]: W0203 06:47:32.463530 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9ed713f2_843a_4706_a6bc_2d59bd9d2c44.slice/crio-875a496aec4035a3810fa1d049e5a5393e9e86d13482e817e5e43786405e9c07 WatchSource:0}: Error finding container 875a496aec4035a3810fa1d049e5a5393e9e86d13482e817e5e43786405e9c07: Status 404 returned error can't find the container with id 875a496aec4035a3810fa1d049e5a5393e9e86d13482e817e5e43786405e9c07 Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.473565 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4552ae6c-059f-4f4a-9f20-d6468bc275bc-bound-sa-token\") pod \"ingress-operator-5b745b69d9-qshv4\" (UID: \"4552ae6c-059f-4f4a-9f20-d6468bc275bc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qshv4" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.505685 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kqkpx"] Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.508680 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c366332a-636c-4b25-9b54-89672576b476-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-lhs9k\" (UID: \"c366332a-636c-4b25-9b54-89672576b476\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lhs9k" 
Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.516611 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-bwhwc"] Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.517228 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-hf97k" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.521409 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vmld\" (UniqueName: \"kubernetes.io/projected/ff1b3af9-22f7-4ac6-93a5-5794c7022f68-kube-api-access-9vmld\") pod \"olm-operator-6b444d44fb-jppzk\" (UID: \"ff1b3af9-22f7-4ac6-93a5-5794c7022f68\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jppzk" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.531277 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tpbvt" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.536616 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:32 crc kubenswrapper[4998]: E0203 06:47:32.537000 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:33.036989948 +0000 UTC m=+91.323683754 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.540390 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vr4v\" (UniqueName: \"kubernetes.io/projected/86766b51-749e-474c-9bb5-f51af2ec045a-kube-api-access-5vr4v\") pod \"machine-config-controller-84d6567774-ksr8m\" (UID: \"86766b51-749e-474c-9bb5-f51af2ec045a\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ksr8m" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.544508 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mp7nc" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.554398 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vfh2\" (UniqueName: \"kubernetes.io/projected/8aa1c556-e15a-44dc-a37e-d00eb1717bf2-kube-api-access-9vfh2\") pod \"package-server-manager-789f6589d5-hgcng\" (UID: \"8aa1c556-e15a-44dc-a37e-d00eb1717bf2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-hgcng" Feb 03 06:47:32 crc kubenswrapper[4998]: W0203 06:47:32.554609 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1aaa1428_ac87_459e_bcda_7824f9569593.slice/crio-59e6b969657a2a5dcc514d0cb5c82c02b59d5e48d1005902b8e61520a3c73d7b WatchSource:0}: Error finding container 59e6b969657a2a5dcc514d0cb5c82c02b59d5e48d1005902b8e61520a3c73d7b: Status 404 returned error can't find the container with id 59e6b969657a2a5dcc514d0cb5c82c02b59d5e48d1005902b8e61520a3c73d7b Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.574609 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9m6dq\" (UniqueName: \"kubernetes.io/projected/943dc95b-dbe5-4561-bbe2-b2bf1680eb60-kube-api-access-9m6dq\") pod \"controller-manager-879f6c89f-cv476\" (UID: \"943dc95b-dbe5-4561-bbe2-b2bf1680eb60\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cv476" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.596699 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqb5t\" (UniqueName: \"kubernetes.io/projected/bc17a6bb-32b1-4f5c-bbe5-24d5d0d379d5-kube-api-access-dqb5t\") pod \"packageserver-d55dfcdfc-kchsz\" (UID: \"bc17a6bb-32b1-4f5c-bbe5-24d5d0d379d5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kchsz" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.611581 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-mqrkr" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.613960 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-zx8sw"] Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.624973 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-nzzcb"] Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.626601 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6ad430e2-233b-464e-873e-aa2fc29e22e1-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-r2np7\" (UID: \"6ad430e2-233b-464e-873e-aa2fc29e22e1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-r2np7" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.628197 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kcfsh\" (UniqueName: \"kubernetes.io/projected/f29b800f-24cd-4615-8692-3fd6e84ad338-kube-api-access-kcfsh\") pod \"control-plane-machine-set-operator-78cbb6b69f-57hdh\" (UID: \"f29b800f-24cd-4615-8692-3fd6e84ad338\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-57hdh" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.637349 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:47:32 crc kubenswrapper[4998]: E0203 06:47:32.637691 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:33.137664836 +0000 UTC m=+91.424358642 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.651606 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lv6kz\" (UniqueName: \"kubernetes.io/projected/c23f4fd7-0bd7-4b4c-9697-b6bb583dba72-kube-api-access-lv6kz\") pod \"kube-storage-version-migrator-operator-b67b599dd-fx6gz\" (UID: \"c23f4fd7-0bd7-4b4c-9697-b6bb583dba72\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fx6gz" Feb 03 06:47:32 crc kubenswrapper[4998]: W0203 06:47:32.657676 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod263a6d98_1027_4782_8d2b_1b7274f389ea.slice/crio-aa33c98daf25020fb519f53af7525d0c0f3673d74714922012bbe6bf8e2a7321 WatchSource:0}: Error finding container aa33c98daf25020fb519f53af7525d0c0f3673d74714922012bbe6bf8e2a7321: Status 404 returned error can't find the container with id aa33c98daf25020fb519f53af7525d0c0f3673d74714922012bbe6bf8e2a7321 Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.672428 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-psscq\" (UniqueName: \"kubernetes.io/projected/c39b4ff4-9f98-4732-af70-115c9bb38cb6-kube-api-access-psscq\") pod \"service-ca-9c57cc56f-t6spv\" (UID: \"c39b4ff4-9f98-4732-af70-115c9bb38cb6\") " pod="openshift-service-ca/service-ca-9c57cc56f-t6spv" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.683799 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lhs9k" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.686549 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2lsx8\" (UniqueName: \"kubernetes.io/projected/18114187-6fca-483d-a5e6-e7bcb8c4d254-kube-api-access-2lsx8\") pod \"multus-admission-controller-857f4d67dd-l7cxx\" (UID: \"18114187-6fca-483d-a5e6-e7bcb8c4d254\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-l7cxx" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.703270 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jppzk" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.714532 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-l7cxx" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.719271 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-cv476" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.719319 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8rjj\" (UniqueName: \"kubernetes.io/projected/69e0bd0e-3395-4a06-92ab-ae2fa932902b-kube-api-access-x8rjj\") pod \"dns-default-cp5t2\" (UID: \"69e0bd0e-3395-4a06-92ab-ae2fa932902b\") " pod="openshift-dns/dns-default-cp5t2" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.724937 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-57hdh" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.732523 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8ckx\" (UniqueName: \"kubernetes.io/projected/16dec508-e534-422d-8c43-75e9301ef843-kube-api-access-t8ckx\") pod \"csi-hostpathplugin-6g4qm\" (UID: \"16dec508-e534-422d-8c43-75e9301ef843\") " pod="hostpath-provisioner/csi-hostpathplugin-6g4qm" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.739051 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:32 crc kubenswrapper[4998]: E0203 06:47:32.740309 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:33.240292945 +0000 UTC m=+91.526986751 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.740818 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ksr8m" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.748210 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kchsz" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.750371 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfxf4\" (UniqueName: \"kubernetes.io/projected/86f0eb38-79ac-40a4-9000-caadc16a11a7-kube-api-access-lfxf4\") pod \"console-operator-58897d9998-pbbqh\" (UID: \"86f0eb38-79ac-40a4-9000-caadc16a11a7\") " pod="openshift-console-operator/console-operator-58897d9998-pbbqh" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.761957 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-hgcng" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.764654 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-t6spv" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.769049 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xb46g\" (UniqueName: \"kubernetes.io/projected/4552ae6c-059f-4f4a-9f20-d6468bc275bc-kube-api-access-xb46g\") pod \"ingress-operator-5b745b69d9-qshv4\" (UID: \"4552ae6c-059f-4f4a-9f20-d6468bc275bc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qshv4" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.774903 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-r2np7" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.783119 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fx6gz" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.790592 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d8bh2\" (UniqueName: \"kubernetes.io/projected/5a571eff-c1d3-4fc0-84e4-43cadf0c7979-kube-api-access-d8bh2\") pod \"collect-profiles-29501685-vmrhm\" (UID: \"5a571eff-c1d3-4fc0-84e4-43cadf0c7979\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501685-vmrhm" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.807921 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501685-vmrhm" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.809301 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzngw\" (UniqueName: \"kubernetes.io/projected/96c6de38-070f-49ec-83b2-5b0a239c4922-kube-api-access-pzngw\") pod \"migrator-59844c95c7-ffwz5\" (UID: \"96c6de38-070f-49ec-83b2-5b0a239c4922\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ffwz5" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.828940 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g9zx4\" (UniqueName: \"kubernetes.io/projected/bfeeb90f-4d86-4455-baca-e23a1c85a4c5-kube-api-access-g9zx4\") pod \"machine-config-server-6dxmx\" (UID: \"bfeeb90f-4d86-4455-baca-e23a1c85a4c5\") " pod="openshift-machine-config-operator/machine-config-server-6dxmx" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.837291 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-cp5t2" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.844302 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:47:32 crc kubenswrapper[4998]: E0203 06:47:32.844833 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:33.344815752 +0000 UTC m=+91.631509558 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.849078 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2p58n\" (UniqueName: \"kubernetes.io/projected/6579289f-8354-47bf-bcb2-50c252008c11-kube-api-access-2p58n\") pod \"ingress-canary-p5fkm\" (UID: \"6579289f-8354-47bf-bcb2-50c252008c11\") " pod="openshift-ingress-canary/ingress-canary-p5fkm" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.864245 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-6g4qm" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.871816 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zql7\" (UniqueName: \"kubernetes.io/projected/847a84bb-a664-4310-82af-79849df6df67-kube-api-access-5zql7\") pod \"service-ca-operator-777779d784-sxhrq\" (UID: \"847a84bb-a664-4310-82af-79849df6df67\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-sxhrq" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.892548 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9j6lk\" (UniqueName: \"kubernetes.io/projected/fce530e9-fb75-4bb1-af35-292bcdf4bc59-kube-api-access-9j6lk\") pod \"router-default-5444994796-6dmdn\" (UID: \"fce530e9-fb75-4bb1-af35-292bcdf4bc59\") " pod="openshift-ingress/router-default-5444994796-6dmdn" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.904664 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tpbvt"] Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.918345 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6mmvm"] Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.926768 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvj7x\" (UniqueName: \"kubernetes.io/projected/ac2f1df9-6ba2-4684-9bd8-a7134f454945-kube-api-access-kvj7x\") pod \"catalog-operator-68c6474976-xwdnv\" (UID: \"ac2f1df9-6ba2-4684-9bd8-a7134f454945\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xwdnv" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.954854 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzrrc\" (UniqueName: \"kubernetes.io/projected/8d0e557a-d9eb-4b35-a6a7-e6ab993d5464-kube-api-access-dzrrc\") pod \"marketplace-operator-79b997595-6d7sj\" (UID: \"8d0e557a-d9eb-4b35-a6a7-e6ab993d5464\") " pod="openshift-marketplace/marketplace-operator-79b997595-6d7sj" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.955470 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:32 crc kubenswrapper[4998]: E0203 06:47:32.955812 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:33.455800527 +0000 UTC m=+91.742494333 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.958801 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-hf97k"] Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.975594 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qshv4" Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.985435 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9wjq" event={"ID":"94dd5306-7adc-46f9-839e-f40d15cb7d7e","Type":"ContainerStarted","Data":"b194d107da58e1124ce8c39b99b6bd10e785762298f34405b1b5fd7a66f4d609"} Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.991667 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw" event={"ID":"263a6d98-1027-4782-8d2b-1b7274f389ea","Type":"ContainerStarted","Data":"aa33c98daf25020fb519f53af7525d0c0f3673d74714922012bbe6bf8e2a7321"} Feb 03 06:47:32 crc kubenswrapper[4998]: I0203 06:47:32.994403 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-pbbqh" Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.011850 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vf25l" event={"ID":"4daf44c8-ba33-4d28-85a0-e8110bce8117","Type":"ContainerStarted","Data":"6c5422eecb61c906dac70b2ac84c2ee17b1a7108f5471593b6e8ad954e2d31b5"} Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.011929 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vf25l" event={"ID":"4daf44c8-ba33-4d28-85a0-e8110bce8117","Type":"ContainerStarted","Data":"a29b1ed92c58799cfcca2aaa69b49ca59e41e609a72e719dd45b8654ca312935"} Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.036199 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xwdnv" Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.052297 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-hj4tg" event={"ID":"683b2874-b3e0-4844-a257-583438c3617c","Type":"ContainerStarted","Data":"94764537d1a45ecd5e1c7d76cbc7f3435c0480deac26a7978e86fec4d2966b1c"} Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.052336 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-hj4tg" event={"ID":"683b2874-b3e0-4844-a257-583438c3617c","Type":"ContainerStarted","Data":"9253e6d4548d7f517c4efaa58b5d9dd85b81d48a22a8d69c160f9bad4408a278"} Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.056566 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:47:33 crc kubenswrapper[4998]: E0203 06:47:33.056914 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:33.556891359 +0000 UTC m=+91.843585165 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.057944 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kqkpx" event={"ID":"1aaa1428-ac87-459e-bcda-7824f9569593","Type":"ContainerStarted","Data":"59e6b969657a2a5dcc514d0cb5c82c02b59d5e48d1005902b8e61520a3c73d7b"} Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.067083 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-l249c" event={"ID":"9ed713f2-843a-4706-a6bc-2d59bd9d2c44","Type":"ContainerStarted","Data":"f1941cc7076c26e1121f16f26f84f9c1c90d1d553c48352e842a773c3e481304"} Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.067146 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-l249c" event={"ID":"9ed713f2-843a-4706-a6bc-2d59bd9d2c44","Type":"ContainerStarted","Data":"875a496aec4035a3810fa1d049e5a5393e9e86d13482e817e5e43786405e9c07"} Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.067483 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-l249c" Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.070377 4998 patch_prober.go:28] interesting pod/downloads-7954f5f757-l249c container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 
06:47:33.070427 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-l249c" podUID="9ed713f2-843a-4706-a6bc-2d59bd9d2c44" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Feb 03 06:47:33 crc kubenswrapper[4998]: W0203 06:47:33.079097 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod584d6f48_0415_4b3f_813c_a22af4a339cb.slice/crio-6be93205647bcf9eeea66130081d5784b6ad427dd44ad994f1e8a66c8aa60fa4 WatchSource:0}: Error finding container 6be93205647bcf9eeea66130081d5784b6ad427dd44ad994f1e8a66c8aa60fa4: Status 404 returned error can't find the container with id 6be93205647bcf9eeea66130081d5784b6ad427dd44ad994f1e8a66c8aa60fa4 Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.079151 4998 generic.go:334] "Generic (PLEG): container finished" podID="40b4149a-acf4-4ccc-be3c-4471354e69d2" containerID="166f3a541813bb6bb87c276992ada7ee2e25f84f2d66470ffe7b9c7d1ea4e27a" exitCode=0 Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.079177 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-zqbnn" event={"ID":"40b4149a-acf4-4ccc-be3c-4471354e69d2","Type":"ContainerDied","Data":"166f3a541813bb6bb87c276992ada7ee2e25f84f2d66470ffe7b9c7d1ea4e27a"} Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.079630 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-zqbnn" event={"ID":"40b4149a-acf4-4ccc-be3c-4471354e69d2","Type":"ContainerStarted","Data":"90ab534b42dfc3c084403327ace9f37a68d1b63c9fad94f2243ae8a35a5a3a10"} Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.101978 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-6d7sj" Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.102507 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ffwz5" Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.106250 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mp7nc"] Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.115261 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-6dxmx" Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.120091 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bwhwc" event={"ID":"8d9bf04a-afb1-49d2-990b-a094855aadd7","Type":"ContainerStarted","Data":"11f1e5b96d2be298c8b638083328c71becceba3026542bee1dc2602eaf60455c"} Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.122381 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-sxhrq" Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.134622 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-nzzcb" event={"ID":"e7361bde-fabd-4ec0-a6cc-aae41dcf4d05","Type":"ContainerStarted","Data":"499aa0d0ff600d8cbd64201a2ca5a7d427dbafb64001aedf4bc18d4add33940c"} Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.137877 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-6dmdn" Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.143115 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-p5fkm" Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.157652 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:33 crc kubenswrapper[4998]: E0203 06:47:33.159167 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:33.659155026 +0000 UTC m=+91.945848832 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.190403 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-mqrkr"] Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.215363 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-f2w4s" Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.259735 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:47:33 crc kubenswrapper[4998]: E0203 06:47:33.260015 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:33.759985919 +0000 UTC m=+92.046679725 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.260244 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:33 crc kubenswrapper[4998]: E0203 06:47:33.263264 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:33.76324848 +0000 UTC m=+92.049942506 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.356869 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-ksr8m"] Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.362289 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:47:33 crc kubenswrapper[4998]: E0203 06:47:33.362758 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:33.862741512 +0000 UTC m=+92.149435318 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.417507 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lhs9k"] Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.436948 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2027-02-03 06:42:32 +0000 UTC, rotation deadline is 2026-12-26 12:13:13.368084225 +0000 UTC Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.436978 4998 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 7829h25m39.931110718s for next certificate rotation Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.463737 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:33 crc kubenswrapper[4998]: E0203 06:47:33.464912 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:33.964096041 +0000 UTC m=+92.250789847 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.475648 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-cv476"] Feb 03 06:47:33 crc kubenswrapper[4998]: W0203 06:47:33.497713 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod68ed3797_dc9f_4abe_8508_aa1780a90d7e.slice/crio-b8f91e4921b492e22e406dbdebc21959e4f6d90abb0a9c73e945229b0e13b7b5 WatchSource:0}: Error finding container b8f91e4921b492e22e406dbdebc21959e4f6d90abb0a9c73e945229b0e13b7b5: Status 404 returned error can't find the container with id b8f91e4921b492e22e406dbdebc21959e4f6d90abb0a9c73e945229b0e13b7b5 Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.568227 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:47:33 crc kubenswrapper[4998]: E0203 06:47:33.571353 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:34.071328252 +0000 UTC m=+92.358022058 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.571456 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:33 crc kubenswrapper[4998]: E0203 06:47:33.571959 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:34.071951271 +0000 UTC m=+92.358645077 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.677536 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 06:47:33 crc kubenswrapper[4998]: E0203 06:47:33.677951 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:34.177922952 +0000 UTC m=+92.464616758 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.719536 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-w4dbn" podStartSLOduration=71.719516722 podStartE2EDuration="1m11.719516722s" podCreationTimestamp="2026-02-03 06:46:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:33.686752704 +0000 UTC m=+91.973446520" watchObservedRunningTime="2026-02-03 06:47:33.719516722 +0000 UTC m=+92.006210518"
Feb 03 06:47:33 crc kubenswrapper[4998]: W0203 06:47:33.723069 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfce530e9_fb75_4bb1_af35_292bcdf4bc59.slice/crio-4d97f81da176bd62c338cb96d8494c5109851633eba17f798d5eb04fede3806c WatchSource:0}: Error finding container 4d97f81da176bd62c338cb96d8494c5109851633eba17f798d5eb04fede3806c: Status 404 returned error can't find the container with id 4d97f81da176bd62c338cb96d8494c5109851633eba17f798d5eb04fede3806c
Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.726453 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kchsz"]
Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.772304 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w9qkw" podStartSLOduration=71.772289607 podStartE2EDuration="1m11.772289607s" podCreationTimestamp="2026-02-03 06:46:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:33.770920685 +0000 UTC m=+92.057614491" watchObservedRunningTime="2026-02-03 06:47:33.772289607 +0000 UTC m=+92.058983413"
Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.782415 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:33 crc kubenswrapper[4998]: E0203 06:47:33.782729 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:34.282712937 +0000 UTC m=+92.569406743 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.799156 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-hgcng"]
Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.838138 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jppzk"]
Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.883352 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 06:47:33 crc kubenswrapper[4998]: E0203 06:47:33.883499 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:34.383476169 +0000 UTC m=+92.670169975 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.883947 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:33 crc kubenswrapper[4998]: E0203 06:47:33.884282 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:34.384274353 +0000 UTC m=+92.670968159 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.907277 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9wjq" podStartSLOduration=70.907257831 podStartE2EDuration="1m10.907257831s" podCreationTimestamp="2026-02-03 06:46:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:33.857730616 +0000 UTC m=+92.144424432" watchObservedRunningTime="2026-02-03 06:47:33.907257831 +0000 UTC m=+92.193951637"
Feb 03 06:47:33 crc kubenswrapper[4998]: I0203 06:47:33.990251 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 06:47:33 crc kubenswrapper[4998]: E0203 06:47:33.990811 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:34.490792591 +0000 UTC m=+92.777486397 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.011736 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-r2np7"]
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.032858 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-l7cxx"]
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.096615 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:34 crc kubenswrapper[4998]: E0203 06:47:34.097010 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:34.59699596 +0000 UTC m=+92.883689776 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.101742 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-47pmn" podStartSLOduration=72.101728476 podStartE2EDuration="1m12.101728476s" podCreationTimestamp="2026-02-03 06:46:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:34.100097306 +0000 UTC m=+92.386791112" watchObservedRunningTime="2026-02-03 06:47:34.101728476 +0000 UTC m=+92.388422282"
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.172229 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lhs9k" event={"ID":"c366332a-636c-4b25-9b54-89672576b476","Type":"ContainerStarted","Data":"716fd5d670856d1beab0950710ca7875db1954f81d5bd5195807d3eb8a50033c"}
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.173464 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-cv476" event={"ID":"943dc95b-dbe5-4561-bbe2-b2bf1680eb60","Type":"ContainerStarted","Data":"2f3e4aff546911b56fd78c407bcf2a71607acfa0ff82b6e7c3d5d42da128eb9a"}
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.174502 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-hf97k" event={"ID":"584d6f48-0415-4b3f-813c-a22af4a339cb","Type":"ContainerStarted","Data":"6be93205647bcf9eeea66130081d5784b6ad427dd44ad994f1e8a66c8aa60fa4"}
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.176561 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-6dxmx" event={"ID":"bfeeb90f-4d86-4455-baca-e23a1c85a4c5","Type":"ContainerStarted","Data":"eab383a487b75bb6fea503c43f2c00f9ad276af3b0c38e84e16d1dbd658e0280"}
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.178845 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-6dmdn" event={"ID":"fce530e9-fb75-4bb1-af35-292bcdf4bc59","Type":"ContainerStarted","Data":"4d97f81da176bd62c338cb96d8494c5109851633eba17f798d5eb04fede3806c"}
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.179864 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mp7nc" event={"ID":"68ed3797-dc9f-4abe-8508-aa1780a90d7e","Type":"ContainerStarted","Data":"b8f91e4921b492e22e406dbdebc21959e4f6d90abb0a9c73e945229b0e13b7b5"}
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.180892 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ksr8m" event={"ID":"86766b51-749e-474c-9bb5-f51af2ec045a","Type":"ContainerStarted","Data":"b9eb1a5fcc8737c3c70a53003d03679e955c822dbd43222533937214ded30dc3"}
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.195229 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw" event={"ID":"263a6d98-1027-4782-8d2b-1b7274f389ea","Type":"ContainerStarted","Data":"a41e006ef9230706aa008053cde8632ea1fa0cd66df8de82383b06da1ef90527"}
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.195571 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw"
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.196487 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-nzzcb" event={"ID":"e7361bde-fabd-4ec0-a6cc-aae41dcf4d05","Type":"ContainerStarted","Data":"9e8b1c9c941731d009b13c4ed4d2ea99338b6bc379ebcea258facc3bee890192"}
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.197118 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 06:47:34 crc kubenswrapper[4998]: E0203 06:47:34.197309 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:34.697296217 +0000 UTC m=+92.983990023 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.199455 4998 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-zx8sw container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.23:6443/healthz\": dial tcp 10.217.0.23:6443: connect: connection refused" start-of-body=
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.199525 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw" podUID="263a6d98-1027-4782-8d2b-1b7274f389ea" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.23:6443/healthz\": dial tcp 10.217.0.23:6443: connect: connection refused"
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.199870 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:34 crc kubenswrapper[4998]: E0203 06:47:34.200365 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:34.700344291 +0000 UTC m=+92.987038097 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.206459 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-mqrkr" event={"ID":"3fe2dd6d-16f7-4170-b830-b4d7406f9f30","Type":"ContainerStarted","Data":"6187fdbd7717b8bd0bd235d523a4c9fe1c47efbad09b8389d26fc923a074eaf5"}
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.215667 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kqkpx" event={"ID":"1aaa1428-ac87-459e-bcda-7824f9569593","Type":"ContainerStarted","Data":"01ada89f59521ee4b172cf0d5a07ff5e30baed1536ba7c89f1c27257e259bfee"}
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.227544 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jppzk" event={"ID":"ff1b3af9-22f7-4ac6-93a5-5794c7022f68","Type":"ContainerStarted","Data":"2562b7c06b9320950481cdcf6a59453bf357cca18e5de22c8ee5592c705a81db"}
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.235283 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kchsz" event={"ID":"bc17a6bb-32b1-4f5c-bbe5-24d5d0d379d5","Type":"ContainerStarted","Data":"6bb6ba706605611f7256c2e4a4b7280138a060c8eb0ce2c9b8456dfb937835db"}
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.239026 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-hgcng" event={"ID":"8aa1c556-e15a-44dc-a37e-d00eb1717bf2","Type":"ContainerStarted","Data":"02d58dd7b07b589b5d6a89ddd071968d7c84f2341aa2ee6fc5dd28f05757af8a"}
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.240375 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tpbvt" event={"ID":"c15ee4dd-48e2-423d-bbd4-0e136da1d313","Type":"ContainerStarted","Data":"66e2cc229e2da8cc54d1bb95d01385c0045d46218483c6955060678419e85a6d"}
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.247327 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bwhwc" event={"ID":"8d9bf04a-afb1-49d2-990b-a094855aadd7","Type":"ContainerStarted","Data":"a2072d2cb6bc873cb7796b3eec83c82ebce7c3031156f9ff494681d7f91718d3"}
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.248263 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6mmvm" event={"ID":"88a9a581-8243-41b1-8054-94ab0168c25e","Type":"ContainerStarted","Data":"db1efad43e99e8ac81fa9dc28e4c68707f8d36c3e2da70671497bf698545d874"}
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.248954 4998 patch_prober.go:28] interesting pod/downloads-7954f5f757-l249c container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body=
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.248992 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-l249c" podUID="9ed713f2-843a-4706-a6bc-2d59bd9d2c44" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused"
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.313544 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 06:47:34 crc kubenswrapper[4998]: E0203 06:47:34.315012 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:34.814986509 +0000 UTC m=+93.101680495 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.349225 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-l249c" podStartSLOduration=72.349204672 podStartE2EDuration="1m12.349204672s" podCreationTimestamp="2026-02-03 06:46:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:34.348929634 +0000 UTC m=+92.635623460" watchObservedRunningTime="2026-02-03 06:47:34.349204672 +0000 UTC m=+92.635898488"
Feb 03 06:47:34 crc kubenswrapper[4998]: W0203 06:47:34.395748 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod18114187_6fca_483d_a5e6_e7bcb8c4d254.slice/crio-5a4f54f323f84725a0cac1b7486222b157f3168652a69adcef7d67a2b9b89d45 WatchSource:0}: Error finding container 5a4f54f323f84725a0cac1b7486222b157f3168652a69adcef7d67a2b9b89d45: Status 404 returned error can't find the container with id 5a4f54f323f84725a0cac1b7486222b157f3168652a69adcef7d67a2b9b89d45
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.422829 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:34 crc kubenswrapper[4998]: E0203 06:47:34.427577 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:34.927554874 +0000 UTC m=+93.214248850 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.543269 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 06:47:34 crc kubenswrapper[4998]: E0203 06:47:34.543435 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:35.043409979 +0000 UTC m=+93.330103785 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.543704 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:34 crc kubenswrapper[4998]: E0203 06:47:34.544182 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:35.044169333 +0000 UTC m=+93.330863139 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.556031 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-hj4tg" podStartSLOduration=72.556012937 podStartE2EDuration="1m12.556012937s" podCreationTimestamp="2026-02-03 06:46:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:34.553917383 +0000 UTC m=+92.840611209" watchObservedRunningTime="2026-02-03 06:47:34.556012937 +0000 UTC m=+92.842706743"
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.587672 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-f2w4s" podStartSLOduration=71.587653051 podStartE2EDuration="1m11.587653051s" podCreationTimestamp="2026-02-03 06:46:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:34.579878112 +0000 UTC m=+92.866571928" watchObservedRunningTime="2026-02-03 06:47:34.587653051 +0000 UTC m=+92.874346857"
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.644906 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 06:47:34 crc kubenswrapper[4998]: E0203 06:47:34.645052 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:35.145027977 +0000 UTC m=+93.431721783 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.645343 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:34 crc kubenswrapper[4998]: E0203 06:47:34.645697 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:35.145683767 +0000 UTC m=+93.432377573 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.746393 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 06:47:34 crc kubenswrapper[4998]: E0203 06:47:34.746517 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:35.246500279 +0000 UTC m=+93.533194085 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.746665 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:34 crc kubenswrapper[4998]: E0203 06:47:34.747186 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:35.24716869 +0000 UTC m=+93.533862486 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.847726 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 06:47:34 crc kubenswrapper[4998]: E0203 06:47:34.848287 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:35.348267342 +0000 UTC m=+93.634961148 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.867360 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vf25l" podStartSLOduration=72.867340349 podStartE2EDuration="1m12.867340349s" podCreationTimestamp="2026-02-03 06:46:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:34.866154102 +0000 UTC m=+93.152847928" watchObservedRunningTime="2026-02-03 06:47:34.867340349 +0000 UTC m=+93.154034155"
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.930490 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-t6spv"]
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.949755 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:34 crc kubenswrapper[4998]: E0203 06:47:34.950241 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:35.45022957 +0000 UTC m=+93.736923366 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.978275 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-57hdh"]
Feb 03 06:47:34 crc kubenswrapper[4998]: I0203 06:47:34.982815 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-2wmmh" podStartSLOduration=72.982798702 podStartE2EDuration="1m12.982798702s" podCreationTimestamp="2026-02-03 06:46:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:34.982029958 +0000 UTC m=+93.268723794" watchObservedRunningTime="2026-02-03 06:47:34.982798702 +0000 UTC m=+93.269492508"
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.001770 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501685-vmrhm"]
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.027529 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-nzzcb" podStartSLOduration=73.027514048 podStartE2EDuration="1m13.027514048s" podCreationTimestamp="2026-02-03 06:46:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:35.026419104 +0000 UTC m=+93.313112910" watchObservedRunningTime="2026-02-03 06:47:35.027514048 +0000 UTC m=+93.314207854"
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.054511 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 06:47:35 crc kubenswrapper[4998]: E0203 06:47:35.054999 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:35.554979893 +0000 UTC m=+93.841673699 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.088758 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw" podStartSLOduration=73.088736042 podStartE2EDuration="1m13.088736042s" podCreationTimestamp="2026-02-03 06:46:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:35.086452712 +0000 UTC m=+93.373146518" watchObservedRunningTime="2026-02-03 06:47:35.088736042 +0000 UTC m=+93.375429848"
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.156539 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:35 crc kubenswrapper[4998]: E0203 06:47:35.156948 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:35.656933541 +0000 UTC m=+93.943627347 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.220427 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kqkpx" podStartSLOduration=72.220404595 podStartE2EDuration="1m12.220404595s" podCreationTimestamp="2026-02-03 06:46:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:35.120220421 +0000 UTC m=+93.406914237" watchObservedRunningTime="2026-02-03 06:47:35.220404595 +0000 UTC m=+93.507098401"
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.226801 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xwdnv"]
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.236218 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-ffwz5"]
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.239726 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-6g4qm"]
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.250700 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-p5fkm"]
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.253438 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-cp5t2"]
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.257764 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 06:47:35 crc kubenswrapper[4998]: E0203 06:47:35.258992 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:35.757970461 +0000 UTC m=+94.044664407 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.266554 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-sxhrq"]
Feb 03 06:47:35 crc kubenswrapper[4998]: W0203 06:47:35.272126 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod96c6de38_070f_49ec_83b2_5b0a239c4922.slice/crio-78bf5d6c41f53c346b8f735b74860995cf18cf48f0b9cfcba38c065e4380ef59 WatchSource:0}: Error finding container 78bf5d6c41f53c346b8f735b74860995cf18cf48f0b9cfcba38c065e4380ef59: Status 404 returned error can't find the container with id 78bf5d6c41f53c346b8f735b74860995cf18cf48f0b9cfcba38c065e4380ef59
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.282803 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6d7sj"]
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.316029 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bwhwc" event={"ID":"8d9bf04a-afb1-49d2-990b-a094855aadd7","Type":"ContainerStarted","Data":"78f37306510f70044fa2298ddda44a90550ac4708a015d9e40812825d086dbe7"}
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.320219 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fx6gz"]
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.323518 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-qshv4"]
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.332580 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-pbbqh"]
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.339660 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-6dmdn" event={"ID":"fce530e9-fb75-4bb1-af35-292bcdf4bc59","Type":"ContainerStarted","Data":"bc80599a0e859be909cea9effa160d969ec38e53b65a45828eff345076600a28"}
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.361175 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501685-vmrhm" event={"ID":"5a571eff-c1d3-4fc0-84e4-43cadf0c7979","Type":"ContainerStarted","Data":"f7d5cc34a318a4c6e722dacc52b24460504349527660695f985351d1c2b575c0"}
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.361259 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:35 crc kubenswrapper[4998]: E0203 06:47:35.362310 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:35.862294142 +0000 UTC m=+94.148987948 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.363528 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bwhwc" podStartSLOduration=72.36351767 podStartE2EDuration="1m12.36351767s" podCreationTimestamp="2026-02-03 06:46:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:35.362376255 +0000 UTC m=+93.649070091" watchObservedRunningTime="2026-02-03 06:47:35.36351767 +0000 UTC m=+93.650211476"
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.393686 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jppzk" event={"ID":"ff1b3af9-22f7-4ac6-93a5-5794c7022f68","Type":"ContainerStarted","Data":"3982ed406464922de5a5146f32c76fcfdc0f066b0bf522c409c53bca3889c220"}
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.394498 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jppzk"
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.411398 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-hf97k" event={"ID":"584d6f48-0415-4b3f-813c-a22af4a339cb","Type":"ContainerStarted","Data":"a1d9f972a69911a3bf1d5e68ee969bd18f30e677657f6ffe2daefe70a95e4098"}
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.415051 4998 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-jppzk container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.16:8443/healthz\": dial tcp 10.217.0.16:8443: connect: connection refused" start-of-body=
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.415094 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jppzk" podUID="ff1b3af9-22f7-4ac6-93a5-5794c7022f68" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.16:8443/healthz\": dial tcp 10.217.0.16:8443: connect: connection refused"
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.418729 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-hgcng" event={"ID":"8aa1c556-e15a-44dc-a37e-d00eb1717bf2","Type":"ContainerStarted","Data":"85d8f23ba2486ca1389e7381c5e7de2496815e71b423f9d1470884b3d648c823"}
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.425678 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tpbvt" event={"ID":"c15ee4dd-48e2-423d-bbd4-0e136da1d313","Type":"ContainerStarted","Data":"37d010d05d29c0d24247026ec62f15cbd7f3ed7909af5d12682baf00bfefd1bc"}
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.427809 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-6dmdn" podStartSLOduration=72.427772928 podStartE2EDuration="1m12.427772928s" podCreationTimestamp="2026-02-03 06:46:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:35.392626876 +0000 UTC m=+93.679320692" watchObservedRunningTime="2026-02-03 06:47:35.427772928 +0000 UTC m=+93.714466734"
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.432028 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-cv476" event={"ID":"943dc95b-dbe5-4561-bbe2-b2bf1680eb60","Type":"ContainerStarted","Data":"8b7c1807439db6f4eea73a6694de57844b5e8d72234f280850f916433fc10c0c"}
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.433411 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-cv476"
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.444387 4998 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-cv476 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.31:8443/healthz\": dial tcp 10.217.0.31:8443: connect: connection refused" start-of-body=
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.444438 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-cv476" podUID="943dc95b-dbe5-4561-bbe2-b2bf1680eb60" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.31:8443/healthz\": dial tcp 10.217.0.31:8443: connect: connection refused"
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.444987 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-r2np7" event={"ID":"6ad430e2-233b-464e-873e-aa2fc29e22e1","Type":"ContainerStarted","Data":"e5dd5760cd8f5858f7e4ca9e0ad0c3fb69968fd76a332b716945e8bbb483cee5"}
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.445039 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-r2np7" event={"ID":"6ad430e2-233b-464e-873e-aa2fc29e22e1","Type":"ContainerStarted","Data":"7c26c115de217637ac5db68c4ca2e0110b3b81039fd1cb8463aa56cf364c06c0"}
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.464110 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 06:47:35 crc kubenswrapper[4998]: E0203 06:47:35.465118 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:35.965101306 +0000 UTC m=+94.251795112 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.468756 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-cv476" podStartSLOduration=73.468732578 podStartE2EDuration="1m13.468732578s" podCreationTimestamp="2026-02-03 06:46:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:35.463980192 +0000 UTC m=+93.750673998" watchObservedRunningTime="2026-02-03 06:47:35.468732578 +0000 UTC m=+93.755426384"
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.469193 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jppzk" podStartSLOduration=72.469187582 podStartE2EDuration="1m12.469187582s" podCreationTimestamp="2026-02-03 06:46:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:35.425525098 +0000 UTC m=+93.712218904" watchObservedRunningTime="2026-02-03 06:47:35.469187582 +0000 UTC m=+93.755881388"
Feb 03 06:47:35 crc kubenswrapper[4998]: W0203 06:47:35.474092 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc23f4fd7_0bd7_4b4c_9697_b6bb583dba72.slice/crio-1362bab5307a8f96d2c9b189997983a361a3ed049a22018aee53f6e253a5e9fa WatchSource:0}: Error finding container 1362bab5307a8f96d2c9b189997983a361a3ed049a22018aee53f6e253a5e9fa: Status 404 returned error can't find the container with id 1362bab5307a8f96d2c9b189997983a361a3ed049a22018aee53f6e253a5e9fa
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.481395 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-zqbnn" event={"ID":"40b4149a-acf4-4ccc-be3c-4471354e69d2","Type":"ContainerStarted","Data":"0bf5daf861839043e062044756cc0c3162498d0256705c2837b80032b59d3c55"}
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.484408 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-zqbnn"
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.505313 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kchsz" event={"ID":"bc17a6bb-32b1-4f5c-bbe5-24d5d0d379d5","Type":"ContainerStarted","Data":"bd92331a3426fa360951b31bdfb2c4e0b82a837c45ee3b80998f81f4e4a8f38a"}
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.505744 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kchsz"
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.511893 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-r2np7" podStartSLOduration=72.511872906 podStartE2EDuration="1m12.511872906s" podCreationTimestamp="2026-02-03 06:46:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:35.484336738 +0000 UTC m=+93.771030544" watchObservedRunningTime="2026-02-03 06:47:35.511872906 +0000 UTC m=+93.798566712"
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.512133 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-zqbnn" podStartSLOduration=73.512128284 podStartE2EDuration="1m13.512128284s" podCreationTimestamp="2026-02-03 06:46:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:35.510836974 +0000 UTC m=+93.797530790" watchObservedRunningTime="2026-02-03 06:47:35.512128284 +0000 UTC m=+93.798822090"
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.516273 4998 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-kchsz container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.22:5443/healthz\": dial tcp 10.217.0.22:5443: connect: connection refused" start-of-body=
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.516404 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kchsz" podUID="bc17a6bb-32b1-4f5c-bbe5-24d5d0d379d5" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.22:5443/healthz\": dial tcp 10.217.0.22:5443: connect: connection refused"
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.534166 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lhs9k" event={"ID":"c366332a-636c-4b25-9b54-89672576b476","Type":"ContainerStarted","Data":"d1b4a8d9fa3802f738b8b1db96d0f8f3c73675e3b6c846ea92ae098a92042576"}
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.537191 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kchsz" podStartSLOduration=72.537167635 podStartE2EDuration="1m12.537167635s" podCreationTimestamp="2026-02-03 06:46:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:35.530147278 +0000 UTC m=+93.816841094" watchObservedRunningTime="2026-02-03 06:47:35.537167635 +0000 UTC m=+93.823861431"
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.544611 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-l7cxx" event={"ID":"18114187-6fca-483d-a5e6-e7bcb8c4d254","Type":"ContainerStarted","Data":"c675d7a9eda95411a3d65e12dcbcba5db7795a35633214a4f2949bc0cb8aeee7"}
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.544659 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-l7cxx" event={"ID":"18114187-6fca-483d-a5e6-e7bcb8c4d254","Type":"ContainerStarted","Data":"5a4f54f323f84725a0cac1b7486222b157f3168652a69adcef7d67a2b9b89d45"}
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.570195 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:35 crc kubenswrapper[4998]: E0203 06:47:35.570488 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:36.07047744 +0000 UTC m=+94.357171246 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.575080 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-57hdh" event={"ID":"f29b800f-24cd-4615-8692-3fd6e84ad338","Type":"ContainerStarted","Data":"f8109c4cfdf24d131aa198b5e65cdb05499fe5dd65fe7653fa45808bf8816255"}
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.578135 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mp7nc" event={"ID":"68ed3797-dc9f-4abe-8508-aa1780a90d7e","Type":"ContainerStarted","Data":"7af46b51388963574101df9845d520e6ec794fce654179066676992b857bd6c6"}
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.587219 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xwdnv" event={"ID":"ac2f1df9-6ba2-4684-9bd8-a7134f454945","Type":"ContainerStarted","Data":"460338ec1e92d6d1668ef06aa0f02476e53aa5e9675e8e6722fae977e96ce901"}
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.596519 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9wjq"
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.596955 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9wjq"
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.597445 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lhs9k" podStartSLOduration=72.597424779 podStartE2EDuration="1m12.597424779s" podCreationTimestamp="2026-02-03 06:46:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:35.56299935 +0000 UTC m=+93.849693156" watchObservedRunningTime="2026-02-03 06:47:35.597424779 +0000 UTC m=+93.884118605"
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.598042 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-57hdh" podStartSLOduration=72.598033168 podStartE2EDuration="1m12.598033168s" podCreationTimestamp="2026-02-03 06:46:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:35.596176941 +0000 UTC m=+93.882870757" watchObservedRunningTime="2026-02-03 06:47:35.598033168 +0000 UTC m=+93.884726984"
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.610069 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-mqrkr" event={"ID":"3fe2dd6d-16f7-4170-b830-b4d7406f9f30","Type":"ContainerStarted","Data":"793f1955cd0796a7fc4357f7ba5d6fc4cb4c1b0bd43caa1eeb09720a6629a098"}
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.619770 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9wjq"
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.620558 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mp7nc" podStartSLOduration=73.620531 podStartE2EDuration="1m13.620531s" podCreationTimestamp="2026-02-03 06:46:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:35.620130878 +0000 UTC m=+93.906824704" watchObservedRunningTime="2026-02-03 06:47:35.620531 +0000 UTC m=+93.907224806"
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.645820 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6mmvm" event={"ID":"88a9a581-8243-41b1-8054-94ab0168c25e","Type":"ContainerStarted","Data":"6fd3de4b4f2c1fad7b03f3823b3db237768d595bbad37d2f6131caa3fdb362ee"}
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.677876 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 06:47:35 crc kubenswrapper[4998]: E0203 06:47:35.678022 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:36.177998859 +0000 UTC m=+94.464692675 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.678179 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:35 crc kubenswrapper[4998]: E0203 06:47:35.678588 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:36.178577647 +0000 UTC m=+94.465271453 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.689505 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ksr8m" event={"ID":"86766b51-749e-474c-9bb5-f51af2ec045a","Type":"ContainerStarted","Data":"7413f964af6c802d3c25ad44cfe4c8b8101fb9ca92db93729ac76594603bf99f"}
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.693351 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-6dxmx" event={"ID":"bfeeb90f-4d86-4455-baca-e23a1c85a4c5","Type":"ContainerStarted","Data":"7a456c06d6bf438b7eb9d3cad72a471a02fdb1a3fd7bab6249b781827e426718"}
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.707063 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-t6spv" event={"ID":"c39b4ff4-9f98-4732-af70-115c9bb38cb6","Type":"ContainerStarted","Data":"b6fea65b9227dc04e004264cfc0f6fa63d972d42944eb917c666f525a1730a8b"}
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.711910 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ksr8m" podStartSLOduration=72.711888902 podStartE2EDuration="1m12.711888902s" podCreationTimestamp="2026-02-03 06:46:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:35.711299964 +0000 UTC m=+93.997993790" watchObservedRunningTime="2026-02-03 06:47:35.711888902 +0000 UTC m=+93.998582708"
Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.720149 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9wjq"
Feb 03 06:47:35
crc kubenswrapper[4998]: I0203 06:47:35.721845 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6mmvm" podStartSLOduration=73.721819958 podStartE2EDuration="1m13.721819958s" podCreationTimestamp="2026-02-03 06:46:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:35.685952734 +0000 UTC m=+93.972646560" watchObservedRunningTime="2026-02-03 06:47:35.721819958 +0000 UTC m=+94.008513754" Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.722540 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw" Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.761048 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-6dxmx" podStartSLOduration=6.761020464 podStartE2EDuration="6.761020464s" podCreationTimestamp="2026-02-03 06:47:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:35.735902591 +0000 UTC m=+94.022596407" watchObservedRunningTime="2026-02-03 06:47:35.761020464 +0000 UTC m=+94.047714270" Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.779207 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:47:35 crc kubenswrapper[4998]: E0203 06:47:35.780878 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:36.280859415 +0000 UTC m=+94.567553221 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.813660 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-t6spv" podStartSLOduration=72.813639334 podStartE2EDuration="1m12.813639334s" podCreationTimestamp="2026-02-03 06:46:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:35.762296083 +0000 UTC m=+94.048989889" watchObservedRunningTime="2026-02-03 06:47:35.813639334 +0000 UTC m=+94.100333150" Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.830389 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-hj4tg" Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.838015 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-hj4tg" Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.868750 4998 patch_prober.go:28] interesting pod/apiserver-76f77b778f-hj4tg container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Feb 03 06:47:35 crc kubenswrapper[4998]: [+]log ok Feb 03 06:47:35 crc kubenswrapper[4998]: [+]etcd ok Feb 03 06:47:35 crc kubenswrapper[4998]: [+]poststarthook/start-apiserver-admission-initializer ok Feb 03 06:47:35 crc kubenswrapper[4998]: [+]poststarthook/generic-apiserver-start-informers ok Feb 03 06:47:35 crc kubenswrapper[4998]: [+]poststarthook/max-in-flight-filter ok Feb 03 06:47:35 crc kubenswrapper[4998]: [+]poststarthook/storage-object-count-tracker-hook ok Feb 03 06:47:35 crc kubenswrapper[4998]: [+]poststarthook/image.openshift.io-apiserver-caches ok Feb 03 06:47:35 crc kubenswrapper[4998]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Feb 03 06:47:35 crc kubenswrapper[4998]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Feb 03 06:47:35 crc kubenswrapper[4998]: [+]poststarthook/project.openshift.io-projectcache ok Feb 03 06:47:35 crc kubenswrapper[4998]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Feb 03 06:47:35 crc kubenswrapper[4998]: [+]poststarthook/openshift.io-startinformers ok Feb 03 06:47:35 crc kubenswrapper[4998]: [+]poststarthook/openshift.io-restmapperupdater ok Feb 03 06:47:35 crc kubenswrapper[4998]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Feb 03 06:47:35 crc kubenswrapper[4998]: livez check failed Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.868828 4998 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-hj4tg" podUID="683b2874-b3e0-4844-a257-583438c3617c" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.891426 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:35 crc kubenswrapper[4998]: E0203 06:47:35.894550 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:36.394532073 +0000 UTC m=+94.681226049 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.992928 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:47:35 crc kubenswrapper[4998]: E0203 06:47:35.993097 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:36.493067026 +0000 UTC m=+94.779760832 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:35 crc kubenswrapper[4998]: I0203 06:47:35.993327 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:35 crc kubenswrapper[4998]: E0203 06:47:35.994592 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:36.493704486 +0000 UTC m=+94.780398292 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.094076 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:47:36 crc kubenswrapper[4998]: E0203 06:47:36.094964 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:36.594942491 +0000 UTC m=+94.881636297 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.138872 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-6dmdn" Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.140613 4998 patch_prober.go:28] interesting pod/router-default-5444994796-6dmdn container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body= Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.140668 4998 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6dmdn" podUID="fce530e9-fb75-4bb1-af35-292bcdf4bc59" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.195995 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:36 crc kubenswrapper[4998]: E0203 06:47:36.196461 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:36.696441945 +0000 UTC m=+94.983135761 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.296849 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:47:36 crc kubenswrapper[4998]: E0203 06:47:36.297010 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:36.79698839 +0000 UTC m=+95.083682206 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.297192 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:36 crc kubenswrapper[4998]: E0203 06:47:36.297558 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:36.797545227 +0000 UTC m=+95.084239033 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.398387 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:47:36 crc kubenswrapper[4998]: E0203 06:47:36.398608 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:36.898576627 +0000 UTC m=+95.185270483 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.398808 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:36 crc kubenswrapper[4998]: E0203 06:47:36.399250 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:36.899241127 +0000 UTC m=+95.185934933 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.499745 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:47:36 crc kubenswrapper[4998]: E0203 06:47:36.499922 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:36.999898135 +0000 UTC m=+95.286591941 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.500190 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:36 crc kubenswrapper[4998]: E0203 06:47:36.500529 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:37.000514214 +0000 UTC m=+95.287208020 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.600844 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:47:36 crc kubenswrapper[4998]: E0203 06:47:36.601033 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:37.100998016 +0000 UTC m=+95.387691822 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.601115 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:36 crc kubenswrapper[4998]: E0203 06:47:36.601520 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:37.101507212 +0000 UTC m=+95.388201028 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.702242 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:47:36 crc kubenswrapper[4998]: E0203 06:47:36.702427 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:37.202400847 +0000 UTC m=+95.489094643 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.702709 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:36 crc kubenswrapper[4998]: E0203 06:47:36.703049 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:37.203041447 +0000 UTC m=+95.489735243 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.713727 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-57hdh" event={"ID":"f29b800f-24cd-4615-8692-3fd6e84ad338","Type":"ContainerStarted","Data":"801b915b0cb53047a85168ce41d5c6f7411a8694a6190ad91e3a81563507cd94"} Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.715273 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xwdnv" event={"ID":"ac2f1df9-6ba2-4684-9bd8-a7134f454945","Type":"ContainerStarted","Data":"3442ea22b4fc2cdda8e267b5f2f5d0d30351aa92b505bd3cb43c6f68f1611646"} Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.715594 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xwdnv" Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.716526 4998 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-xwdnv container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.36:8443/healthz\": dial tcp 10.217.0.36:8443: connect: connection refused" start-of-body= Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.716561 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xwdnv" podUID="ac2f1df9-6ba2-4684-9bd8-a7134f454945" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.36:8443/healthz\": dial tcp 10.217.0.36:8443: connect: connection refused" Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.717181 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-hgcng" event={"ID":"8aa1c556-e15a-44dc-a37e-d00eb1717bf2","Type":"ContainerStarted","Data":"d91d756f9827752b07ef4d35f2bf50a3b01d0c4aea5e6a455754bcc0f4886365"} Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.717330 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-hgcng" Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.719343 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-mqrkr" event={"ID":"3fe2dd6d-16f7-4170-b830-b4d7406f9f30","Type":"ContainerStarted","Data":"1ecf21624b356abf277262eea94f2b3f2c52886e153b899d886902e5d7445f5a"} Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.721504 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-t6spv" event={"ID":"c39b4ff4-9f98-4732-af70-115c9bb38cb6","Type":"ContainerStarted","Data":"1615668933aa99fdf424c0e29ce1204d44408ec50327ea9f95e82456eb646222"} Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.723797 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-sxhrq" 
event={"ID":"847a84bb-a664-4310-82af-79849df6df67","Type":"ContainerStarted","Data":"daef41e88e3b8b80973ab433ab7257bb352c75b185f95b2e12461af563090221"} Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.723848 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-sxhrq" event={"ID":"847a84bb-a664-4310-82af-79849df6df67","Type":"ContainerStarted","Data":"444181c68300a3d6f3c85897f8f0e5489fe5b24f1b118c0b5f93cdc38e580939"} Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.725198 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-pbbqh" event={"ID":"86f0eb38-79ac-40a4-9000-caadc16a11a7","Type":"ContainerStarted","Data":"24bd4a33a76cc76f9546dd37ad23344df823fdfffb1e34302094fc45daf83953"} Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.725237 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-pbbqh" event={"ID":"86f0eb38-79ac-40a4-9000-caadc16a11a7","Type":"ContainerStarted","Data":"d6f4febde4edf8a9907f7df8437fc723e172a32eb56d72e6d1986ddff3e9c35a"} Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.725344 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-pbbqh" Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.727761 4998 patch_prober.go:28] interesting pod/console-operator-58897d9998-pbbqh container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/readyz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body= Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.727819 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-pbbqh" podUID="86f0eb38-79ac-40a4-9000-caadc16a11a7" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.29:8443/readyz\": dial tcp 10.217.0.29:8443: connect: connection refused" Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.728404 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-cp5t2" event={"ID":"69e0bd0e-3395-4a06-92ab-ae2fa932902b","Type":"ContainerStarted","Data":"52cd638a2098250ad59503e5fccf9e21e39cdd09f64d1d956c484d1ae6785365"} Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.728457 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-cp5t2" event={"ID":"69e0bd0e-3395-4a06-92ab-ae2fa932902b","Type":"ContainerStarted","Data":"12880de95ce505ee51474f2ce0be29bdc3ec428a1dd6043a0a380c0ee8a77d43"} Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.728478 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-cp5t2" Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.728492 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-cp5t2" event={"ID":"69e0bd0e-3395-4a06-92ab-ae2fa932902b","Type":"ContainerStarted","Data":"07aaf352349bcd510d9ac3776fd4f2b0b9a3539a35ae219bb3bb3fcadf8dc96a"} Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.730676 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tpbvt" 
event={"ID":"c15ee4dd-48e2-423d-bbd4-0e136da1d313","Type":"ContainerStarted","Data":"748de687087e1a415fc9d7f2c2d1d0d71134315756ff3f5066206af75b2d8855"} Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.734352 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-6d7sj" event={"ID":"8d0e557a-d9eb-4b35-a6a7-e6ab993d5464","Type":"ContainerStarted","Data":"3a64151fe912b987d3600efafaf1ba6a364f80d47a503b4ddf6cecedf9d90c0b"} Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.734405 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-6d7sj" event={"ID":"8d0e557a-d9eb-4b35-a6a7-e6ab993d5464","Type":"ContainerStarted","Data":"ee64d83406b7c8fbf8e3c67093ff174bca2a983464a1d4791fecb2cbb6b33d8c"} Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.735415 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-6d7sj" Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.736184 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xwdnv" podStartSLOduration=73.736160656 podStartE2EDuration="1m13.736160656s" podCreationTimestamp="2026-02-03 06:46:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:36.735720503 +0000 UTC m=+95.022414319" watchObservedRunningTime="2026-02-03 06:47:36.736160656 +0000 UTC m=+95.022854462" Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.737532 4998 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-6d7sj container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" start-of-body= Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.737581 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-6d7sj" podUID="8d0e557a-d9eb-4b35-a6a7-e6ab993d5464" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.739190 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ffwz5" event={"ID":"96c6de38-070f-49ec-83b2-5b0a239c4922","Type":"ContainerStarted","Data":"77d7d987c143a9ef02bda0fec6652bed9f891a7e2b3e98068b8e4e91d4b9493c"} Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.739240 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ffwz5" event={"ID":"96c6de38-070f-49ec-83b2-5b0a239c4922","Type":"ContainerStarted","Data":"2d766287e3712984e095118fec5a76ad963d131ed8c3c69417ab6049a0794cb7"} Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.739254 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ffwz5" event={"ID":"96c6de38-070f-49ec-83b2-5b0a239c4922","Type":"ContainerStarted","Data":"78bf5d6c41f53c346b8f735b74860995cf18cf48f0b9cfcba38c065e4380ef59"} Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.741728 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-operator-lifecycle-manager/collect-profiles-29501685-vmrhm" event={"ID":"5a571eff-c1d3-4fc0-84e4-43cadf0c7979","Type":"ContainerStarted","Data":"3ba0e525e44872d49676bb37c726957410f36f5262a90d479d69331017f46227"} Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.746481 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ksr8m" event={"ID":"86766b51-749e-474c-9bb5-f51af2ec045a","Type":"ContainerStarted","Data":"9d1ba59b11c2e9bbba84fe8e30cd56c7921ecece8a8c76fd06df27f71e753423"} Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.752835 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qshv4" event={"ID":"4552ae6c-059f-4f4a-9f20-d6468bc275bc","Type":"ContainerStarted","Data":"6e20e431d6f1798739973cfdd95ebaf30418d996376a9a764512908349d18de5"} Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.752892 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qshv4" event={"ID":"4552ae6c-059f-4f4a-9f20-d6468bc275bc","Type":"ContainerStarted","Data":"bdf1ea7556097738106c1f87adb5b78954fff231352e7a0c05e74ffe7968ad19"} Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.752905 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qshv4" event={"ID":"4552ae6c-059f-4f4a-9f20-d6468bc275bc","Type":"ContainerStarted","Data":"1bb9c87f05d138d8c06ef474a91fa7e062cabbbe986d05cf8a3a8db5493bbf20"} Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.757340 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-p5fkm" event={"ID":"6579289f-8354-47bf-bcb2-50c252008c11","Type":"ContainerStarted","Data":"bb14df5946342331530297435b4070e4a9ff25d906e1bc23d6aec99c24bd2965"} Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.757381 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-p5fkm" event={"ID":"6579289f-8354-47bf-bcb2-50c252008c11","Type":"ContainerStarted","Data":"21fedf122b0384dc3add837e2bf4a0fdace3e805b245dd6981a64379008a9d65"} Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.776929 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fx6gz" event={"ID":"c23f4fd7-0bd7-4b4c-9697-b6bb583dba72","Type":"ContainerStarted","Data":"9061f16859bae15824473d0716a07ce28254a787f8d474ff00756562908df9e7"} Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.776985 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fx6gz" event={"ID":"c23f4fd7-0bd7-4b4c-9697-b6bb583dba72","Type":"ContainerStarted","Data":"1362bab5307a8f96d2c9b189997983a361a3ed049a22018aee53f6e253a5e9fa"} Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.778991 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-6g4qm" event={"ID":"16dec508-e534-422d-8c43-75e9301ef843","Type":"ContainerStarted","Data":"f96b6b55856582ab1d29fca183c3fe252c36ab589cd67a41b6de34fc63160b31"} Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.785119 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-l7cxx" 
event={"ID":"18114187-6fca-483d-a5e6-e7bcb8c4d254","Type":"ContainerStarted","Data":"459adca2730d85a62ac4888202cfe8ab76145bf6e68987edde0503ad695815d2"} Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.791146 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-mqrkr" podStartSLOduration=74.791129248 podStartE2EDuration="1m14.791129248s" podCreationTimestamp="2026-02-03 06:46:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:36.790110997 +0000 UTC m=+95.076804813" watchObservedRunningTime="2026-02-03 06:47:36.791129248 +0000 UTC m=+95.077823064" Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.791600 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-cp5t2" podStartSLOduration=7.7915930719999995 podStartE2EDuration="7.791593072s" podCreationTimestamp="2026-02-03 06:47:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:36.765039575 +0000 UTC m=+95.051733391" watchObservedRunningTime="2026-02-03 06:47:36.791593072 +0000 UTC m=+95.078286888" Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.794493 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-hf97k" event={"ID":"584d6f48-0415-4b3f-813c-a22af4a339cb","Type":"ContainerStarted","Data":"c136ba36608fa213be855442b5d6b16c601b9ebe17844b5989486a1565772323"} Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.796194 4998 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-jppzk container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.16:8443/healthz\": dial tcp 10.217.0.16:8443: connect: connection refused" start-of-body= Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.796235 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jppzk" podUID="ff1b3af9-22f7-4ac6-93a5-5794c7022f68" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.16:8443/healthz\": dial tcp 10.217.0.16:8443: connect: connection refused" Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.796601 4998 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-cv476 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.31:8443/healthz\": dial tcp 10.217.0.31:8443: connect: connection refused" start-of-body= Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.796628 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-cv476" podUID="943dc95b-dbe5-4561-bbe2-b2bf1680eb60" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.31:8443/healthz\": dial tcp 10.217.0.31:8443: connect: connection refused" Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.803589 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 
06:47:36 crc kubenswrapper[4998]: E0203 06:47:36.803889 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:37.30385607 +0000 UTC m=+95.590550016 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.805327 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:36 crc kubenswrapper[4998]: E0203 06:47:36.808116 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:37.30810069 +0000 UTC m=+95.594794687 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.818620 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-sxhrq" podStartSLOduration=73.818602704 podStartE2EDuration="1m13.818602704s" podCreationTimestamp="2026-02-03 06:46:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:36.817967064 +0000 UTC m=+95.104660880" watchObservedRunningTime="2026-02-03 06:47:36.818602704 +0000 UTC m=+95.105296510" Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.877514 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tpbvt" podStartSLOduration=74.877496396 podStartE2EDuration="1m14.877496396s" podCreationTimestamp="2026-02-03 06:46:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:36.875543196 +0000 UTC m=+95.162237022" watchObservedRunningTime="2026-02-03 06:47:36.877496396 +0000 UTC m=+95.164190212" Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.879417 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-pbbqh" podStartSLOduration=74.879404935 
podStartE2EDuration="1m14.879404935s" podCreationTimestamp="2026-02-03 06:46:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:36.842113027 +0000 UTC m=+95.128806843" watchObservedRunningTime="2026-02-03 06:47:36.879404935 +0000 UTC m=+95.166098761" Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.905356 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-6d7sj" podStartSLOduration=73.905339043 podStartE2EDuration="1m13.905339043s" podCreationTimestamp="2026-02-03 06:46:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:36.901674831 +0000 UTC m=+95.188368667" watchObservedRunningTime="2026-02-03 06:47:36.905339043 +0000 UTC m=+95.192032859" Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.907601 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:47:36 crc kubenswrapper[4998]: E0203 06:47:36.917620 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:37.417599591 +0000 UTC m=+95.704293397 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.962431 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-hf97k" podStartSLOduration=73.96241426 podStartE2EDuration="1m13.96241426s" podCreationTimestamp="2026-02-03 06:46:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:36.961192122 +0000 UTC m=+95.247885928" watchObservedRunningTime="2026-02-03 06:47:36.96241426 +0000 UTC m=+95.249108066" Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.963416 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-hgcng" podStartSLOduration=73.963410481 podStartE2EDuration="1m13.963410481s" podCreationTimestamp="2026-02-03 06:46:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:36.930032473 +0000 UTC m=+95.216726289" watchObservedRunningTime="2026-02-03 06:47:36.963410481 +0000 UTC m=+95.250104287" Feb 03 06:47:36 crc kubenswrapper[4998]: I0203 06:47:36.998268 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-ingress-canary/ingress-canary-p5fkm" podStartSLOduration=7.9982412929999995 podStartE2EDuration="7.998241293s" podCreationTimestamp="2026-02-03 06:47:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:36.984872371 +0000 UTC m=+95.271566187" watchObservedRunningTime="2026-02-03 06:47:36.998241293 +0000 UTC m=+95.284935099" Feb 03 06:47:37 crc kubenswrapper[4998]: I0203 06:47:37.011529 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:37 crc kubenswrapper[4998]: E0203 06:47:37.011991 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:37.511974915 +0000 UTC m=+95.798668721 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:37 crc kubenswrapper[4998]: I0203 06:47:37.053691 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-l7cxx" podStartSLOduration=74.053671119 podStartE2EDuration="1m14.053671119s" podCreationTimestamp="2026-02-03 06:46:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:37.052947136 +0000 UTC m=+95.339640952" watchObservedRunningTime="2026-02-03 06:47:37.053671119 +0000 UTC m=+95.340364925" Feb 03 06:47:37 crc kubenswrapper[4998]: I0203 06:47:37.053889 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ffwz5" podStartSLOduration=74.053881885 podStartE2EDuration="1m14.053881885s" podCreationTimestamp="2026-02-03 06:46:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:37.027516364 +0000 UTC m=+95.314210180" watchObservedRunningTime="2026-02-03 06:47:37.053881885 +0000 UTC m=+95.340575691" Feb 03 06:47:37 crc kubenswrapper[4998]: I0203 06:47:37.076219 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fx6gz" podStartSLOduration=74.076196172 podStartE2EDuration="1m14.076196172s" podCreationTimestamp="2026-02-03 06:46:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:37.075405178 +0000 UTC m=+95.362098994" watchObservedRunningTime="2026-02-03 06:47:37.076196172 +0000 UTC m=+95.362889978" Feb 03 06:47:37 
Feb 03 06:47:37 crc kubenswrapper[4998]: I0203 06:47:37.101997 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qshv4" podStartSLOduration=74.101981616 podStartE2EDuration="1m14.101981616s" podCreationTimestamp="2026-02-03 06:46:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:37.098708945 +0000 UTC m=+95.385402751" watchObservedRunningTime="2026-02-03 06:47:37.101981616 +0000 UTC m=+95.388675412"
Feb 03 06:47:37 crc kubenswrapper[4998]: I0203 06:47:37.112575 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 06:47:37 crc kubenswrapper[4998]: E0203 06:47:37.113091 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:37.613070967 +0000 UTC m=+95.899764773 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:37 crc kubenswrapper[4998]: I0203 06:47:37.155296 4998 patch_prober.go:28] interesting pod/router-default-5444994796-6dmdn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Feb 03 06:47:37 crc kubenswrapper[4998]: [-]has-synced failed: reason withheld
Feb 03 06:47:37 crc kubenswrapper[4998]: [+]process-running ok
Feb 03 06:47:37 crc kubenswrapper[4998]: healthz check failed
Feb 03 06:47:37 crc kubenswrapper[4998]: I0203 06:47:37.155384 4998 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6dmdn" podUID="fce530e9-fb75-4bb1-af35-292bcdf4bc59" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Feb 03 06:47:37 crc kubenswrapper[4998]: I0203 06:47:37.216308 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:37 crc kubenswrapper[4998]: E0203 06:47:37.216727 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:37.716712827 +0000 UTC m=+96.003406633 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:37 crc kubenswrapper[4998]: I0203 06:47:37.317958 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 06:47:37 crc kubenswrapper[4998]: E0203 06:47:37.318364 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:37.818349735 +0000 UTC m=+96.105043541 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:37 crc kubenswrapper[4998]: I0203 06:47:37.419128 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:37 crc kubenswrapper[4998]: E0203 06:47:37.419448 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:37.919422866 +0000 UTC m=+96.206116672 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:37 crc kubenswrapper[4998]: I0203 06:47:37.519749 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 06:47:37 crc kubenswrapper[4998]: E0203 06:47:37.520080 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:38.020063973 +0000 UTC m=+96.306757769 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:37 crc kubenswrapper[4998]: I0203 06:47:37.621620 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:37 crc kubenswrapper[4998]: E0203 06:47:37.622016 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:38.12200035 +0000 UTC m=+96.408694156 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
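The router-default startup-probe records above use the standard Kubernetes healthz mux format: the kubelet logs the start of the HTTP response body, with [+] marking sub-checks that passed and [-] those that failed (here backend-http and has-synced). A throwaway Python sketch for skimming such multi-line probe dumps; "kubelet.log" is a hypothetical plain-text extraction of this log:

    import re

    # Print only the failing healthz sub-checks ([-]name) found in kubelet
    # probe output. "kubelet.log" is a hypothetical path for this log.
    check = re.compile(r"\[([+-])\](\S+)")

    with open("kubelet.log", encoding="utf-8", errors="replace") as fh:
        for line in fh:
            for sign, name in check.findall(line):
                if sign == "-":
                    print("failing:", name)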
Feb 03 06:47:37 crc kubenswrapper[4998]: I0203 06:47:37.723890 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 06:47:37 crc kubenswrapper[4998]: E0203 06:47:37.724062 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:38.224035651 +0000 UTC m=+96.510729457 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:37 crc kubenswrapper[4998]: I0203 06:47:37.724171 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:37 crc kubenswrapper[4998]: E0203 06:47:37.724512 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:38.224504135 +0000 UTC m=+96.511197941 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:37 crc kubenswrapper[4998]: I0203 06:47:37.738489 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kchsz"
Feb 03 06:47:37 crc kubenswrapper[4998]: I0203 06:47:37.796254 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29501685-vmrhm" podStartSLOduration=74.796239133 podStartE2EDuration="1m14.796239133s" podCreationTimestamp="2026-02-03 06:46:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:37.126694336 +0000 UTC m=+95.413388152" watchObservedRunningTime="2026-02-03 06:47:37.796239133 +0000 UTC m=+96.082932939"
Feb 03 06:47:37 crc kubenswrapper[4998]: I0203 06:47:37.800440 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-6g4qm" event={"ID":"16dec508-e534-422d-8c43-75e9301ef843","Type":"ContainerStarted","Data":"1f45daf9d7ca399066379decd74eabfaa96547922d60e8c01dab0f04e90fc022"}
Feb 03 06:47:37 crc kubenswrapper[4998]: I0203 06:47:37.801857 4998 patch_prober.go:28] interesting pod/console-operator-58897d9998-pbbqh container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/readyz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body=
Feb 03 06:47:37 crc kubenswrapper[4998]: I0203 06:47:37.801904 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-pbbqh" podUID="86f0eb38-79ac-40a4-9000-caadc16a11a7" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.29:8443/readyz\": dial tcp 10.217.0.29:8443: connect: connection refused"
Feb 03 06:47:37 crc kubenswrapper[4998]: I0203 06:47:37.802059 4998 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-6d7sj container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" start-of-body=
Feb 03 06:47:37 crc kubenswrapper[4998]: I0203 06:47:37.802106 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-6d7sj" podUID="8d0e557a-d9eb-4b35-a6a7-e6ab993d5464" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused"
Feb 03 06:47:37 crc kubenswrapper[4998]: I0203 06:47:37.823189 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xwdnv"
Feb 03 06:47:37 crc kubenswrapper[4998]: I0203 06:47:37.825549 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 06:47:37 crc kubenswrapper[4998]: E0203 06:47:37.826998 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:38.326971879 +0000 UTC m=+96.613665675 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:37 crc kubenswrapper[4998]: I0203 06:47:37.909550 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-cv476"
Feb 03 06:47:37 crc kubenswrapper[4998]: I0203 06:47:37.910102 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-zqbnn"
Feb 03 06:47:37 crc kubenswrapper[4998]: I0203 06:47:37.927862 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:37 crc kubenswrapper[4998]: E0203 06:47:37.928182 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:38.428171234 +0000 UTC m=+96.714865040 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:38 crc kubenswrapper[4998]: I0203 06:47:38.028408 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 06:47:38 crc kubenswrapper[4998]: E0203 06:47:38.028555 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:38.528531812 +0000 UTC m=+96.815225618 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:38 crc kubenswrapper[4998]: I0203 06:47:38.028682 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:38 crc kubenswrapper[4998]: E0203 06:47:38.029056 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:38.529046878 +0000 UTC m=+96.815740684 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:38 crc kubenswrapper[4998]: I0203 06:47:38.129797 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 06:47:38 crc kubenswrapper[4998]: E0203 06:47:38.130006 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:38.629978045 +0000 UTC m=+96.916671851 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:38 crc kubenswrapper[4998]: I0203 06:47:38.130046 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:38 crc kubenswrapper[4998]: E0203 06:47:38.130387 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:38.630379327 +0000 UTC m=+96.917073133 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:38 crc kubenswrapper[4998]: I0203 06:47:38.149321 4998 patch_prober.go:28] interesting pod/router-default-5444994796-6dmdn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Feb 03 06:47:38 crc kubenswrapper[4998]: [-]has-synced failed: reason withheld
Feb 03 06:47:38 crc kubenswrapper[4998]: [+]process-running ok
Feb 03 06:47:38 crc kubenswrapper[4998]: healthz check failed
Feb 03 06:47:38 crc kubenswrapper[4998]: I0203 06:47:38.149396 4998 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6dmdn" podUID="fce530e9-fb75-4bb1-af35-292bcdf4bc59" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Feb 03 06:47:38 crc kubenswrapper[4998]: I0203 06:47:38.231315 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 06:47:38 crc kubenswrapper[4998]: E0203 06:47:38.231450 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:38.731433056 +0000 UTC m=+97.018126862 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:38 crc kubenswrapper[4998]: I0203 06:47:38.231584 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:38 crc kubenswrapper[4998]: E0203 06:47:38.231937 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:38.731925531 +0000 UTC m=+97.018619337 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:38 crc kubenswrapper[4998]: I0203 06:47:38.335224 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 06:47:38 crc kubenswrapper[4998]: E0203 06:47:38.335396 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:38.835366045 +0000 UTC m=+97.122059851 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:38 crc kubenswrapper[4998]: I0203 06:47:38.335591 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:38 crc kubenswrapper[4998]: E0203 06:47:38.335911 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:38.835904742 +0000 UTC m=+97.122598548 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:38 crc kubenswrapper[4998]: I0203 06:47:38.436305 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 06:47:38 crc kubenswrapper[4998]: E0203 06:47:38.436470 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:38.936449326 +0000 UTC m=+97.223143132 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:38 crc kubenswrapper[4998]: I0203 06:47:38.436647 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:38 crc kubenswrapper[4998]: E0203 06:47:38.436913 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:38.93689948 +0000 UTC m=+97.223593276 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:38 crc kubenswrapper[4998]: I0203 06:47:38.538113 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 06:47:38 crc kubenswrapper[4998]: E0203 06:47:38.538281 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:39.038253609 +0000 UTC m=+97.324947415 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:38 crc kubenswrapper[4998]: I0203 06:47:38.538485 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:38 crc kubenswrapper[4998]: E0203 06:47:38.538816 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:39.038807706 +0000 UTC m=+97.325501512 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:38 crc kubenswrapper[4998]: I0203 06:47:38.640313 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 06:47:38 crc kubenswrapper[4998]: E0203 06:47:38.640495 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:39.140471155 +0000 UTC m=+97.427164961 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:38 crc kubenswrapper[4998]: I0203 06:47:38.640612 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:38 crc kubenswrapper[4998]: E0203 06:47:38.640946 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:39.140938009 +0000 UTC m=+97.427631815 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:38 crc kubenswrapper[4998]: I0203 06:47:38.741566 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 06:47:38 crc kubenswrapper[4998]: E0203 06:47:38.741755 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:39.241729402 +0000 UTC m=+97.528423208 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:38 crc kubenswrapper[4998]: I0203 06:47:38.741880 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:38 crc kubenswrapper[4998]: E0203 06:47:38.742200 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:39.242187926 +0000 UTC m=+97.528881732 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:38 crc kubenswrapper[4998]: I0203 06:47:38.808082 4998 generic.go:334] "Generic (PLEG): container finished" podID="5a571eff-c1d3-4fc0-84e4-43cadf0c7979" containerID="3ba0e525e44872d49676bb37c726957410f36f5262a90d479d69331017f46227" exitCode=0
Feb 03 06:47:38 crc kubenswrapper[4998]: I0203 06:47:38.808246 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501685-vmrhm" event={"ID":"5a571eff-c1d3-4fc0-84e4-43cadf0c7979","Type":"ContainerDied","Data":"3ba0e525e44872d49676bb37c726957410f36f5262a90d479d69331017f46227"}
Feb 03 06:47:38 crc kubenswrapper[4998]: I0203 06:47:38.808775 4998 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-6d7sj container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" start-of-body=
Feb 03 06:47:38 crc kubenswrapper[4998]: I0203 06:47:38.808834 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-6d7sj" podUID="8d0e557a-d9eb-4b35-a6a7-e6ab993d5464" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused"
Feb 03 06:47:38 crc kubenswrapper[4998]: I0203 06:47:38.843182 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 06:47:38 crc kubenswrapper[4998]: E0203 06:47:38.843377 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:39.343350939 +0000 UTC m=+97.630044745 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:38 crc kubenswrapper[4998]: I0203 06:47:38.844682 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:38 crc kubenswrapper[4998]: E0203 06:47:38.845521 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:39.345506136 +0000 UTC m=+97.632199942 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:38 crc kubenswrapper[4998]: I0203 06:47:38.945392 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 06:47:38 crc kubenswrapper[4998]: E0203 06:47:38.945495 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:39.445479042 +0000 UTC m=+97.732172848 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
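Every MountVolume.MountDevice and UnmountVolume.TearDown attempt above fails the same way: the volume manager cannot find kubevirt.io.hostpath-provisioner among the kubelet's registered CSI drivers, so nestedpendingoperations re-queues each operation with a fixed 500 ms durationBeforeRetry. The churn should stop once the csi-hostpathplugin-6g4qm pod that just reported ContainerStarted (06:47:37.800) registers the driver; on a live node that registration could be confirmed from the CSINode object (e.g. oc get csinode crc). A throwaway Python sketch to tally the retries per operation and volume, assuming the log has been extracted to a plain-text file named kubelet.log (a hypothetical path):

    import re
    from collections import Counter

    # Tally kubelet volume-operation retries per (operation, volume) to see
    # how long the driver-registration gap lasted. "kubelet.log" is a
    # hypothetical plain-text extraction of this log.
    pat = re.compile(r'(UnmountVolume\.TearDown|MountVolume\.MountDevice)'
                     r' failed for volume "([^"]+)"')

    counts = Counter()
    with open("kubelet.log", encoding="utf-8", errors="replace") as fh:
        for line in fh:
            counts.update(pat.findall(line))

    for (op, vol), n in counts.most_common():
        print(f"{n:4d}  {op:28s}  {vol}")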
Feb 03 06:47:38 crc kubenswrapper[4998]: I0203 06:47:38.945854 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:38 crc kubenswrapper[4998]: E0203 06:47:38.946152 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:39.446135163 +0000 UTC m=+97.732828969 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.047383 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 06:47:39 crc kubenswrapper[4998]: E0203 06:47:39.047577 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:39.547551584 +0000 UTC m=+97.834245390 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.047624 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:39 crc kubenswrapper[4998]: E0203 06:47:39.047946 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:39.547938596 +0000 UTC m=+97.834632402 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.094409 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-95844"]
Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.095374 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-95844"
Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.110300 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.147996 4998 patch_prober.go:28] interesting pod/router-default-5444994796-6dmdn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Feb 03 06:47:39 crc kubenswrapper[4998]: [-]has-synced failed: reason withheld
Feb 03 06:47:39 crc kubenswrapper[4998]: [+]process-running ok
Feb 03 06:47:39 crc kubenswrapper[4998]: healthz check failed
Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.148073 4998 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6dmdn" podUID="fce530e9-fb75-4bb1-af35-292bcdf4bc59" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.148426 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 06:47:39 crc kubenswrapper[4998]: E0203 06:47:39.148618 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:39.648598124 +0000 UTC m=+97.935292130 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.148652 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c650fe1-e6ec-4079-b7ab-a0fb0683ac03-utilities\") pod \"certified-operators-95844\" (UID: \"3c650fe1-e6ec-4079-b7ab-a0fb0683ac03\") " pod="openshift-marketplace/certified-operators-95844"
Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.148753 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkk89\" (UniqueName: \"kubernetes.io/projected/3c650fe1-e6ec-4079-b7ab-a0fb0683ac03-kube-api-access-kkk89\") pod \"certified-operators-95844\" (UID: \"3c650fe1-e6ec-4079-b7ab-a0fb0683ac03\") " pod="openshift-marketplace/certified-operators-95844"
Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.148870 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.148994 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c650fe1-e6ec-4079-b7ab-a0fb0683ac03-catalog-content\") pod \"certified-operators-95844\" (UID: \"3c650fe1-e6ec-4079-b7ab-a0fb0683ac03\") " pod="openshift-marketplace/certified-operators-95844"
Feb 03 06:47:39 crc kubenswrapper[4998]: E0203 06:47:39.149309 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:39.649300405 +0000 UTC m=+97.935994211 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.174752 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-95844"]
Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.250202 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.250441 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkk89\" (UniqueName: \"kubernetes.io/projected/3c650fe1-e6ec-4079-b7ab-a0fb0683ac03-kube-api-access-kkk89\") pod \"certified-operators-95844\" (UID: \"3c650fe1-e6ec-4079-b7ab-a0fb0683ac03\") " pod="openshift-marketplace/certified-operators-95844"
Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.250567 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c650fe1-e6ec-4079-b7ab-a0fb0683ac03-catalog-content\") pod \"certified-operators-95844\" (UID: \"3c650fe1-e6ec-4079-b7ab-a0fb0683ac03\") " pod="openshift-marketplace/certified-operators-95844"
Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.250622 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c650fe1-e6ec-4079-b7ab-a0fb0683ac03-utilities\") pod \"certified-operators-95844\" (UID: \"3c650fe1-e6ec-4079-b7ab-a0fb0683ac03\") " pod="openshift-marketplace/certified-operators-95844"
Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.251098 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c650fe1-e6ec-4079-b7ab-a0fb0683ac03-utilities\") pod \"certified-operators-95844\" (UID: \"3c650fe1-e6ec-4079-b7ab-a0fb0683ac03\") " pod="openshift-marketplace/certified-operators-95844"
Feb 03 06:47:39 crc kubenswrapper[4998]: E0203 06:47:39.251189 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:39.751171311 +0000 UTC m=+98.037865117 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.251740 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c650fe1-e6ec-4079-b7ab-a0fb0683ac03-catalog-content\") pod \"certified-operators-95844\" (UID: \"3c650fe1-e6ec-4079-b7ab-a0fb0683ac03\") " pod="openshift-marketplace/certified-operators-95844"
Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.310841 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkk89\" (UniqueName: \"kubernetes.io/projected/3c650fe1-e6ec-4079-b7ab-a0fb0683ac03-kube-api-access-kkk89\") pod \"certified-operators-95844\" (UID: \"3c650fe1-e6ec-4079-b7ab-a0fb0683ac03\") " pod="openshift-marketplace/certified-operators-95844"
Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.344747 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-zmxrc"]
Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.345954 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zmxrc"
Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.351622 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efee4303-e909-4341-b837-326fda8a64e1-catalog-content\") pod \"community-operators-zmxrc\" (UID: \"efee4303-e909-4341-b837-326fda8a64e1\") " pod="openshift-marketplace/community-operators-zmxrc"
Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.351662 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/efee4303-e909-4341-b837-326fda8a64e1-utilities\") pod \"community-operators-zmxrc\" (UID: \"efee4303-e909-4341-b837-326fda8a64e1\") " pod="openshift-marketplace/community-operators-zmxrc"
Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.351690 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.351711 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9hnkq\" (UniqueName: \"kubernetes.io/projected/efee4303-e909-4341-b837-326fda8a64e1-kube-api-access-9hnkq\") pod \"community-operators-zmxrc\" (UID: \"efee4303-e909-4341-b837-326fda8a64e1\") " pod="openshift-marketplace/community-operators-zmxrc"
No retries permitted until 2026-02-03 06:47:39.852247712 +0000 UTC m=+98.138941558 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.363278 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.365088 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zmxrc"] Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.419070 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-95844" Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.470890 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.471271 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efee4303-e909-4341-b837-326fda8a64e1-catalog-content\") pod \"community-operators-zmxrc\" (UID: \"efee4303-e909-4341-b837-326fda8a64e1\") " pod="openshift-marketplace/community-operators-zmxrc" Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.471307 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/efee4303-e909-4341-b837-326fda8a64e1-utilities\") pod \"community-operators-zmxrc\" (UID: \"efee4303-e909-4341-b837-326fda8a64e1\") " pod="openshift-marketplace/community-operators-zmxrc" Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.471359 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9hnkq\" (UniqueName: \"kubernetes.io/projected/efee4303-e909-4341-b837-326fda8a64e1-kube-api-access-9hnkq\") pod \"community-operators-zmxrc\" (UID: \"efee4303-e909-4341-b837-326fda8a64e1\") " pod="openshift-marketplace/community-operators-zmxrc" Feb 03 06:47:39 crc kubenswrapper[4998]: E0203 06:47:39.471438 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:39.971388519 +0000 UTC m=+98.258082335 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.472466 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efee4303-e909-4341-b837-326fda8a64e1-catalog-content\") pod \"community-operators-zmxrc\" (UID: \"efee4303-e909-4341-b837-326fda8a64e1\") " pod="openshift-marketplace/community-operators-zmxrc" Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.483223 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/efee4303-e909-4341-b837-326fda8a64e1-utilities\") pod \"community-operators-zmxrc\" (UID: \"efee4303-e909-4341-b837-326fda8a64e1\") " pod="openshift-marketplace/community-operators-zmxrc" Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.512873 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-zk44q"] Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.514273 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zk44q" Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.526663 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9hnkq\" (UniqueName: \"kubernetes.io/projected/efee4303-e909-4341-b837-326fda8a64e1-kube-api-access-9hnkq\") pod \"community-operators-zmxrc\" (UID: \"efee4303-e909-4341-b837-326fda8a64e1\") " pod="openshift-marketplace/community-operators-zmxrc" Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.548562 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zk44q"] Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.576018 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q5zxx\" (UniqueName: \"kubernetes.io/projected/f676cd7f-61b9-43e5-9ca0-4a3deddecff3-kube-api-access-q5zxx\") pod \"certified-operators-zk44q\" (UID: \"f676cd7f-61b9-43e5-9ca0-4a3deddecff3\") " pod="openshift-marketplace/certified-operators-zk44q" Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.576070 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f676cd7f-61b9-43e5-9ca0-4a3deddecff3-catalog-content\") pod \"certified-operators-zk44q\" (UID: \"f676cd7f-61b9-43e5-9ca0-4a3deddecff3\") " pod="openshift-marketplace/certified-operators-zk44q" Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.576168 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.576204 4998 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f676cd7f-61b9-43e5-9ca0-4a3deddecff3-utilities\") pod \"certified-operators-zk44q\" (UID: \"f676cd7f-61b9-43e5-9ca0-4a3deddecff3\") " pod="openshift-marketplace/certified-operators-zk44q" Feb 03 06:47:39 crc kubenswrapper[4998]: E0203 06:47:39.576491 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:40.076479163 +0000 UTC m=+98.363172969 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.670938 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zmxrc" Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.677049 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:47:39 crc kubenswrapper[4998]: E0203 06:47:39.677239 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:40.177204133 +0000 UTC m=+98.463897939 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.677302 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.677364 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f676cd7f-61b9-43e5-9ca0-4a3deddecff3-utilities\") pod \"certified-operators-zk44q\" (UID: \"f676cd7f-61b9-43e5-9ca0-4a3deddecff3\") " pod="openshift-marketplace/certified-operators-zk44q" Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.677410 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q5zxx\" (UniqueName: \"kubernetes.io/projected/f676cd7f-61b9-43e5-9ca0-4a3deddecff3-kube-api-access-q5zxx\") pod \"certified-operators-zk44q\" (UID: \"f676cd7f-61b9-43e5-9ca0-4a3deddecff3\") " pod="openshift-marketplace/certified-operators-zk44q" Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.677436 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f676cd7f-61b9-43e5-9ca0-4a3deddecff3-catalog-content\") pod \"certified-operators-zk44q\" (UID: \"f676cd7f-61b9-43e5-9ca0-4a3deddecff3\") " pod="openshift-marketplace/certified-operators-zk44q" Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.678033 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f676cd7f-61b9-43e5-9ca0-4a3deddecff3-catalog-content\") pod \"certified-operators-zk44q\" (UID: \"f676cd7f-61b9-43e5-9ca0-4a3deddecff3\") " pod="openshift-marketplace/certified-operators-zk44q" Feb 03 06:47:39 crc kubenswrapper[4998]: E0203 06:47:39.678340 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:40.178325577 +0000 UTC m=+98.465019383 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.678694 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f676cd7f-61b9-43e5-9ca0-4a3deddecff3-utilities\") pod \"certified-operators-zk44q\" (UID: \"f676cd7f-61b9-43e5-9ca0-4a3deddecff3\") " pod="openshift-marketplace/certified-operators-zk44q" Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.696087 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-z97nt"] Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.704512 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-z97nt" Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.729146 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q5zxx\" (UniqueName: \"kubernetes.io/projected/f676cd7f-61b9-43e5-9ca0-4a3deddecff3-kube-api-access-q5zxx\") pod \"certified-operators-zk44q\" (UID: \"f676cd7f-61b9-43e5-9ca0-4a3deddecff3\") " pod="openshift-marketplace/certified-operators-zk44q" Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.737991 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-z97nt"] Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.778588 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.778847 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84a91c75-202c-449d-b70a-569fafdf09fa-utilities\") pod \"community-operators-z97nt\" (UID: \"84a91c75-202c-449d-b70a-569fafdf09fa\") " pod="openshift-marketplace/community-operators-z97nt" Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.778890 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84a91c75-202c-449d-b70a-569fafdf09fa-catalog-content\") pod \"community-operators-z97nt\" (UID: \"84a91c75-202c-449d-b70a-569fafdf09fa\") " pod="openshift-marketplace/community-operators-z97nt" Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.779003 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vl62j\" (UniqueName: \"kubernetes.io/projected/84a91c75-202c-449d-b70a-569fafdf09fa-kube-api-access-vl62j\") pod \"community-operators-z97nt\" (UID: \"84a91c75-202c-449d-b70a-569fafdf09fa\") " pod="openshift-marketplace/community-operators-z97nt" Feb 03 06:47:39 crc kubenswrapper[4998]: E0203 06:47:39.779117 4998 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:40.279098029 +0000 UTC m=+98.565791835 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.845119 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-6g4qm" event={"ID":"16dec508-e534-422d-8c43-75e9301ef843","Type":"ContainerStarted","Data":"533700854f2026cef04b0f83bf4cf0229b07cc5f836f6b82411e3ff9c274e27c"} Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.845181 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-6g4qm" event={"ID":"16dec508-e534-422d-8c43-75e9301ef843","Type":"ContainerStarted","Data":"baee4191999c598ac3d9763788c411e9c75c80fed71d0f0caad141637d34e6fb"} Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.855771 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zk44q" Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.868059 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-95844"] Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.893656 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vl62j\" (UniqueName: \"kubernetes.io/projected/84a91c75-202c-449d-b70a-569fafdf09fa-kube-api-access-vl62j\") pod \"community-operators-z97nt\" (UID: \"84a91c75-202c-449d-b70a-569fafdf09fa\") " pod="openshift-marketplace/community-operators-z97nt" Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.893701 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.893746 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84a91c75-202c-449d-b70a-569fafdf09fa-utilities\") pod \"community-operators-z97nt\" (UID: \"84a91c75-202c-449d-b70a-569fafdf09fa\") " pod="openshift-marketplace/community-operators-z97nt" Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.893800 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84a91c75-202c-449d-b70a-569fafdf09fa-catalog-content\") pod \"community-operators-z97nt\" (UID: \"84a91c75-202c-449d-b70a-569fafdf09fa\") " pod="openshift-marketplace/community-operators-z97nt" Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.894510 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/84a91c75-202c-449d-b70a-569fafdf09fa-catalog-content\") pod \"community-operators-z97nt\" (UID: \"84a91c75-202c-449d-b70a-569fafdf09fa\") " pod="openshift-marketplace/community-operators-z97nt" Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.894723 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84a91c75-202c-449d-b70a-569fafdf09fa-utilities\") pod \"community-operators-z97nt\" (UID: \"84a91c75-202c-449d-b70a-569fafdf09fa\") " pod="openshift-marketplace/community-operators-z97nt" Feb 03 06:47:39 crc kubenswrapper[4998]: E0203 06:47:39.894723 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:40.394703357 +0000 UTC m=+98.681397363 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:39 crc kubenswrapper[4998]: I0203 06:47:39.934683 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vl62j\" (UniqueName: \"kubernetes.io/projected/84a91c75-202c-449d-b70a-569fafdf09fa-kube-api-access-vl62j\") pod \"community-operators-z97nt\" (UID: \"84a91c75-202c-449d-b70a-569fafdf09fa\") " pod="openshift-marketplace/community-operators-z97nt" Feb 03 06:47:39 crc kubenswrapper[4998]: W0203 06:47:39.961292 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3c650fe1_e6ec_4079_b7ab_a0fb0683ac03.slice/crio-299a490c6409a972a4db9069cba5ad9bd42e626f07252de9405ed23cd977561b WatchSource:0}: Error finding container 299a490c6409a972a4db9069cba5ad9bd42e626f07252de9405ed23cd977561b: Status 404 returned error can't find the container with id 299a490c6409a972a4db9069cba5ad9bd42e626f07252de9405ed23cd977561b Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:39.995265 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:47:40 crc kubenswrapper[4998]: E0203 06:47:39.995653 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:40.495638333 +0000 UTC m=+98.782332139 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.040064 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-z97nt" Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.098381 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:40 crc kubenswrapper[4998]: E0203 06:47:40.098758 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:40.598743327 +0000 UTC m=+98.885437133 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.143447 4998 patch_prober.go:28] interesting pod/router-default-5444994796-6dmdn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 03 06:47:40 crc kubenswrapper[4998]: [-]has-synced failed: reason withheld Feb 03 06:47:40 crc kubenswrapper[4998]: [+]process-running ok Feb 03 06:47:40 crc kubenswrapper[4998]: healthz check failed Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.143723 4998 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6dmdn" podUID="fce530e9-fb75-4bb1-af35-292bcdf4bc59" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.203533 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:47:40 crc kubenswrapper[4998]: E0203 06:47:40.203915 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:40.703899953 +0000 UTC m=+98.990593759 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.213214 4998 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.305256 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:40 crc kubenswrapper[4998]: E0203 06:47:40.305542 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:40.805529801 +0000 UTC m=+99.092223607 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.361605 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zmxrc"] Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.405695 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:47:40 crc kubenswrapper[4998]: E0203 06:47:40.406043 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:40.906023504 +0000 UTC m=+99.192717310 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.406364 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:40 crc kubenswrapper[4998]: E0203 06:47:40.406673 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 06:47:40.906663244 +0000 UTC m=+99.193357050 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2jbwn" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.507293 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 06:47:40 crc kubenswrapper[4998]: E0203 06:47:40.508598 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 06:47:41.00857882 +0000 UTC m=+99.295272636 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.576173 4998 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2026-02-03T06:47:40.213239601Z","Handler":null,"Name":""} Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.581239 4998 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.581275 4998 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.585111 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501685-vmrhm" Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.599403 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zk44q"] Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.611362 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5a571eff-c1d3-4fc0-84e4-43cadf0c7979-secret-volume\") pod \"5a571eff-c1d3-4fc0-84e4-43cadf0c7979\" (UID: \"5a571eff-c1d3-4fc0-84e4-43cadf0c7979\") " Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.611555 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5a571eff-c1d3-4fc0-84e4-43cadf0c7979-config-volume\") pod \"5a571eff-c1d3-4fc0-84e4-43cadf0c7979\" (UID: \"5a571eff-c1d3-4fc0-84e4-43cadf0c7979\") " Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.611622 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d8bh2\" (UniqueName: \"kubernetes.io/projected/5a571eff-c1d3-4fc0-84e4-43cadf0c7979-kube-api-access-d8bh2\") pod \"5a571eff-c1d3-4fc0-84e4-43cadf0c7979\" (UID: \"5a571eff-c1d3-4fc0-84e4-43cadf0c7979\") " Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.611754 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.612123 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a571eff-c1d3-4fc0-84e4-43cadf0c7979-config-volume" (OuterVolumeSpecName: "config-volume") pod "5a571eff-c1d3-4fc0-84e4-43cadf0c7979" (UID: "5a571eff-c1d3-4fc0-84e4-43cadf0c7979"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.616556 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-2wmmh" Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.616755 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-2wmmh" Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.616937 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a571eff-c1d3-4fc0-84e4-43cadf0c7979-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "5a571eff-c1d3-4fc0-84e4-43cadf0c7979" (UID: "5a571eff-c1d3-4fc0-84e4-43cadf0c7979"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.622901 4998 patch_prober.go:28] interesting pod/console-f9d7485db-2wmmh container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.17:8443/health\": dial tcp 10.217.0.17:8443: connect: connection refused" start-of-body= Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.622975 4998 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-2wmmh" podUID="e43c1b92-4c7b-4db9-8363-472ecea3213f" containerName="console" probeResult="failure" output="Get \"https://10.217.0.17:8443/health\": dial tcp 10.217.0.17:8443: connect: connection refused" Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.623922 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a571eff-c1d3-4fc0-84e4-43cadf0c7979-kube-api-access-d8bh2" (OuterVolumeSpecName: "kube-api-access-d8bh2") pod "5a571eff-c1d3-4fc0-84e4-43cadf0c7979" (UID: "5a571eff-c1d3-4fc0-84e4-43cadf0c7979"). InnerVolumeSpecName "kube-api-access-d8bh2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.633509 4998 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.633633 4998 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.651718 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-z97nt"]
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.715721 4998 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5a571eff-c1d3-4fc0-84e4-43cadf0c7979-config-volume\") on node \"crc\" DevicePath \"\""
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.715744 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d8bh2\" (UniqueName: \"kubernetes.io/projected/5a571eff-c1d3-4fc0-84e4-43cadf0c7979-kube-api-access-d8bh2\") on node \"crc\" DevicePath \"\""
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.715754 4998 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5a571eff-c1d3-4fc0-84e4-43cadf0c7979-secret-volume\") on node \"crc\" DevicePath \"\""
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.733128 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2jbwn\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.755454 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Feb 03 06:47:40 crc kubenswrapper[4998]: E0203 06:47:40.760105 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a571eff-c1d3-4fc0-84e4-43cadf0c7979" containerName="collect-profiles"
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.760142 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a571eff-c1d3-4fc0-84e4-43cadf0c7979" containerName="collect-profiles"
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.760235 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a571eff-c1d3-4fc0-84e4-43cadf0c7979" containerName="collect-profiles"
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.760724 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.772377 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt"
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.772600 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n"
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.777020 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.820899 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.822079 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/981b9cf9-466d-4b4c-b629-a49bf8968a84-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"981b9cf9-466d-4b4c-b629-a49bf8968a84\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.822170 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/981b9cf9-466d-4b4c-b629-a49bf8968a84-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"981b9cf9-466d-4b4c-b629-a49bf8968a84\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.822298 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7df2ae20-0aeb-4b1e-a408-c1903e061833-metrics-certs\") pod \"network-metrics-daemon-s5wml\" (UID: \"7df2ae20-0aeb-4b1e-a408-c1903e061833\") " pod="openshift-multus/network-metrics-daemon-s5wml"
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.829256 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7df2ae20-0aeb-4b1e-a408-c1903e061833-metrics-certs\") pod \"network-metrics-daemon-s5wml\" (UID: \"7df2ae20-0aeb-4b1e-a408-c1903e061833\") " pod="openshift-multus/network-metrics-daemon-s5wml"
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.830551 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.838253 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-hj4tg"
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.845883 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-hj4tg"
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.850319 4998 generic.go:334] "Generic (PLEG): container finished" podID="efee4303-e909-4341-b837-326fda8a64e1" containerID="0f140efc247f780bc41631c4c8b2db0c668f8dee04d347a02d897b31d7241177" exitCode=0
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.850420 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zmxrc" event={"ID":"efee4303-e909-4341-b837-326fda8a64e1","Type":"ContainerDied","Data":"0f140efc247f780bc41631c4c8b2db0c668f8dee04d347a02d897b31d7241177"}
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.850455 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zmxrc" event={"ID":"efee4303-e909-4341-b837-326fda8a64e1","Type":"ContainerStarted","Data":"bf38f8d6c5ea4afb76eaa4deb5b018f452abdd17a130adc7772f521638c01b3b"}
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.851724 4998 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.852212 4998 generic.go:334] "Generic (PLEG): container finished" podID="84a91c75-202c-449d-b70a-569fafdf09fa" containerID="dc02a2dca2616cdee6b614207023258868caaa98794edee6b9325a55087d2fd5" exitCode=0
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.852268 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z97nt" event={"ID":"84a91c75-202c-449d-b70a-569fafdf09fa","Type":"ContainerDied","Data":"dc02a2dca2616cdee6b614207023258868caaa98794edee6b9325a55087d2fd5"}
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.852295 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z97nt" event={"ID":"84a91c75-202c-449d-b70a-569fafdf09fa","Type":"ContainerStarted","Data":"2b74b359d44a5e0d5f485850ba74fc4a20d94c860f5d3eb541a2303aabcebf2e"}
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.855663 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501685-vmrhm"
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.855660 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501685-vmrhm" event={"ID":"5a571eff-c1d3-4fc0-84e4-43cadf0c7979","Type":"ContainerDied","Data":"f7d5cc34a318a4c6e722dacc52b24460504349527660695f985351d1c2b575c0"}
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.856152 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f7d5cc34a318a4c6e722dacc52b24460504349527660695f985351d1c2b575c0"
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.860395 4998 generic.go:334] "Generic (PLEG): container finished" podID="3c650fe1-e6ec-4079-b7ab-a0fb0683ac03" containerID="f404176cf0b908df41743d5208a4ea5503dcf40826d9e1af71c5372a6bf42d53" exitCode=0
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.860727 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-95844" event={"ID":"3c650fe1-e6ec-4079-b7ab-a0fb0683ac03","Type":"ContainerDied","Data":"f404176cf0b908df41743d5208a4ea5503dcf40826d9e1af71c5372a6bf42d53"}
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.860806 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-95844" event={"ID":"3c650fe1-e6ec-4079-b7ab-a0fb0683ac03","Type":"ContainerStarted","Data":"299a490c6409a972a4db9069cba5ad9bd42e626f07252de9405ed23cd977561b"}
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.868028 4998 generic.go:334] "Generic (PLEG): container finished" podID="f676cd7f-61b9-43e5-9ca0-4a3deddecff3" containerID="bf5f761cde0063825dd9829609e2b4e05b4c049f03fcd9c503882452c9991553" exitCode=0
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.868162 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zk44q" event={"ID":"f676cd7f-61b9-43e5-9ca0-4a3deddecff3","Type":"ContainerDied","Data":"bf5f761cde0063825dd9829609e2b4e05b4c049f03fcd9c503882452c9991553"}
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.868216 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zk44q" event={"ID":"f676cd7f-61b9-43e5-9ca0-4a3deddecff3","Type":"ContainerStarted","Data":"43be0648867f8bae82185861b40ea418b507bdfa4238624c219d7c4a1db6aebe"}
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.873755 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-6g4qm" event={"ID":"16dec508-e534-422d-8c43-75e9301ef843","Type":"ContainerStarted","Data":"f1979f2d8731288380abc4ab846a77950ba193e6f213c97011ac4287a0b067cd"}
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.924116 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/981b9cf9-466d-4b4c-b629-a49bf8968a84-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"981b9cf9-466d-4b4c-b629-a49bf8968a84\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.924168 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/981b9cf9-466d-4b4c-b629-a49bf8968a84-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"981b9cf9-466d-4b4c-b629-a49bf8968a84\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.928115 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/981b9cf9-466d-4b4c-b629-a49bf8968a84-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"981b9cf9-466d-4b4c-b629-a49bf8968a84\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Feb 03 06:47:40 crc kubenswrapper[4998]: I0203 06:47:40.967886 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/981b9cf9-466d-4b4c-b629-a49bf8968a84-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"981b9cf9-466d-4b4c-b629-a49bf8968a84\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.000796 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn"
Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.006184 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-6g4qm" podStartSLOduration=12.006166765 podStartE2EDuration="12.006166765s" podCreationTimestamp="2026-02-03 06:47:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:41.004313248 +0000 UTC m=+99.291007064" watchObservedRunningTime="2026-02-03 06:47:41.006166765 +0000 UTC m=+99.292860571"
Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.046276 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s5wml"
Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.091060 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-xhhkb"]
Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.092100 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xhhkb"
Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.094195 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.096676 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.103644 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xhhkb"]
Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.129248 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696-utilities\") pod \"redhat-marketplace-xhhkb\" (UID: \"c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696\") " pod="openshift-marketplace/redhat-marketplace-xhhkb"
Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.129400 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkz2b\" (UniqueName: \"kubernetes.io/projected/c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696-kube-api-access-mkz2b\") pod \"redhat-marketplace-xhhkb\" (UID: \"c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696\") " pod="openshift-marketplace/redhat-marketplace-xhhkb"
Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.129448 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696-catalog-content\") pod \"redhat-marketplace-xhhkb\" (UID: \"c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696\") " pod="openshift-marketplace/redhat-marketplace-xhhkb"
Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.142419 4998 patch_prober.go:28] interesting pod/router-default-5444994796-6dmdn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Feb 03 06:47:41 crc kubenswrapper[4998]: [-]has-synced failed: reason withheld
Feb 03 06:47:41 crc kubenswrapper[4998]: [+]process-running ok
Feb 03 06:47:41 crc kubenswrapper[4998]: healthz check failed
Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.142472 4998 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6dmdn" podUID="fce530e9-fb75-4bb1-af35-292bcdf4bc59" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.230861 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696-utilities\") pod \"redhat-marketplace-xhhkb\" (UID: \"c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696\") " pod="openshift-marketplace/redhat-marketplace-xhhkb"
Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.231266 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkz2b\" (UniqueName: \"kubernetes.io/projected/c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696-kube-api-access-mkz2b\") pod \"redhat-marketplace-xhhkb\" (UID: \"c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696\") " pod="openshift-marketplace/redhat-marketplace-xhhkb"
Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.231293 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696-catalog-content\") pod \"redhat-marketplace-xhhkb\" (UID: \"c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696\") " pod="openshift-marketplace/redhat-marketplace-xhhkb"
Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.231732 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696-utilities\") pod \"redhat-marketplace-xhhkb\" (UID: \"c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696\") " pod="openshift-marketplace/redhat-marketplace-xhhkb"
Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.231887 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696-catalog-content\") pod \"redhat-marketplace-xhhkb\" (UID: \"c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696\") " pod="openshift-marketplace/redhat-marketplace-xhhkb"
Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.250548 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkz2b\" (UniqueName: \"kubernetes.io/projected/c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696-kube-api-access-mkz2b\") pod \"redhat-marketplace-xhhkb\" (UID: \"c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696\") " pod="openshift-marketplace/redhat-marketplace-xhhkb"
Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.311425 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-s5wml"]
Feb 03 06:47:41 crc kubenswrapper[4998]: W0203 06:47:41.320243 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7df2ae20_0aeb_4b1e_a408_c1903e061833.slice/crio-a77f5268054f78c0da6405ee704bb3282bf9a99bfb26da72124f21015600bf66 WatchSource:0}: Error finding container a77f5268054f78c0da6405ee704bb3282bf9a99bfb26da72124f21015600bf66: Status 404 returned error can't find the container with id a77f5268054f78c0da6405ee704bb3282bf9a99bfb26da72124f21015600bf66
Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.350916 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.433608 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2jbwn"]
Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.462844 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xhhkb"
Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.486822 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-br759"]
Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.488102 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-br759"
Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.500918 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-br759"]
Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.537104 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ca9561b-1685-47eb-af5c-1eaf0266920d-utilities\") pod \"redhat-marketplace-br759\" (UID: \"6ca9561b-1685-47eb-af5c-1eaf0266920d\") " pod="openshift-marketplace/redhat-marketplace-br759"
Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.537170 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzp47\" (UniqueName: \"kubernetes.io/projected/6ca9561b-1685-47eb-af5c-1eaf0266920d-kube-api-access-tzp47\") pod \"redhat-marketplace-br759\" (UID: \"6ca9561b-1685-47eb-af5c-1eaf0266920d\") " pod="openshift-marketplace/redhat-marketplace-br759"
Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.537194 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ca9561b-1685-47eb-af5c-1eaf0266920d-catalog-content\") pod \"redhat-marketplace-br759\" (UID: \"6ca9561b-1685-47eb-af5c-1eaf0266920d\") " pod="openshift-marketplace/redhat-marketplace-br759"
Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.639409 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ca9561b-1685-47eb-af5c-1eaf0266920d-utilities\") pod \"redhat-marketplace-br759\" (UID: \"6ca9561b-1685-47eb-af5c-1eaf0266920d\") " pod="openshift-marketplace/redhat-marketplace-br759"
Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.639940 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tzp47\" (UniqueName: \"kubernetes.io/projected/6ca9561b-1685-47eb-af5c-1eaf0266920d-kube-api-access-tzp47\") pod \"redhat-marketplace-br759\" (UID: \"6ca9561b-1685-47eb-af5c-1eaf0266920d\") " pod="openshift-marketplace/redhat-marketplace-br759"
Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.639993 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ca9561b-1685-47eb-af5c-1eaf0266920d-catalog-content\") pod \"redhat-marketplace-br759\" (UID: \"6ca9561b-1685-47eb-af5c-1eaf0266920d\") " pod="openshift-marketplace/redhat-marketplace-br759"
Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.640758 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ca9561b-1685-47eb-af5c-1eaf0266920d-catalog-content\") pod \"redhat-marketplace-br759\" (UID: \"6ca9561b-1685-47eb-af5c-1eaf0266920d\") " pod="openshift-marketplace/redhat-marketplace-br759"
Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.640849 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ca9561b-1685-47eb-af5c-1eaf0266920d-utilities\") pod \"redhat-marketplace-br759\" (UID: \"6ca9561b-1685-47eb-af5c-1eaf0266920d\") " pod="openshift-marketplace/redhat-marketplace-br759"
Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.659403 4998 operation_generator.go:637] "MountVolume.SetUp
succeeded for volume \"kube-api-access-tzp47\" (UniqueName: \"kubernetes.io/projected/6ca9561b-1685-47eb-af5c-1eaf0266920d-kube-api-access-tzp47\") pod \"redhat-marketplace-br759\" (UID: \"6ca9561b-1685-47eb-af5c-1eaf0266920d\") " pod="openshift-marketplace/redhat-marketplace-br759" Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.687082 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xhhkb"] Feb 03 06:47:41 crc kubenswrapper[4998]: W0203 06:47:41.700765 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc2f8e3ac_85c1_42e7_8f9c_7c8f3e616696.slice/crio-058656f79c56160783cba0dc4191aa1fd0e516ed237cb316fcaa2f521046fcab WatchSource:0}: Error finding container 058656f79c56160783cba0dc4191aa1fd0e516ed237cb316fcaa2f521046fcab: Status 404 returned error can't find the container with id 058656f79c56160783cba0dc4191aa1fd0e516ed237cb316fcaa2f521046fcab Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.829384 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-br759" Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.887649 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" event={"ID":"b088ff98-5a5a-465d-ad48-9017fdebc2bf","Type":"ContainerStarted","Data":"bbf9d0b85feb7127bbd1ad812165ea29523d96a155103d781e6153abed7c8415"} Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.887735 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" event={"ID":"b088ff98-5a5a-465d-ad48-9017fdebc2bf","Type":"ContainerStarted","Data":"dba47fbc8c7fff5cf599d5913324ffa3987e798cd36f2dea2b1c6df7c4b967a1"} Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.888026 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.893211 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"981b9cf9-466d-4b4c-b629-a49bf8968a84","Type":"ContainerStarted","Data":"9cbb397445e55f3da8452a7504a4838c1fe8655ee2114ee735f38575e2c5c2f8"} Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.893249 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"981b9cf9-466d-4b4c-b629-a49bf8968a84","Type":"ContainerStarted","Data":"45dd8f618ebdc360f5c1be4fac1cdc02141a30efcc53c7a8ab8142c9ef5375bb"} Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.909110 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" podStartSLOduration=79.909090003 podStartE2EDuration="1m19.909090003s" podCreationTimestamp="2026-02-03 06:46:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:41.906085651 +0000 UTC m=+100.192779457" watchObservedRunningTime="2026-02-03 06:47:41.909090003 +0000 UTC m=+100.195783809" Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.929311 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=1.929290615 
podStartE2EDuration="1.929290615s" podCreationTimestamp="2026-02-03 06:47:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:41.926129838 +0000 UTC m=+100.212823664" watchObservedRunningTime="2026-02-03 06:47:41.929290615 +0000 UTC m=+100.215984431" Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.944003 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xhhkb" event={"ID":"c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696","Type":"ContainerStarted","Data":"8089954fd218a5fad602012eac2a25203f15de08d351e3a0c3c388969545ccfe"} Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.944058 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xhhkb" event={"ID":"c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696","Type":"ContainerStarted","Data":"058656f79c56160783cba0dc4191aa1fd0e516ed237cb316fcaa2f521046fcab"} Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.954270 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-s5wml" event={"ID":"7df2ae20-0aeb-4b1e-a408-c1903e061833","Type":"ContainerStarted","Data":"ca2995284de47baf96d03526f8bd88ad47c1741d9b437c9b19f03c35e68cd85d"} Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.954310 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-s5wml" event={"ID":"7df2ae20-0aeb-4b1e-a408-c1903e061833","Type":"ContainerStarted","Data":"a77f5268054f78c0da6405ee704bb3282bf9a99bfb26da72124f21015600bf66"} Feb 03 06:47:41 crc kubenswrapper[4998]: I0203 06:47:41.995776 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-s5wml" podStartSLOduration=79.99575271 podStartE2EDuration="1m19.99575271s" podCreationTimestamp="2026-02-03 06:46:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:41.994160881 +0000 UTC m=+100.280854697" watchObservedRunningTime="2026-02-03 06:47:41.99575271 +0000 UTC m=+100.282446516" Feb 03 06:47:42 crc kubenswrapper[4998]: I0203 06:47:42.044515 4998 patch_prober.go:28] interesting pod/downloads-7954f5f757-l249c container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Feb 03 06:47:42 crc kubenswrapper[4998]: I0203 06:47:42.044580 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-l249c" podUID="9ed713f2-843a-4706-a6bc-2d59bd9d2c44" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Feb 03 06:47:42 crc kubenswrapper[4998]: I0203 06:47:42.051619 4998 patch_prober.go:28] interesting pod/downloads-7954f5f757-l249c container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Feb 03 06:47:42 crc kubenswrapper[4998]: I0203 06:47:42.051874 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-l249c" podUID="9ed713f2-843a-4706-a6bc-2d59bd9d2c44" containerName="download-server" probeResult="failure" output="Get 
\"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Feb 03 06:47:42 crc kubenswrapper[4998]: I0203 06:47:42.142518 4998 patch_prober.go:28] interesting pod/router-default-5444994796-6dmdn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 03 06:47:42 crc kubenswrapper[4998]: [-]has-synced failed: reason withheld Feb 03 06:47:42 crc kubenswrapper[4998]: [+]process-running ok Feb 03 06:47:42 crc kubenswrapper[4998]: healthz check failed Feb 03 06:47:42 crc kubenswrapper[4998]: I0203 06:47:42.142621 4998 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6dmdn" podUID="fce530e9-fb75-4bb1-af35-292bcdf4bc59" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 03 06:47:42 crc kubenswrapper[4998]: I0203 06:47:42.336012 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-br759"] Feb 03 06:47:42 crc kubenswrapper[4998]: I0203 06:47:42.456178 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Feb 03 06:47:42 crc kubenswrapper[4998]: I0203 06:47:42.499207 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-kqlxq"] Feb 03 06:47:42 crc kubenswrapper[4998]: I0203 06:47:42.501957 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kqlxq" Feb 03 06:47:42 crc kubenswrapper[4998]: I0203 06:47:42.513230 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kqlxq"] Feb 03 06:47:42 crc kubenswrapper[4998]: I0203 06:47:42.514324 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Feb 03 06:47:42 crc kubenswrapper[4998]: I0203 06:47:42.578842 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8395812-3fcd-41cd-910d-f195e7d94fbc-utilities\") pod \"redhat-operators-kqlxq\" (UID: \"d8395812-3fcd-41cd-910d-f195e7d94fbc\") " pod="openshift-marketplace/redhat-operators-kqlxq" Feb 03 06:47:42 crc kubenswrapper[4998]: I0203 06:47:42.578950 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vllrc\" (UniqueName: \"kubernetes.io/projected/d8395812-3fcd-41cd-910d-f195e7d94fbc-kube-api-access-vllrc\") pod \"redhat-operators-kqlxq\" (UID: \"d8395812-3fcd-41cd-910d-f195e7d94fbc\") " pod="openshift-marketplace/redhat-operators-kqlxq" Feb 03 06:47:42 crc kubenswrapper[4998]: I0203 06:47:42.579024 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8395812-3fcd-41cd-910d-f195e7d94fbc-catalog-content\") pod \"redhat-operators-kqlxq\" (UID: \"d8395812-3fcd-41cd-910d-f195e7d94fbc\") " pod="openshift-marketplace/redhat-operators-kqlxq" Feb 03 06:47:42 crc kubenswrapper[4998]: I0203 06:47:42.679893 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8395812-3fcd-41cd-910d-f195e7d94fbc-utilities\") pod 
\"redhat-operators-kqlxq\" (UID: \"d8395812-3fcd-41cd-910d-f195e7d94fbc\") " pod="openshift-marketplace/redhat-operators-kqlxq" Feb 03 06:47:42 crc kubenswrapper[4998]: I0203 06:47:42.679963 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vllrc\" (UniqueName: \"kubernetes.io/projected/d8395812-3fcd-41cd-910d-f195e7d94fbc-kube-api-access-vllrc\") pod \"redhat-operators-kqlxq\" (UID: \"d8395812-3fcd-41cd-910d-f195e7d94fbc\") " pod="openshift-marketplace/redhat-operators-kqlxq" Feb 03 06:47:42 crc kubenswrapper[4998]: I0203 06:47:42.680017 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8395812-3fcd-41cd-910d-f195e7d94fbc-catalog-content\") pod \"redhat-operators-kqlxq\" (UID: \"d8395812-3fcd-41cd-910d-f195e7d94fbc\") " pod="openshift-marketplace/redhat-operators-kqlxq" Feb 03 06:47:42 crc kubenswrapper[4998]: I0203 06:47:42.680535 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8395812-3fcd-41cd-910d-f195e7d94fbc-catalog-content\") pod \"redhat-operators-kqlxq\" (UID: \"d8395812-3fcd-41cd-910d-f195e7d94fbc\") " pod="openshift-marketplace/redhat-operators-kqlxq" Feb 03 06:47:42 crc kubenswrapper[4998]: I0203 06:47:42.680748 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8395812-3fcd-41cd-910d-f195e7d94fbc-utilities\") pod \"redhat-operators-kqlxq\" (UID: \"d8395812-3fcd-41cd-910d-f195e7d94fbc\") " pod="openshift-marketplace/redhat-operators-kqlxq" Feb 03 06:47:42 crc kubenswrapper[4998]: I0203 06:47:42.711081 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jppzk" Feb 03 06:47:42 crc kubenswrapper[4998]: I0203 06:47:42.715385 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vllrc\" (UniqueName: \"kubernetes.io/projected/d8395812-3fcd-41cd-910d-f195e7d94fbc-kube-api-access-vllrc\") pod \"redhat-operators-kqlxq\" (UID: \"d8395812-3fcd-41cd-910d-f195e7d94fbc\") " pod="openshift-marketplace/redhat-operators-kqlxq" Feb 03 06:47:42 crc kubenswrapper[4998]: I0203 06:47:42.836095 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kqlxq" Feb 03 06:47:42 crc kubenswrapper[4998]: I0203 06:47:42.912305 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-4f2p5"] Feb 03 06:47:42 crc kubenswrapper[4998]: I0203 06:47:42.913704 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4f2p5" Feb 03 06:47:42 crc kubenswrapper[4998]: I0203 06:47:42.938989 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4f2p5"] Feb 03 06:47:42 crc kubenswrapper[4998]: I0203 06:47:42.983384 4998 generic.go:334] "Generic (PLEG): container finished" podID="6ca9561b-1685-47eb-af5c-1eaf0266920d" containerID="f20b614c5a9480046315cf12ba4099415ff41a5f00cc36e661827510145c9e39" exitCode=0 Feb 03 06:47:42 crc kubenswrapper[4998]: I0203 06:47:42.983429 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-br759" event={"ID":"6ca9561b-1685-47eb-af5c-1eaf0266920d","Type":"ContainerDied","Data":"f20b614c5a9480046315cf12ba4099415ff41a5f00cc36e661827510145c9e39"} Feb 03 06:47:42 crc kubenswrapper[4998]: I0203 06:47:42.983492 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-br759" event={"ID":"6ca9561b-1685-47eb-af5c-1eaf0266920d","Type":"ContainerStarted","Data":"520c11b098661729592a414713f2327d2a84c54ce92ec3fe9d80f566cd35e5e6"} Feb 03 06:47:42 crc kubenswrapper[4998]: I0203 06:47:42.985701 4998 generic.go:334] "Generic (PLEG): container finished" podID="981b9cf9-466d-4b4c-b629-a49bf8968a84" containerID="9cbb397445e55f3da8452a7504a4838c1fe8655ee2114ee735f38575e2c5c2f8" exitCode=0 Feb 03 06:47:42 crc kubenswrapper[4998]: I0203 06:47:42.985775 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"981b9cf9-466d-4b4c-b629-a49bf8968a84","Type":"ContainerDied","Data":"9cbb397445e55f3da8452a7504a4838c1fe8655ee2114ee735f38575e2c5c2f8"} Feb 03 06:47:42 crc kubenswrapper[4998]: I0203 06:47:42.987577 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d66bt\" (UniqueName: \"kubernetes.io/projected/73c4b34c-ece7-46c1-bce9-daf4c661b302-kube-api-access-d66bt\") pod \"redhat-operators-4f2p5\" (UID: \"73c4b34c-ece7-46c1-bce9-daf4c661b302\") " pod="openshift-marketplace/redhat-operators-4f2p5" Feb 03 06:47:42 crc kubenswrapper[4998]: I0203 06:47:42.987609 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/73c4b34c-ece7-46c1-bce9-daf4c661b302-utilities\") pod \"redhat-operators-4f2p5\" (UID: \"73c4b34c-ece7-46c1-bce9-daf4c661b302\") " pod="openshift-marketplace/redhat-operators-4f2p5" Feb 03 06:47:42 crc kubenswrapper[4998]: I0203 06:47:42.987632 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/73c4b34c-ece7-46c1-bce9-daf4c661b302-catalog-content\") pod \"redhat-operators-4f2p5\" (UID: \"73c4b34c-ece7-46c1-bce9-daf4c661b302\") " pod="openshift-marketplace/redhat-operators-4f2p5" Feb 03 06:47:43 crc kubenswrapper[4998]: I0203 06:47:42.993741 4998 generic.go:334] "Generic (PLEG): container finished" podID="c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696" containerID="8089954fd218a5fad602012eac2a25203f15de08d351e3a0c3c388969545ccfe" exitCode=0 Feb 03 06:47:43 crc kubenswrapper[4998]: I0203 06:47:42.993834 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xhhkb" event={"ID":"c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696","Type":"ContainerDied","Data":"8089954fd218a5fad602012eac2a25203f15de08d351e3a0c3c388969545ccfe"} Feb 03 06:47:43 
crc kubenswrapper[4998]: I0203 06:47:43.010332 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-s5wml" event={"ID":"7df2ae20-0aeb-4b1e-a408-c1903e061833","Type":"ContainerStarted","Data":"a4cebf133cca9304f6a21768f72e13c12d67e7d40f75840f30d4909a910aff65"} Feb 03 06:47:43 crc kubenswrapper[4998]: I0203 06:47:43.011704 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-pbbqh" Feb 03 06:47:43 crc kubenswrapper[4998]: I0203 06:47:43.088856 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d66bt\" (UniqueName: \"kubernetes.io/projected/73c4b34c-ece7-46c1-bce9-daf4c661b302-kube-api-access-d66bt\") pod \"redhat-operators-4f2p5\" (UID: \"73c4b34c-ece7-46c1-bce9-daf4c661b302\") " pod="openshift-marketplace/redhat-operators-4f2p5" Feb 03 06:47:43 crc kubenswrapper[4998]: I0203 06:47:43.088927 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/73c4b34c-ece7-46c1-bce9-daf4c661b302-utilities\") pod \"redhat-operators-4f2p5\" (UID: \"73c4b34c-ece7-46c1-bce9-daf4c661b302\") " pod="openshift-marketplace/redhat-operators-4f2p5" Feb 03 06:47:43 crc kubenswrapper[4998]: I0203 06:47:43.088968 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/73c4b34c-ece7-46c1-bce9-daf4c661b302-catalog-content\") pod \"redhat-operators-4f2p5\" (UID: \"73c4b34c-ece7-46c1-bce9-daf4c661b302\") " pod="openshift-marketplace/redhat-operators-4f2p5" Feb 03 06:47:43 crc kubenswrapper[4998]: I0203 06:47:43.093335 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/73c4b34c-ece7-46c1-bce9-daf4c661b302-utilities\") pod \"redhat-operators-4f2p5\" (UID: \"73c4b34c-ece7-46c1-bce9-daf4c661b302\") " pod="openshift-marketplace/redhat-operators-4f2p5" Feb 03 06:47:43 crc kubenswrapper[4998]: I0203 06:47:43.094043 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/73c4b34c-ece7-46c1-bce9-daf4c661b302-catalog-content\") pod \"redhat-operators-4f2p5\" (UID: \"73c4b34c-ece7-46c1-bce9-daf4c661b302\") " pod="openshift-marketplace/redhat-operators-4f2p5" Feb 03 06:47:43 crc kubenswrapper[4998]: I0203 06:47:43.110726 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-6d7sj" Feb 03 06:47:43 crc kubenswrapper[4998]: I0203 06:47:43.117685 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d66bt\" (UniqueName: \"kubernetes.io/projected/73c4b34c-ece7-46c1-bce9-daf4c661b302-kube-api-access-d66bt\") pod \"redhat-operators-4f2p5\" (UID: \"73c4b34c-ece7-46c1-bce9-daf4c661b302\") " pod="openshift-marketplace/redhat-operators-4f2p5" Feb 03 06:47:43 crc kubenswrapper[4998]: I0203 06:47:43.141986 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-6dmdn" Feb 03 06:47:43 crc kubenswrapper[4998]: I0203 06:47:43.156075 4998 patch_prober.go:28] interesting pod/router-default-5444994796-6dmdn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 03 06:47:43 crc 
kubenswrapper[4998]: [-]has-synced failed: reason withheld Feb 03 06:47:43 crc kubenswrapper[4998]: [+]process-running ok Feb 03 06:47:43 crc kubenswrapper[4998]: healthz check failed Feb 03 06:47:43 crc kubenswrapper[4998]: I0203 06:47:43.156125 4998 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6dmdn" podUID="fce530e9-fb75-4bb1-af35-292bcdf4bc59" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 03 06:47:43 crc kubenswrapper[4998]: I0203 06:47:43.243580 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4f2p5" Feb 03 06:47:43 crc kubenswrapper[4998]: I0203 06:47:43.339403 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kqlxq"] Feb 03 06:47:43 crc kubenswrapper[4998]: I0203 06:47:43.428378 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:47:44 crc kubenswrapper[4998]: I0203 06:47:44.009190 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4f2p5"] Feb 03 06:47:44 crc kubenswrapper[4998]: W0203 06:47:44.052001 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod73c4b34c_ece7_46c1_bce9_daf4c661b302.slice/crio-1fc27be1cca385f629f91c518d2e1972ac1b8216e8ac85062a5f90c436380302 WatchSource:0}: Error finding container 1fc27be1cca385f629f91c518d2e1972ac1b8216e8ac85062a5f90c436380302: Status 404 returned error can't find the container with id 1fc27be1cca385f629f91c518d2e1972ac1b8216e8ac85062a5f90c436380302 Feb 03 06:47:44 crc kubenswrapper[4998]: I0203 06:47:44.122371 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kqlxq" event={"ID":"d8395812-3fcd-41cd-910d-f195e7d94fbc","Type":"ContainerStarted","Data":"248c47ff93d9c7a9ebe0c8f5cbd8c890c67ffe1ac085bc9efb34b90859da82f2"} Feb 03 06:47:44 crc kubenswrapper[4998]: I0203 06:47:44.146954 4998 patch_prober.go:28] interesting pod/router-default-5444994796-6dmdn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 03 06:47:44 crc kubenswrapper[4998]: [-]has-synced failed: reason withheld Feb 03 06:47:44 crc kubenswrapper[4998]: [+]process-running ok Feb 03 06:47:44 crc kubenswrapper[4998]: healthz check failed Feb 03 06:47:44 crc kubenswrapper[4998]: I0203 06:47:44.147039 4998 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6dmdn" podUID="fce530e9-fb75-4bb1-af35-292bcdf4bc59" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 03 06:47:44 crc kubenswrapper[4998]: I0203 06:47:44.522810 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 03 06:47:44 crc kubenswrapper[4998]: I0203 06:47:44.551629 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/981b9cf9-466d-4b4c-b629-a49bf8968a84-kube-api-access\") pod \"981b9cf9-466d-4b4c-b629-a49bf8968a84\" (UID: \"981b9cf9-466d-4b4c-b629-a49bf8968a84\") " Feb 03 06:47:44 crc kubenswrapper[4998]: I0203 06:47:44.551673 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/981b9cf9-466d-4b4c-b629-a49bf8968a84-kubelet-dir\") pod \"981b9cf9-466d-4b4c-b629-a49bf8968a84\" (UID: \"981b9cf9-466d-4b4c-b629-a49bf8968a84\") " Feb 03 06:47:44 crc kubenswrapper[4998]: I0203 06:47:44.552285 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/981b9cf9-466d-4b4c-b629-a49bf8968a84-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "981b9cf9-466d-4b4c-b629-a49bf8968a84" (UID: "981b9cf9-466d-4b4c-b629-a49bf8968a84"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 06:47:44 crc kubenswrapper[4998]: I0203 06:47:44.559675 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/981b9cf9-466d-4b4c-b629-a49bf8968a84-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "981b9cf9-466d-4b4c-b629-a49bf8968a84" (UID: "981b9cf9-466d-4b4c-b629-a49bf8968a84"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:47:44 crc kubenswrapper[4998]: I0203 06:47:44.653682 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/981b9cf9-466d-4b4c-b629-a49bf8968a84-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 03 06:47:44 crc kubenswrapper[4998]: I0203 06:47:44.653729 4998 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/981b9cf9-466d-4b4c-b629-a49bf8968a84-kubelet-dir\") on node \"crc\" DevicePath \"\"" Feb 03 06:47:44 crc kubenswrapper[4998]: I0203 06:47:44.866384 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Feb 03 06:47:44 crc kubenswrapper[4998]: E0203 06:47:44.866634 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="981b9cf9-466d-4b4c-b629-a49bf8968a84" containerName="pruner" Feb 03 06:47:44 crc kubenswrapper[4998]: I0203 06:47:44.866645 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="981b9cf9-466d-4b4c-b629-a49bf8968a84" containerName="pruner" Feb 03 06:47:44 crc kubenswrapper[4998]: I0203 06:47:44.866752 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="981b9cf9-466d-4b4c-b629-a49bf8968a84" containerName="pruner" Feb 03 06:47:44 crc kubenswrapper[4998]: I0203 06:47:44.867244 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 03 06:47:44 crc kubenswrapper[4998]: I0203 06:47:44.882222 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Feb 03 06:47:44 crc kubenswrapper[4998]: I0203 06:47:44.882440 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Feb 03 06:47:44 crc kubenswrapper[4998]: I0203 06:47:44.890359 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Feb 03 06:47:44 crc kubenswrapper[4998]: I0203 06:47:44.958685 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/549c9142-1ec1-460d-9461-84112fa6436f-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"549c9142-1ec1-460d-9461-84112fa6436f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 03 06:47:44 crc kubenswrapper[4998]: I0203 06:47:44.958783 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/549c9142-1ec1-460d-9461-84112fa6436f-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"549c9142-1ec1-460d-9461-84112fa6436f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 03 06:47:45 crc kubenswrapper[4998]: I0203 06:47:45.064525 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/549c9142-1ec1-460d-9461-84112fa6436f-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"549c9142-1ec1-460d-9461-84112fa6436f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 03 06:47:45 crc kubenswrapper[4998]: I0203 06:47:45.064630 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/549c9142-1ec1-460d-9461-84112fa6436f-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"549c9142-1ec1-460d-9461-84112fa6436f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 03 06:47:45 crc kubenswrapper[4998]: I0203 06:47:45.064978 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/549c9142-1ec1-460d-9461-84112fa6436f-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"549c9142-1ec1-460d-9461-84112fa6436f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 03 06:47:45 crc kubenswrapper[4998]: I0203 06:47:45.104874 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/549c9142-1ec1-460d-9461-84112fa6436f-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"549c9142-1ec1-460d-9461-84112fa6436f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 03 06:47:45 crc kubenswrapper[4998]: I0203 06:47:45.141647 4998 patch_prober.go:28] interesting pod/router-default-5444994796-6dmdn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 03 06:47:45 crc kubenswrapper[4998]: [-]has-synced failed: reason withheld Feb 03 06:47:45 crc kubenswrapper[4998]: [+]process-running ok Feb 03 06:47:45 crc kubenswrapper[4998]: healthz check failed Feb 03 06:47:45 crc kubenswrapper[4998]: I0203 06:47:45.141720 4998 
prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6dmdn" podUID="fce530e9-fb75-4bb1-af35-292bcdf4bc59" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 03 06:47:45 crc kubenswrapper[4998]: I0203 06:47:45.142759 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"981b9cf9-466d-4b4c-b629-a49bf8968a84","Type":"ContainerDied","Data":"45dd8f618ebdc360f5c1be4fac1cdc02141a30efcc53c7a8ab8142c9ef5375bb"} Feb 03 06:47:45 crc kubenswrapper[4998]: I0203 06:47:45.142880 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="45dd8f618ebdc360f5c1be4fac1cdc02141a30efcc53c7a8ab8142c9ef5375bb" Feb 03 06:47:45 crc kubenswrapper[4998]: I0203 06:47:45.142939 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 03 06:47:45 crc kubenswrapper[4998]: I0203 06:47:45.153427 4998 generic.go:334] "Generic (PLEG): container finished" podID="d8395812-3fcd-41cd-910d-f195e7d94fbc" containerID="87278e48b9de53d6d1cf78284a5f0c3ff3bff4afc50069d979429220afb88c72" exitCode=0 Feb 03 06:47:45 crc kubenswrapper[4998]: I0203 06:47:45.155342 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kqlxq" event={"ID":"d8395812-3fcd-41cd-910d-f195e7d94fbc","Type":"ContainerDied","Data":"87278e48b9de53d6d1cf78284a5f0c3ff3bff4afc50069d979429220afb88c72"} Feb 03 06:47:45 crc kubenswrapper[4998]: I0203 06:47:45.158333 4998 generic.go:334] "Generic (PLEG): container finished" podID="73c4b34c-ece7-46c1-bce9-daf4c661b302" containerID="754cb93dedc27a0efd10e3a5afe8eecb298da972f7788df58155723da3aaa611" exitCode=0 Feb 03 06:47:45 crc kubenswrapper[4998]: I0203 06:47:45.158366 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4f2p5" event={"ID":"73c4b34c-ece7-46c1-bce9-daf4c661b302","Type":"ContainerDied","Data":"754cb93dedc27a0efd10e3a5afe8eecb298da972f7788df58155723da3aaa611"} Feb 03 06:47:45 crc kubenswrapper[4998]: I0203 06:47:45.158394 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4f2p5" event={"ID":"73c4b34c-ece7-46c1-bce9-daf4c661b302","Type":"ContainerStarted","Data":"1fc27be1cca385f629f91c518d2e1972ac1b8216e8ac85062a5f90c436380302"} Feb 03 06:47:45 crc kubenswrapper[4998]: I0203 06:47:45.196940 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 03 06:47:45 crc kubenswrapper[4998]: I0203 06:47:45.736681 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Feb 03 06:47:45 crc kubenswrapper[4998]: W0203 06:47:45.772321 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod549c9142_1ec1_460d_9461_84112fa6436f.slice/crio-d32be715a97f9349b7d2f33c38c7d1142972bec442a0daddd4cc6b00cd372b9d WatchSource:0}: Error finding container d32be715a97f9349b7d2f33c38c7d1142972bec442a0daddd4cc6b00cd372b9d: Status 404 returned error can't find the container with id d32be715a97f9349b7d2f33c38c7d1142972bec442a0daddd4cc6b00cd372b9d Feb 03 06:47:46 crc kubenswrapper[4998]: I0203 06:47:46.142134 4998 patch_prober.go:28] interesting pod/router-default-5444994796-6dmdn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 03 06:47:46 crc kubenswrapper[4998]: [-]has-synced failed: reason withheld Feb 03 06:47:46 crc kubenswrapper[4998]: [+]process-running ok Feb 03 06:47:46 crc kubenswrapper[4998]: healthz check failed Feb 03 06:47:46 crc kubenswrapper[4998]: I0203 06:47:46.142508 4998 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6dmdn" podUID="fce530e9-fb75-4bb1-af35-292bcdf4bc59" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 03 06:47:46 crc kubenswrapper[4998]: I0203 06:47:46.180044 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"549c9142-1ec1-460d-9461-84112fa6436f","Type":"ContainerStarted","Data":"d32be715a97f9349b7d2f33c38c7d1142972bec442a0daddd4cc6b00cd372b9d"} Feb 03 06:47:47 crc kubenswrapper[4998]: I0203 06:47:47.145140 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-6dmdn" Feb 03 06:47:47 crc kubenswrapper[4998]: I0203 06:47:47.148566 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-6dmdn" Feb 03 06:47:47 crc kubenswrapper[4998]: I0203 06:47:47.201176 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"549c9142-1ec1-460d-9461-84112fa6436f","Type":"ContainerStarted","Data":"2bee76458a0a388ed3e24da4008371ffb0c41d076f5c66a9e69d1f0392938828"} Feb 03 06:47:47 crc kubenswrapper[4998]: I0203 06:47:47.221387 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=3.221366059 podStartE2EDuration="3.221366059s" podCreationTimestamp="2026-02-03 06:47:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:47:47.219948625 +0000 UTC m=+105.506642441" watchObservedRunningTime="2026-02-03 06:47:47.221366059 +0000 UTC m=+105.508059875" Feb 03 06:47:47 crc kubenswrapper[4998]: I0203 06:47:47.842745 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-cp5t2" Feb 03 06:47:48 crc kubenswrapper[4998]: I0203 06:47:48.220346 4998 generic.go:334] "Generic (PLEG): container finished" podID="549c9142-1ec1-460d-9461-84112fa6436f" 
containerID="2bee76458a0a388ed3e24da4008371ffb0c41d076f5c66a9e69d1f0392938828" exitCode=0 Feb 03 06:47:48 crc kubenswrapper[4998]: I0203 06:47:48.220394 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"549c9142-1ec1-460d-9461-84112fa6436f","Type":"ContainerDied","Data":"2bee76458a0a388ed3e24da4008371ffb0c41d076f5c66a9e69d1f0392938828"} Feb 03 06:47:50 crc kubenswrapper[4998]: I0203 06:47:50.614334 4998 patch_prober.go:28] interesting pod/console-f9d7485db-2wmmh container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.17:8443/health\": dial tcp 10.217.0.17:8443: connect: connection refused" start-of-body= Feb 03 06:47:50 crc kubenswrapper[4998]: I0203 06:47:50.614728 4998 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-2wmmh" podUID="e43c1b92-4c7b-4db9-8363-472ecea3213f" containerName="console" probeResult="failure" output="Get \"https://10.217.0.17:8443/health\": dial tcp 10.217.0.17:8443: connect: connection refused" Feb 03 06:47:52 crc kubenswrapper[4998]: I0203 06:47:52.043141 4998 patch_prober.go:28] interesting pod/downloads-7954f5f757-l249c container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Feb 03 06:47:52 crc kubenswrapper[4998]: I0203 06:47:52.043206 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-l249c" podUID="9ed713f2-843a-4706-a6bc-2d59bd9d2c44" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Feb 03 06:47:52 crc kubenswrapper[4998]: I0203 06:47:52.047463 4998 patch_prober.go:28] interesting pod/downloads-7954f5f757-l249c container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Feb 03 06:47:52 crc kubenswrapper[4998]: I0203 06:47:52.047509 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-l249c" podUID="9ed713f2-843a-4706-a6bc-2d59bd9d2c44" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Feb 03 06:47:55 crc kubenswrapper[4998]: I0203 06:47:55.364489 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 03 06:47:55 crc kubenswrapper[4998]: I0203 06:47:55.465064 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/549c9142-1ec1-460d-9461-84112fa6436f-kube-api-access\") pod \"549c9142-1ec1-460d-9461-84112fa6436f\" (UID: \"549c9142-1ec1-460d-9461-84112fa6436f\") " Feb 03 06:47:55 crc kubenswrapper[4998]: I0203 06:47:55.465129 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/549c9142-1ec1-460d-9461-84112fa6436f-kubelet-dir\") pod \"549c9142-1ec1-460d-9461-84112fa6436f\" (UID: \"549c9142-1ec1-460d-9461-84112fa6436f\") " Feb 03 06:47:55 crc kubenswrapper[4998]: I0203 06:47:55.465358 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/549c9142-1ec1-460d-9461-84112fa6436f-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "549c9142-1ec1-460d-9461-84112fa6436f" (UID: "549c9142-1ec1-460d-9461-84112fa6436f"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 06:47:55 crc kubenswrapper[4998]: I0203 06:47:55.470227 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/549c9142-1ec1-460d-9461-84112fa6436f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "549c9142-1ec1-460d-9461-84112fa6436f" (UID: "549c9142-1ec1-460d-9461-84112fa6436f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:47:55 crc kubenswrapper[4998]: I0203 06:47:55.567244 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/549c9142-1ec1-460d-9461-84112fa6436f-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 03 06:47:55 crc kubenswrapper[4998]: I0203 06:47:55.567296 4998 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/549c9142-1ec1-460d-9461-84112fa6436f-kubelet-dir\") on node \"crc\" DevicePath \"\"" Feb 03 06:47:56 crc kubenswrapper[4998]: I0203 06:47:56.302489 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"549c9142-1ec1-460d-9461-84112fa6436f","Type":"ContainerDied","Data":"d32be715a97f9349b7d2f33c38c7d1142972bec442a0daddd4cc6b00cd372b9d"} Feb 03 06:47:56 crc kubenswrapper[4998]: I0203 06:47:56.303135 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d32be715a97f9349b7d2f33c38c7d1142972bec442a0daddd4cc6b00cd372b9d" Feb 03 06:47:56 crc kubenswrapper[4998]: I0203 06:47:56.302681 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 03 06:48:00 crc kubenswrapper[4998]: I0203 06:48:00.618204 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-2wmmh" Feb 03 06:48:00 crc kubenswrapper[4998]: I0203 06:48:00.626627 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-2wmmh" Feb 03 06:48:01 crc kubenswrapper[4998]: I0203 06:48:01.006860 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:48:02 crc kubenswrapper[4998]: I0203 06:48:02.067438 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-l249c" Feb 03 06:48:12 crc kubenswrapper[4998]: I0203 06:48:12.766136 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-hgcng" Feb 03 06:48:14 crc kubenswrapper[4998]: E0203 06:48:14.313799 4998 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Feb 03 06:48:14 crc kubenswrapper[4998]: E0203 06:48:14.314355 4998 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tzp47,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-br759_openshift-marketplace(6ca9561b-1685-47eb-af5c-1eaf0266920d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 03 06:48:14 crc kubenswrapper[4998]: E0203 06:48:14.315532 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying 
config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-br759" podUID="6ca9561b-1685-47eb-af5c-1eaf0266920d" Feb 03 06:48:17 crc kubenswrapper[4998]: E0203 06:48:17.569518 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-br759" podUID="6ca9561b-1685-47eb-af5c-1eaf0266920d" Feb 03 06:48:20 crc kubenswrapper[4998]: I0203 06:48:20.859198 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Feb 03 06:48:20 crc kubenswrapper[4998]: E0203 06:48:20.860057 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="549c9142-1ec1-460d-9461-84112fa6436f" containerName="pruner" Feb 03 06:48:20 crc kubenswrapper[4998]: I0203 06:48:20.860077 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="549c9142-1ec1-460d-9461-84112fa6436f" containerName="pruner" Feb 03 06:48:20 crc kubenswrapper[4998]: I0203 06:48:20.860220 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="549c9142-1ec1-460d-9461-84112fa6436f" containerName="pruner" Feb 03 06:48:20 crc kubenswrapper[4998]: I0203 06:48:20.860607 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Feb 03 06:48:20 crc kubenswrapper[4998]: I0203 06:48:20.860697 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 03 06:48:20 crc kubenswrapper[4998]: I0203 06:48:20.863300 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Feb 03 06:48:20 crc kubenswrapper[4998]: I0203 06:48:20.863548 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Feb 03 06:48:20 crc kubenswrapper[4998]: I0203 06:48:20.938589 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a47b8c4a-b3ea-45da-9a08-2d96ab32e3b0-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"a47b8c4a-b3ea-45da-9a08-2d96ab32e3b0\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 03 06:48:20 crc kubenswrapper[4998]: I0203 06:48:20.938664 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a47b8c4a-b3ea-45da-9a08-2d96ab32e3b0-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"a47b8c4a-b3ea-45da-9a08-2d96ab32e3b0\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 03 06:48:21 crc kubenswrapper[4998]: I0203 06:48:21.040345 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a47b8c4a-b3ea-45da-9a08-2d96ab32e3b0-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"a47b8c4a-b3ea-45da-9a08-2d96ab32e3b0\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 03 06:48:21 crc kubenswrapper[4998]: I0203 06:48:21.040417 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a47b8c4a-b3ea-45da-9a08-2d96ab32e3b0-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"a47b8c4a-b3ea-45da-9a08-2d96ab32e3b0\") " 
pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 03 06:48:21 crc kubenswrapper[4998]: I0203 06:48:21.040607 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a47b8c4a-b3ea-45da-9a08-2d96ab32e3b0-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"a47b8c4a-b3ea-45da-9a08-2d96ab32e3b0\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 03 06:48:21 crc kubenswrapper[4998]: I0203 06:48:21.059164 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a47b8c4a-b3ea-45da-9a08-2d96ab32e3b0-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"a47b8c4a-b3ea-45da-9a08-2d96ab32e3b0\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 03 06:48:21 crc kubenswrapper[4998]: I0203 06:48:21.237507 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 03 06:48:21 crc kubenswrapper[4998]: E0203 06:48:21.354723 4998 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Feb 03 06:48:21 crc kubenswrapper[4998]: E0203 06:48:21.354913 4998 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vllrc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-kqlxq_openshift-marketplace(d8395812-3fcd-41cd-910d-f195e7d94fbc): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 03 06:48:21 crc kubenswrapper[4998]: E0203 06:48:21.356902 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" 
pod="openshift-marketplace/redhat-operators-kqlxq" podUID="d8395812-3fcd-41cd-910d-f195e7d94fbc" Feb 03 06:48:21 crc kubenswrapper[4998]: E0203 06:48:21.426154 4998 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Feb 03 06:48:21 crc kubenswrapper[4998]: E0203 06:48:21.426407 4998 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9hnkq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-zmxrc_openshift-marketplace(efee4303-e909-4341-b837-326fda8a64e1): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 03 06:48:21 crc kubenswrapper[4998]: E0203 06:48:21.427798 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-zmxrc" podUID="efee4303-e909-4341-b837-326fda8a64e1" Feb 03 06:48:22 crc kubenswrapper[4998]: E0203 06:48:22.544680 4998 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Feb 03 06:48:22 crc kubenswrapper[4998]: E0203 06:48:22.545324 4998 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-d66bt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-4f2p5_openshift-marketplace(73c4b34c-ece7-46c1-bce9-daf4c661b302): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 03 06:48:22 crc kubenswrapper[4998]: E0203 06:48:22.546484 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-4f2p5" podUID="73c4b34c-ece7-46c1-bce9-daf4c661b302" Feb 03 06:48:25 crc kubenswrapper[4998]: E0203 06:48:25.455256 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-zmxrc" podUID="efee4303-e909-4341-b837-326fda8a64e1" Feb 03 06:48:25 crc kubenswrapper[4998]: E0203 06:48:25.455266 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-kqlxq" podUID="d8395812-3fcd-41cd-910d-f195e7d94fbc" Feb 03 06:48:25 crc kubenswrapper[4998]: E0203 06:48:25.470740 4998 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Feb 03 06:48:25 crc kubenswrapper[4998]: E0203 06:48:25.471051 4998 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mkz2b,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-xhhkb_openshift-marketplace(c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 03 06:48:25 crc kubenswrapper[4998]: E0203 06:48:25.473121 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-xhhkb" podUID="c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696" Feb 03 06:48:25 crc kubenswrapper[4998]: I0203 06:48:25.649890 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Feb 03 06:48:25 crc kubenswrapper[4998]: I0203 06:48:25.650518 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Feb 03 06:48:25 crc kubenswrapper[4998]: I0203 06:48:25.660894 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Feb 03 06:48:25 crc kubenswrapper[4998]: I0203 06:48:25.809545 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/55150a43-6d33-4ac4-b65e-04c8e15a8376-kube-api-access\") pod \"installer-9-crc\" (UID: \"55150a43-6d33-4ac4-b65e-04c8e15a8376\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 03 06:48:25 crc kubenswrapper[4998]: I0203 06:48:25.809598 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/55150a43-6d33-4ac4-b65e-04c8e15a8376-kubelet-dir\") pod \"installer-9-crc\" (UID: \"55150a43-6d33-4ac4-b65e-04c8e15a8376\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 03 06:48:25 crc kubenswrapper[4998]: I0203 06:48:25.809841 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/55150a43-6d33-4ac4-b65e-04c8e15a8376-var-lock\") pod \"installer-9-crc\" (UID: \"55150a43-6d33-4ac4-b65e-04c8e15a8376\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 03 06:48:25 crc kubenswrapper[4998]: I0203 06:48:25.874187 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Feb 03 06:48:25 crc kubenswrapper[4998]: I0203 06:48:25.910535 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/55150a43-6d33-4ac4-b65e-04c8e15a8376-var-lock\") pod \"installer-9-crc\" (UID: \"55150a43-6d33-4ac4-b65e-04c8e15a8376\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 03 06:48:25 crc kubenswrapper[4998]: I0203 06:48:25.910732 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/55150a43-6d33-4ac4-b65e-04c8e15a8376-var-lock\") pod \"installer-9-crc\" (UID: \"55150a43-6d33-4ac4-b65e-04c8e15a8376\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 03 06:48:25 crc kubenswrapper[4998]: I0203 06:48:25.911148 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/55150a43-6d33-4ac4-b65e-04c8e15a8376-kube-api-access\") pod \"installer-9-crc\" (UID: \"55150a43-6d33-4ac4-b65e-04c8e15a8376\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 03 06:48:25 crc kubenswrapper[4998]: I0203 06:48:25.911173 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/55150a43-6d33-4ac4-b65e-04c8e15a8376-kubelet-dir\") pod \"installer-9-crc\" (UID: \"55150a43-6d33-4ac4-b65e-04c8e15a8376\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 03 06:48:25 crc kubenswrapper[4998]: I0203 06:48:25.911619 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/55150a43-6d33-4ac4-b65e-04c8e15a8376-kubelet-dir\") pod \"installer-9-crc\" (UID: \"55150a43-6d33-4ac4-b65e-04c8e15a8376\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 03 06:48:25 crc kubenswrapper[4998]: I0203 06:48:25.936061 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/55150a43-6d33-4ac4-b65e-04c8e15a8376-kube-api-access\") pod \"installer-9-crc\" (UID: \"55150a43-6d33-4ac4-b65e-04c8e15a8376\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 03 06:48:25 crc kubenswrapper[4998]: I0203 06:48:25.987167 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Feb 03 06:48:26 crc kubenswrapper[4998]: I0203 06:48:26.469658 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"a47b8c4a-b3ea-45da-9a08-2d96ab32e3b0","Type":"ContainerStarted","Data":"59ea7734e480362a4c54493cb4c85139dd1c56398f713a54d1b754b332bfb310"} Feb 03 06:48:26 crc kubenswrapper[4998]: E0203 06:48:26.471173 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-xhhkb" podUID="c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696" Feb 03 06:48:26 crc kubenswrapper[4998]: I0203 06:48:26.527889 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Feb 03 06:48:27 crc kubenswrapper[4998]: I0203 06:48:27.476447 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"a47b8c4a-b3ea-45da-9a08-2d96ab32e3b0","Type":"ContainerStarted","Data":"436014ebabd4102a1abb05073e1cf50cd23c852e9bc3975e831e7770b7aa4460"} Feb 03 06:48:27 crc kubenswrapper[4998]: I0203 06:48:27.477497 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"55150a43-6d33-4ac4-b65e-04c8e15a8376","Type":"ContainerStarted","Data":"de5ca64e8513bc4e227873f25181c6f0d7c0bfa86696c7c9c1c4697861207caf"} Feb 03 06:48:28 crc kubenswrapper[4998]: E0203 06:48:28.608719 4998 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Feb 03 06:48:28 crc kubenswrapper[4998]: E0203 06:48:28.609337 4998 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-q5zxx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-zk44q_openshift-marketplace(f676cd7f-61b9-43e5-9ca0-4a3deddecff3): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 03 06:48:28 crc kubenswrapper[4998]: E0203 06:48:28.610630 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-zk44q" podUID="f676cd7f-61b9-43e5-9ca0-4a3deddecff3" Feb 03 06:48:29 crc kubenswrapper[4998]: I0203 06:48:29.491696 4998 generic.go:334] "Generic (PLEG): container finished" podID="a47b8c4a-b3ea-45da-9a08-2d96ab32e3b0" containerID="436014ebabd4102a1abb05073e1cf50cd23c852e9bc3975e831e7770b7aa4460" exitCode=0 Feb 03 06:48:29 crc kubenswrapper[4998]: I0203 06:48:29.491770 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"a47b8c4a-b3ea-45da-9a08-2d96ab32e3b0","Type":"ContainerDied","Data":"436014ebabd4102a1abb05073e1cf50cd23c852e9bc3975e831e7770b7aa4460"} Feb 03 06:48:29 crc kubenswrapper[4998]: I0203 06:48:29.493306 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"55150a43-6d33-4ac4-b65e-04c8e15a8376","Type":"ContainerStarted","Data":"c61928c2032bdb02b0da4ca31bf3f630c6602b64b9f89f980ce7de6dfde5b074"} Feb 03 06:48:29 crc kubenswrapper[4998]: E0203 06:48:29.494931 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-zk44q" podUID="f676cd7f-61b9-43e5-9ca0-4a3deddecff3" Feb 03 06:48:29 crc kubenswrapper[4998]: I0203 06:48:29.519195 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=4.519177093 podStartE2EDuration="4.519177093s" podCreationTimestamp="2026-02-03 06:48:25 
+0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:48:29.518640786 +0000 UTC m=+147.805334622" watchObservedRunningTime="2026-02-03 06:48:29.519177093 +0000 UTC m=+147.805870899" Feb 03 06:48:30 crc kubenswrapper[4998]: E0203 06:48:30.131505 4998 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Feb 03 06:48:30 crc kubenswrapper[4998]: E0203 06:48:30.132907 4998 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kkk89,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-95844_openshift-marketplace(3c650fe1-e6ec-4079-b7ab-a0fb0683ac03): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 03 06:48:30 crc kubenswrapper[4998]: E0203 06:48:30.134367 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-95844" podUID="3c650fe1-e6ec-4079-b7ab-a0fb0683ac03" Feb 03 06:48:30 crc kubenswrapper[4998]: E0203 06:48:30.188528 4998 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Feb 03 06:48:30 crc kubenswrapper[4998]: E0203 06:48:30.188874 4998 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog 
--cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vl62j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-z97nt_openshift-marketplace(84a91c75-202c-449d-b70a-569fafdf09fa): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 03 06:48:30 crc kubenswrapper[4998]: E0203 06:48:30.190344 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-z97nt" podUID="84a91c75-202c-449d-b70a-569fafdf09fa" Feb 03 06:48:30 crc kubenswrapper[4998]: I0203 06:48:30.378087 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:48:30 crc kubenswrapper[4998]: I0203 06:48:30.378235 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:48:30 crc kubenswrapper[4998]: I0203 06:48:30.378309 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:48:30 crc kubenswrapper[4998]: I0203 06:48:30.378379 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: 
\"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 06:48:30 crc kubenswrapper[4998]: I0203 06:48:30.380707 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Feb 03 06:48:30 crc kubenswrapper[4998]: I0203 06:48:30.380755 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Feb 03 06:48:30 crc kubenswrapper[4998]: I0203 06:48:30.381241 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Feb 03 06:48:30 crc kubenswrapper[4998]: I0203 06:48:30.389697 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Feb 03 06:48:30 crc kubenswrapper[4998]: I0203 06:48:30.390142 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:48:30 crc kubenswrapper[4998]: I0203 06:48:30.395145 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:48:30 crc kubenswrapper[4998]: I0203 06:48:30.402584 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 06:48:30 crc kubenswrapper[4998]: I0203 06:48:30.402622 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:48:30 crc kubenswrapper[4998]: I0203 06:48:30.453721 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:48:30 crc kubenswrapper[4998]: I0203 06:48:30.466164 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 06:48:30 crc kubenswrapper[4998]: I0203 06:48:30.473169 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 06:48:30 crc kubenswrapper[4998]: E0203 06:48:30.503267 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-95844" podUID="3c650fe1-e6ec-4079-b7ab-a0fb0683ac03" Feb 03 06:48:30 crc kubenswrapper[4998]: E0203 06:48:30.504709 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-z97nt" podUID="84a91c75-202c-449d-b70a-569fafdf09fa" Feb 03 06:48:30 crc kubenswrapper[4998]: I0203 06:48:30.799661 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 03 06:48:30 crc kubenswrapper[4998]: I0203 06:48:30.885305 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a47b8c4a-b3ea-45da-9a08-2d96ab32e3b0-kubelet-dir\") pod \"a47b8c4a-b3ea-45da-9a08-2d96ab32e3b0\" (UID: \"a47b8c4a-b3ea-45da-9a08-2d96ab32e3b0\") " Feb 03 06:48:30 crc kubenswrapper[4998]: I0203 06:48:30.885372 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a47b8c4a-b3ea-45da-9a08-2d96ab32e3b0-kube-api-access\") pod \"a47b8c4a-b3ea-45da-9a08-2d96ab32e3b0\" (UID: \"a47b8c4a-b3ea-45da-9a08-2d96ab32e3b0\") " Feb 03 06:48:30 crc kubenswrapper[4998]: I0203 06:48:30.885466 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a47b8c4a-b3ea-45da-9a08-2d96ab32e3b0-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "a47b8c4a-b3ea-45da-9a08-2d96ab32e3b0" (UID: "a47b8c4a-b3ea-45da-9a08-2d96ab32e3b0"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 06:48:30 crc kubenswrapper[4998]: I0203 06:48:30.891795 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a47b8c4a-b3ea-45da-9a08-2d96ab32e3b0-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "a47b8c4a-b3ea-45da-9a08-2d96ab32e3b0" (UID: "a47b8c4a-b3ea-45da-9a08-2d96ab32e3b0"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:48:30 crc kubenswrapper[4998]: I0203 06:48:30.986274 4998 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a47b8c4a-b3ea-45da-9a08-2d96ab32e3b0-kubelet-dir\") on node \"crc\" DevicePath \"\"" Feb 03 06:48:30 crc kubenswrapper[4998]: I0203 06:48:30.986300 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a47b8c4a-b3ea-45da-9a08-2d96ab32e3b0-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 03 06:48:31 crc kubenswrapper[4998]: I0203 06:48:31.517691 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"fcacdea2e0f7b1adc50702c8757382b75c293865ba9a10a7af4cfd31b678fa5f"} Feb 03 06:48:31 crc kubenswrapper[4998]: I0203 06:48:31.518155 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"e8da82c9eb188510026fbad38b1c1414ac68364ae504d3694fef51a7dac0939c"} Feb 03 06:48:31 crc kubenswrapper[4998]: I0203 06:48:31.519866 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 03 06:48:31 crc kubenswrapper[4998]: I0203 06:48:31.519889 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"a47b8c4a-b3ea-45da-9a08-2d96ab32e3b0","Type":"ContainerDied","Data":"59ea7734e480362a4c54493cb4c85139dd1c56398f713a54d1b754b332bfb310"} Feb 03 06:48:31 crc kubenswrapper[4998]: I0203 06:48:31.519961 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="59ea7734e480362a4c54493cb4c85139dd1c56398f713a54d1b754b332bfb310" Feb 03 06:48:31 crc kubenswrapper[4998]: I0203 06:48:31.524602 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"5f54fe6c2aa01d57ddcca49c7faed4ac220a73cbabbbd95a37964603d7480aab"} Feb 03 06:48:31 crc kubenswrapper[4998]: I0203 06:48:31.524639 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"0a38e687c7606d2993e7633e5788a02c6e14c8c774eeb548d62581e288e9e34a"} Feb 03 06:48:31 crc kubenswrapper[4998]: I0203 06:48:31.524906 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:48:31 crc kubenswrapper[4998]: I0203 06:48:31.526398 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"80d95cb5b44ae562b584250358fcb2db382beddb6084bd196520a7a3b936c235"} Feb 03 06:48:31 crc kubenswrapper[4998]: I0203 06:48:31.526426 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" 
event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"1759dd1c0dfe8fae86751589997e25d9da6408b5e36bcd20d7ff5b8f8ba52387"} Feb 03 06:48:32 crc kubenswrapper[4998]: I0203 06:48:32.532938 4998 generic.go:334] "Generic (PLEG): container finished" podID="6ca9561b-1685-47eb-af5c-1eaf0266920d" containerID="d787a17619bfffaeb455fddc020273b8969b22376a0052d685ce0a0472850e0b" exitCode=0 Feb 03 06:48:32 crc kubenswrapper[4998]: I0203 06:48:32.533090 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-br759" event={"ID":"6ca9561b-1685-47eb-af5c-1eaf0266920d","Type":"ContainerDied","Data":"d787a17619bfffaeb455fddc020273b8969b22376a0052d685ce0a0472850e0b"} Feb 03 06:48:33 crc kubenswrapper[4998]: I0203 06:48:33.541891 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-br759" event={"ID":"6ca9561b-1685-47eb-af5c-1eaf0266920d","Type":"ContainerStarted","Data":"24d6044ef2f76d9ded1531fd7df89001921f816beeeade849ae2fd21eb1d4690"} Feb 03 06:48:33 crc kubenswrapper[4998]: I0203 06:48:33.582517 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-br759" podStartSLOduration=2.647992805 podStartE2EDuration="52.582501704s" podCreationTimestamp="2026-02-03 06:47:41 +0000 UTC" firstStartedPulling="2026-02-03 06:47:42.984901023 +0000 UTC m=+101.271594829" lastFinishedPulling="2026-02-03 06:48:32.919409922 +0000 UTC m=+151.206103728" observedRunningTime="2026-02-03 06:48:33.580074157 +0000 UTC m=+151.866767973" watchObservedRunningTime="2026-02-03 06:48:33.582501704 +0000 UTC m=+151.869195520" Feb 03 06:48:35 crc kubenswrapper[4998]: I0203 06:48:35.552803 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4f2p5" event={"ID":"73c4b34c-ece7-46c1-bce9-daf4c661b302","Type":"ContainerStarted","Data":"0a8e7321e0f603a42b3d2df7a81ffb674d4c81500e45477afddc42723e767a87"} Feb 03 06:48:36 crc kubenswrapper[4998]: I0203 06:48:36.560898 4998 generic.go:334] "Generic (PLEG): container finished" podID="73c4b34c-ece7-46c1-bce9-daf4c661b302" containerID="0a8e7321e0f603a42b3d2df7a81ffb674d4c81500e45477afddc42723e767a87" exitCode=0 Feb 03 06:48:36 crc kubenswrapper[4998]: I0203 06:48:36.560978 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4f2p5" event={"ID":"73c4b34c-ece7-46c1-bce9-daf4c661b302","Type":"ContainerDied","Data":"0a8e7321e0f603a42b3d2df7a81ffb674d4c81500e45477afddc42723e767a87"} Feb 03 06:48:37 crc kubenswrapper[4998]: I0203 06:48:37.572820 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4f2p5" event={"ID":"73c4b34c-ece7-46c1-bce9-daf4c661b302","Type":"ContainerStarted","Data":"aa569e7213a150e01d22361c480075bc658ac8c95c8667b522d2f22a1811a72d"} Feb 03 06:48:37 crc kubenswrapper[4998]: I0203 06:48:37.591820 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-4f2p5" podStartSLOduration=4.790939393 podStartE2EDuration="55.59180345s" podCreationTimestamp="2026-02-03 06:47:42 +0000 UTC" firstStartedPulling="2026-02-03 06:47:46.182085673 +0000 UTC m=+104.468779479" lastFinishedPulling="2026-02-03 06:48:36.98294972 +0000 UTC m=+155.269643536" observedRunningTime="2026-02-03 06:48:37.589320601 +0000 UTC m=+155.876014407" watchObservedRunningTime="2026-02-03 06:48:37.59180345 +0000 UTC m=+155.878497256" Feb 03 06:48:41 crc 
kubenswrapper[4998]: I0203 06:48:41.829826 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-br759" Feb 03 06:48:41 crc kubenswrapper[4998]: I0203 06:48:41.830159 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-br759" Feb 03 06:48:41 crc kubenswrapper[4998]: I0203 06:48:41.998795 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-br759" Feb 03 06:48:42 crc kubenswrapper[4998]: I0203 06:48:42.597345 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kqlxq" event={"ID":"d8395812-3fcd-41cd-910d-f195e7d94fbc","Type":"ContainerStarted","Data":"76eed354837e0d6bbcdef3f959478b2fcd971a0268ae7241b2c5ad77979d41d9"} Feb 03 06:48:42 crc kubenswrapper[4998]: I0203 06:48:42.599071 4998 generic.go:334] "Generic (PLEG): container finished" podID="c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696" containerID="24fad0b768cb95bca766b104ebf7737cffd4c8de4cdb6e8d354ce1f1e059eefc" exitCode=0 Feb 03 06:48:42 crc kubenswrapper[4998]: I0203 06:48:42.599120 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xhhkb" event={"ID":"c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696","Type":"ContainerDied","Data":"24fad0b768cb95bca766b104ebf7737cffd4c8de4cdb6e8d354ce1f1e059eefc"} Feb 03 06:48:42 crc kubenswrapper[4998]: I0203 06:48:42.602337 4998 generic.go:334] "Generic (PLEG): container finished" podID="f676cd7f-61b9-43e5-9ca0-4a3deddecff3" containerID="f9e88d4aa9b6a0f05db89e38e6fa188c9bf9a8ea2f7a748912c0b6fcc1c4a198" exitCode=0 Feb 03 06:48:42 crc kubenswrapper[4998]: I0203 06:48:42.602401 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zk44q" event={"ID":"f676cd7f-61b9-43e5-9ca0-4a3deddecff3","Type":"ContainerDied","Data":"f9e88d4aa9b6a0f05db89e38e6fa188c9bf9a8ea2f7a748912c0b6fcc1c4a198"} Feb 03 06:48:42 crc kubenswrapper[4998]: I0203 06:48:42.604329 4998 generic.go:334] "Generic (PLEG): container finished" podID="efee4303-e909-4341-b837-326fda8a64e1" containerID="8f1c1579cb5978633a91a18b85320642c24c0b9062f57f8d15ad47540cc08f24" exitCode=0 Feb 03 06:48:42 crc kubenswrapper[4998]: I0203 06:48:42.604908 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zmxrc" event={"ID":"efee4303-e909-4341-b837-326fda8a64e1","Type":"ContainerDied","Data":"8f1c1579cb5978633a91a18b85320642c24c0b9062f57f8d15ad47540cc08f24"} Feb 03 06:48:42 crc kubenswrapper[4998]: I0203 06:48:42.645280 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-br759" Feb 03 06:48:42 crc kubenswrapper[4998]: I0203 06:48:42.753962 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 06:48:42 crc kubenswrapper[4998]: I0203 06:48:42.754017 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 06:48:43 crc 
kubenswrapper[4998]: I0203 06:48:43.243846 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-4f2p5" Feb 03 06:48:43 crc kubenswrapper[4998]: I0203 06:48:43.244178 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-4f2p5" Feb 03 06:48:43 crc kubenswrapper[4998]: I0203 06:48:43.285612 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-4f2p5" Feb 03 06:48:43 crc kubenswrapper[4998]: I0203 06:48:43.611257 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zmxrc" event={"ID":"efee4303-e909-4341-b837-326fda8a64e1","Type":"ContainerStarted","Data":"fc34d709e8c1399b599bb382561f6fb3dd4004933a99ec29dbaed46c46dd2253"} Feb 03 06:48:43 crc kubenswrapper[4998]: I0203 06:48:43.612637 4998 generic.go:334] "Generic (PLEG): container finished" podID="d8395812-3fcd-41cd-910d-f195e7d94fbc" containerID="76eed354837e0d6bbcdef3f959478b2fcd971a0268ae7241b2c5ad77979d41d9" exitCode=0 Feb 03 06:48:43 crc kubenswrapper[4998]: I0203 06:48:43.612687 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kqlxq" event={"ID":"d8395812-3fcd-41cd-910d-f195e7d94fbc","Type":"ContainerDied","Data":"76eed354837e0d6bbcdef3f959478b2fcd971a0268ae7241b2c5ad77979d41d9"} Feb 03 06:48:43 crc kubenswrapper[4998]: I0203 06:48:43.614510 4998 generic.go:334] "Generic (PLEG): container finished" podID="84a91c75-202c-449d-b70a-569fafdf09fa" containerID="e270093bf7c337ebb29e2174176841dd30a631f77f9d194a7e87d6c6ec08ed10" exitCode=0 Feb 03 06:48:43 crc kubenswrapper[4998]: I0203 06:48:43.614572 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z97nt" event={"ID":"84a91c75-202c-449d-b70a-569fafdf09fa","Type":"ContainerDied","Data":"e270093bf7c337ebb29e2174176841dd30a631f77f9d194a7e87d6c6ec08ed10"} Feb 03 06:48:43 crc kubenswrapper[4998]: I0203 06:48:43.616689 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xhhkb" event={"ID":"c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696","Type":"ContainerStarted","Data":"ef84e3dbfc1404c2511bafce7241163047187d2de6bcaef604eeebdd4b6af1dc"} Feb 03 06:48:43 crc kubenswrapper[4998]: I0203 06:48:43.620570 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zk44q" event={"ID":"f676cd7f-61b9-43e5-9ca0-4a3deddecff3","Type":"ContainerStarted","Data":"d5d763553f402056a1745129e08bfe5cd0f77516499e44edb41b54484706a5f6"} Feb 03 06:48:43 crc kubenswrapper[4998]: I0203 06:48:43.636262 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-zmxrc" podStartSLOduration=2.495589293 podStartE2EDuration="1m4.636244147s" podCreationTimestamp="2026-02-03 06:47:39 +0000 UTC" firstStartedPulling="2026-02-03 06:47:40.851516935 +0000 UTC m=+99.138210731" lastFinishedPulling="2026-02-03 06:48:42.992171779 +0000 UTC m=+161.278865585" observedRunningTime="2026-02-03 06:48:43.634595515 +0000 UTC m=+161.921289331" watchObservedRunningTime="2026-02-03 06:48:43.636244147 +0000 UTC m=+161.922937953" Feb 03 06:48:43 crc kubenswrapper[4998]: I0203 06:48:43.665958 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-4f2p5" Feb 03 06:48:43 crc kubenswrapper[4998]: I0203 06:48:43.670912 4998 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-zk44q" podStartSLOduration=2.483316615 podStartE2EDuration="1m4.670897367s" podCreationTimestamp="2026-02-03 06:47:39 +0000 UTC" firstStartedPulling="2026-02-03 06:47:40.870008384 +0000 UTC m=+99.156702190" lastFinishedPulling="2026-02-03 06:48:43.057589116 +0000 UTC m=+161.344282942" observedRunningTime="2026-02-03 06:48:43.670290508 +0000 UTC m=+161.956984314" watchObservedRunningTime="2026-02-03 06:48:43.670897367 +0000 UTC m=+161.957591173" Feb 03 06:48:43 crc kubenswrapper[4998]: I0203 06:48:43.705562 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-xhhkb" podStartSLOduration=1.6073575020000002 podStartE2EDuration="1m2.705544587s" podCreationTimestamp="2026-02-03 06:47:41 +0000 UTC" firstStartedPulling="2026-02-03 06:47:41.949972121 +0000 UTC m=+100.236665927" lastFinishedPulling="2026-02-03 06:48:43.048159206 +0000 UTC m=+161.334853012" observedRunningTime="2026-02-03 06:48:43.704613558 +0000 UTC m=+161.991307364" watchObservedRunningTime="2026-02-03 06:48:43.705544587 +0000 UTC m=+161.992238393" Feb 03 06:48:44 crc kubenswrapper[4998]: I0203 06:48:44.626793 4998 generic.go:334] "Generic (PLEG): container finished" podID="3c650fe1-e6ec-4079-b7ab-a0fb0683ac03" containerID="286a761b83ed06f6d8d841dd603c0b25772806ff269bb0d368f8fe2875c6c38d" exitCode=0 Feb 03 06:48:44 crc kubenswrapper[4998]: I0203 06:48:44.627021 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-95844" event={"ID":"3c650fe1-e6ec-4079-b7ab-a0fb0683ac03","Type":"ContainerDied","Data":"286a761b83ed06f6d8d841dd603c0b25772806ff269bb0d368f8fe2875c6c38d"} Feb 03 06:48:44 crc kubenswrapper[4998]: I0203 06:48:44.631217 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kqlxq" event={"ID":"d8395812-3fcd-41cd-910d-f195e7d94fbc","Type":"ContainerStarted","Data":"955257e9082107b4ea7866274da12a6c7b207105a28c53405fab2fa17f012503"} Feb 03 06:48:44 crc kubenswrapper[4998]: I0203 06:48:44.636057 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z97nt" event={"ID":"84a91c75-202c-449d-b70a-569fafdf09fa","Type":"ContainerStarted","Data":"834ba1f164f2ee235971f9ad25ef418d7877a1a0119ce1ddff87524a4cfb565a"} Feb 03 06:48:44 crc kubenswrapper[4998]: I0203 06:48:44.671984 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-kqlxq" podStartSLOduration=3.739054703 podStartE2EDuration="1m2.671962249s" podCreationTimestamp="2026-02-03 06:47:42 +0000 UTC" firstStartedPulling="2026-02-03 06:47:45.157979115 +0000 UTC m=+103.444672921" lastFinishedPulling="2026-02-03 06:48:44.090886661 +0000 UTC m=+162.377580467" observedRunningTime="2026-02-03 06:48:44.671817064 +0000 UTC m=+162.958510890" watchObservedRunningTime="2026-02-03 06:48:44.671962249 +0000 UTC m=+162.958656065" Feb 03 06:48:44 crc kubenswrapper[4998]: I0203 06:48:44.703762 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-z97nt" podStartSLOduration=2.546144945 podStartE2EDuration="1m5.703741838s" podCreationTimestamp="2026-02-03 06:47:39 +0000 UTC" firstStartedPulling="2026-02-03 06:47:40.85461469 +0000 UTC m=+99.141308496" lastFinishedPulling="2026-02-03 06:48:44.012211583 +0000 UTC m=+162.298905389" observedRunningTime="2026-02-03 
06:48:44.702077125 +0000 UTC m=+162.988770951" watchObservedRunningTime="2026-02-03 06:48:44.703741838 +0000 UTC m=+162.990435654" Feb 03 06:48:44 crc kubenswrapper[4998]: I0203 06:48:44.748604 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4f2p5"] Feb 03 06:48:45 crc kubenswrapper[4998]: I0203 06:48:45.651908 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-95844" event={"ID":"3c650fe1-e6ec-4079-b7ab-a0fb0683ac03","Type":"ContainerStarted","Data":"0f407394414efd64399dc958afb415ab01c98dfc9f82e4de347d2d777547a93f"} Feb 03 06:48:45 crc kubenswrapper[4998]: I0203 06:48:45.652172 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-4f2p5" podUID="73c4b34c-ece7-46c1-bce9-daf4c661b302" containerName="registry-server" containerID="cri-o://aa569e7213a150e01d22361c480075bc658ac8c95c8667b522d2f22a1811a72d" gracePeriod=2 Feb 03 06:48:45 crc kubenswrapper[4998]: I0203 06:48:45.682828 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-95844" podStartSLOduration=2.535157079 podStartE2EDuration="1m6.682810981s" podCreationTimestamp="2026-02-03 06:47:39 +0000 UTC" firstStartedPulling="2026-02-03 06:47:40.861681608 +0000 UTC m=+99.148375414" lastFinishedPulling="2026-02-03 06:48:45.00933551 +0000 UTC m=+163.296029316" observedRunningTime="2026-02-03 06:48:45.681002094 +0000 UTC m=+163.967695900" watchObservedRunningTime="2026-02-03 06:48:45.682810981 +0000 UTC m=+163.969504807" Feb 03 06:48:45 crc kubenswrapper[4998]: I0203 06:48:45.984173 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4f2p5" Feb 03 06:48:46 crc kubenswrapper[4998]: I0203 06:48:46.156098 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d66bt\" (UniqueName: \"kubernetes.io/projected/73c4b34c-ece7-46c1-bce9-daf4c661b302-kube-api-access-d66bt\") pod \"73c4b34c-ece7-46c1-bce9-daf4c661b302\" (UID: \"73c4b34c-ece7-46c1-bce9-daf4c661b302\") " Feb 03 06:48:46 crc kubenswrapper[4998]: I0203 06:48:46.156162 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/73c4b34c-ece7-46c1-bce9-daf4c661b302-catalog-content\") pod \"73c4b34c-ece7-46c1-bce9-daf4c661b302\" (UID: \"73c4b34c-ece7-46c1-bce9-daf4c661b302\") " Feb 03 06:48:46 crc kubenswrapper[4998]: I0203 06:48:46.156231 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/73c4b34c-ece7-46c1-bce9-daf4c661b302-utilities\") pod \"73c4b34c-ece7-46c1-bce9-daf4c661b302\" (UID: \"73c4b34c-ece7-46c1-bce9-daf4c661b302\") " Feb 03 06:48:46 crc kubenswrapper[4998]: I0203 06:48:46.157242 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/73c4b34c-ece7-46c1-bce9-daf4c661b302-utilities" (OuterVolumeSpecName: "utilities") pod "73c4b34c-ece7-46c1-bce9-daf4c661b302" (UID: "73c4b34c-ece7-46c1-bce9-daf4c661b302"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 06:48:46 crc kubenswrapper[4998]: I0203 06:48:46.162290 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73c4b34c-ece7-46c1-bce9-daf4c661b302-kube-api-access-d66bt" (OuterVolumeSpecName: "kube-api-access-d66bt") pod "73c4b34c-ece7-46c1-bce9-daf4c661b302" (UID: "73c4b34c-ece7-46c1-bce9-daf4c661b302"). InnerVolumeSpecName "kube-api-access-d66bt". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:48:46 crc kubenswrapper[4998]: I0203 06:48:46.257710 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/73c4b34c-ece7-46c1-bce9-daf4c661b302-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 06:48:46 crc kubenswrapper[4998]: I0203 06:48:46.257742 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d66bt\" (UniqueName: \"kubernetes.io/projected/73c4b34c-ece7-46c1-bce9-daf4c661b302-kube-api-access-d66bt\") on node \"crc\" DevicePath \"\"" Feb 03 06:48:46 crc kubenswrapper[4998]: I0203 06:48:46.300747 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/73c4b34c-ece7-46c1-bce9-daf4c661b302-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "73c4b34c-ece7-46c1-bce9-daf4c661b302" (UID: "73c4b34c-ece7-46c1-bce9-daf4c661b302"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 06:48:46 crc kubenswrapper[4998]: I0203 06:48:46.359096 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/73c4b34c-ece7-46c1-bce9-daf4c661b302-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 06:48:46 crc kubenswrapper[4998]: I0203 06:48:46.659336 4998 generic.go:334] "Generic (PLEG): container finished" podID="73c4b34c-ece7-46c1-bce9-daf4c661b302" containerID="aa569e7213a150e01d22361c480075bc658ac8c95c8667b522d2f22a1811a72d" exitCode=0 Feb 03 06:48:46 crc kubenswrapper[4998]: I0203 06:48:46.659398 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4f2p5" event={"ID":"73c4b34c-ece7-46c1-bce9-daf4c661b302","Type":"ContainerDied","Data":"aa569e7213a150e01d22361c480075bc658ac8c95c8667b522d2f22a1811a72d"} Feb 03 06:48:46 crc kubenswrapper[4998]: I0203 06:48:46.659428 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4f2p5" event={"ID":"73c4b34c-ece7-46c1-bce9-daf4c661b302","Type":"ContainerDied","Data":"1fc27be1cca385f629f91c518d2e1972ac1b8216e8ac85062a5f90c436380302"} Feb 03 06:48:46 crc kubenswrapper[4998]: I0203 06:48:46.659447 4998 scope.go:117] "RemoveContainer" containerID="aa569e7213a150e01d22361c480075bc658ac8c95c8667b522d2f22a1811a72d" Feb 03 06:48:46 crc kubenswrapper[4998]: I0203 06:48:46.659449 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4f2p5" Feb 03 06:48:46 crc kubenswrapper[4998]: I0203 06:48:46.674949 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4f2p5"] Feb 03 06:48:46 crc kubenswrapper[4998]: I0203 06:48:46.681097 4998 scope.go:117] "RemoveContainer" containerID="0a8e7321e0f603a42b3d2df7a81ffb674d4c81500e45477afddc42723e767a87" Feb 03 06:48:46 crc kubenswrapper[4998]: I0203 06:48:46.683062 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-4f2p5"] Feb 03 06:48:46 crc kubenswrapper[4998]: I0203 06:48:46.695377 4998 scope.go:117] "RemoveContainer" containerID="754cb93dedc27a0efd10e3a5afe8eecb298da972f7788df58155723da3aaa611" Feb 03 06:48:46 crc kubenswrapper[4998]: I0203 06:48:46.712103 4998 scope.go:117] "RemoveContainer" containerID="aa569e7213a150e01d22361c480075bc658ac8c95c8667b522d2f22a1811a72d" Feb 03 06:48:46 crc kubenswrapper[4998]: E0203 06:48:46.719589 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa569e7213a150e01d22361c480075bc658ac8c95c8667b522d2f22a1811a72d\": container with ID starting with aa569e7213a150e01d22361c480075bc658ac8c95c8667b522d2f22a1811a72d not found: ID does not exist" containerID="aa569e7213a150e01d22361c480075bc658ac8c95c8667b522d2f22a1811a72d" Feb 03 06:48:46 crc kubenswrapper[4998]: I0203 06:48:46.719725 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa569e7213a150e01d22361c480075bc658ac8c95c8667b522d2f22a1811a72d"} err="failed to get container status \"aa569e7213a150e01d22361c480075bc658ac8c95c8667b522d2f22a1811a72d\": rpc error: code = NotFound desc = could not find container \"aa569e7213a150e01d22361c480075bc658ac8c95c8667b522d2f22a1811a72d\": container with ID starting with aa569e7213a150e01d22361c480075bc658ac8c95c8667b522d2f22a1811a72d not found: ID does not exist" Feb 03 06:48:46 crc kubenswrapper[4998]: I0203 06:48:46.719859 4998 scope.go:117] "RemoveContainer" containerID="0a8e7321e0f603a42b3d2df7a81ffb674d4c81500e45477afddc42723e767a87" Feb 03 06:48:46 crc kubenswrapper[4998]: E0203 06:48:46.727047 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0a8e7321e0f603a42b3d2df7a81ffb674d4c81500e45477afddc42723e767a87\": container with ID starting with 0a8e7321e0f603a42b3d2df7a81ffb674d4c81500e45477afddc42723e767a87 not found: ID does not exist" containerID="0a8e7321e0f603a42b3d2df7a81ffb674d4c81500e45477afddc42723e767a87" Feb 03 06:48:46 crc kubenswrapper[4998]: I0203 06:48:46.727093 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a8e7321e0f603a42b3d2df7a81ffb674d4c81500e45477afddc42723e767a87"} err="failed to get container status \"0a8e7321e0f603a42b3d2df7a81ffb674d4c81500e45477afddc42723e767a87\": rpc error: code = NotFound desc = could not find container \"0a8e7321e0f603a42b3d2df7a81ffb674d4c81500e45477afddc42723e767a87\": container with ID starting with 0a8e7321e0f603a42b3d2df7a81ffb674d4c81500e45477afddc42723e767a87 not found: ID does not exist" Feb 03 06:48:46 crc kubenswrapper[4998]: I0203 06:48:46.727123 4998 scope.go:117] "RemoveContainer" containerID="754cb93dedc27a0efd10e3a5afe8eecb298da972f7788df58155723da3aaa611" Feb 03 06:48:46 crc kubenswrapper[4998]: E0203 06:48:46.727469 4998 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"754cb93dedc27a0efd10e3a5afe8eecb298da972f7788df58155723da3aaa611\": container with ID starting with 754cb93dedc27a0efd10e3a5afe8eecb298da972f7788df58155723da3aaa611 not found: ID does not exist" containerID="754cb93dedc27a0efd10e3a5afe8eecb298da972f7788df58155723da3aaa611" Feb 03 06:48:46 crc kubenswrapper[4998]: I0203 06:48:46.727570 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"754cb93dedc27a0efd10e3a5afe8eecb298da972f7788df58155723da3aaa611"} err="failed to get container status \"754cb93dedc27a0efd10e3a5afe8eecb298da972f7788df58155723da3aaa611\": rpc error: code = NotFound desc = could not find container \"754cb93dedc27a0efd10e3a5afe8eecb298da972f7788df58155723da3aaa611\": container with ID starting with 754cb93dedc27a0efd10e3a5afe8eecb298da972f7788df58155723da3aaa611 not found: ID does not exist" Feb 03 06:48:46 crc kubenswrapper[4998]: I0203 06:48:46.947839 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-br759"] Feb 03 06:48:46 crc kubenswrapper[4998]: I0203 06:48:46.948243 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-br759" podUID="6ca9561b-1685-47eb-af5c-1eaf0266920d" containerName="registry-server" containerID="cri-o://24d6044ef2f76d9ded1531fd7df89001921f816beeeade849ae2fd21eb1d4690" gracePeriod=2 Feb 03 06:48:47 crc kubenswrapper[4998]: I0203 06:48:47.321065 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-br759" Feb 03 06:48:47 crc kubenswrapper[4998]: I0203 06:48:47.472058 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ca9561b-1685-47eb-af5c-1eaf0266920d-catalog-content\") pod \"6ca9561b-1685-47eb-af5c-1eaf0266920d\" (UID: \"6ca9561b-1685-47eb-af5c-1eaf0266920d\") " Feb 03 06:48:47 crc kubenswrapper[4998]: I0203 06:48:47.472249 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ca9561b-1685-47eb-af5c-1eaf0266920d-utilities\") pod \"6ca9561b-1685-47eb-af5c-1eaf0266920d\" (UID: \"6ca9561b-1685-47eb-af5c-1eaf0266920d\") " Feb 03 06:48:47 crc kubenswrapper[4998]: I0203 06:48:47.472295 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tzp47\" (UniqueName: \"kubernetes.io/projected/6ca9561b-1685-47eb-af5c-1eaf0266920d-kube-api-access-tzp47\") pod \"6ca9561b-1685-47eb-af5c-1eaf0266920d\" (UID: \"6ca9561b-1685-47eb-af5c-1eaf0266920d\") " Feb 03 06:48:47 crc kubenswrapper[4998]: I0203 06:48:47.472951 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6ca9561b-1685-47eb-af5c-1eaf0266920d-utilities" (OuterVolumeSpecName: "utilities") pod "6ca9561b-1685-47eb-af5c-1eaf0266920d" (UID: "6ca9561b-1685-47eb-af5c-1eaf0266920d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 06:48:47 crc kubenswrapper[4998]: I0203 06:48:47.478882 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ca9561b-1685-47eb-af5c-1eaf0266920d-kube-api-access-tzp47" (OuterVolumeSpecName: "kube-api-access-tzp47") pod "6ca9561b-1685-47eb-af5c-1eaf0266920d" (UID: "6ca9561b-1685-47eb-af5c-1eaf0266920d"). 
InnerVolumeSpecName "kube-api-access-tzp47". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:48:47 crc kubenswrapper[4998]: I0203 06:48:47.499329 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6ca9561b-1685-47eb-af5c-1eaf0266920d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6ca9561b-1685-47eb-af5c-1eaf0266920d" (UID: "6ca9561b-1685-47eb-af5c-1eaf0266920d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 06:48:47 crc kubenswrapper[4998]: I0203 06:48:47.573923 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ca9561b-1685-47eb-af5c-1eaf0266920d-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 06:48:47 crc kubenswrapper[4998]: I0203 06:48:47.574060 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tzp47\" (UniqueName: \"kubernetes.io/projected/6ca9561b-1685-47eb-af5c-1eaf0266920d-kube-api-access-tzp47\") on node \"crc\" DevicePath \"\"" Feb 03 06:48:47 crc kubenswrapper[4998]: I0203 06:48:47.574076 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ca9561b-1685-47eb-af5c-1eaf0266920d-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 06:48:47 crc kubenswrapper[4998]: I0203 06:48:47.666909 4998 generic.go:334] "Generic (PLEG): container finished" podID="6ca9561b-1685-47eb-af5c-1eaf0266920d" containerID="24d6044ef2f76d9ded1531fd7df89001921f816beeeade849ae2fd21eb1d4690" exitCode=0 Feb 03 06:48:47 crc kubenswrapper[4998]: I0203 06:48:47.666992 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-br759" Feb 03 06:48:47 crc kubenswrapper[4998]: I0203 06:48:47.667005 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-br759" event={"ID":"6ca9561b-1685-47eb-af5c-1eaf0266920d","Type":"ContainerDied","Data":"24d6044ef2f76d9ded1531fd7df89001921f816beeeade849ae2fd21eb1d4690"} Feb 03 06:48:47 crc kubenswrapper[4998]: I0203 06:48:47.667039 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-br759" event={"ID":"6ca9561b-1685-47eb-af5c-1eaf0266920d","Type":"ContainerDied","Data":"520c11b098661729592a414713f2327d2a84c54ce92ec3fe9d80f566cd35e5e6"} Feb 03 06:48:47 crc kubenswrapper[4998]: I0203 06:48:47.667059 4998 scope.go:117] "RemoveContainer" containerID="24d6044ef2f76d9ded1531fd7df89001921f816beeeade849ae2fd21eb1d4690" Feb 03 06:48:47 crc kubenswrapper[4998]: I0203 06:48:47.682282 4998 scope.go:117] "RemoveContainer" containerID="d787a17619bfffaeb455fddc020273b8969b22376a0052d685ce0a0472850e0b" Feb 03 06:48:47 crc kubenswrapper[4998]: I0203 06:48:47.692332 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-br759"] Feb 03 06:48:47 crc kubenswrapper[4998]: I0203 06:48:47.697454 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-br759"] Feb 03 06:48:47 crc kubenswrapper[4998]: I0203 06:48:47.699174 4998 scope.go:117] "RemoveContainer" containerID="f20b614c5a9480046315cf12ba4099415ff41a5f00cc36e661827510145c9e39" Feb 03 06:48:47 crc kubenswrapper[4998]: I0203 06:48:47.716286 4998 scope.go:117] "RemoveContainer" containerID="24d6044ef2f76d9ded1531fd7df89001921f816beeeade849ae2fd21eb1d4690" Feb 03 06:48:47 crc kubenswrapper[4998]: 
E0203 06:48:47.716815 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"24d6044ef2f76d9ded1531fd7df89001921f816beeeade849ae2fd21eb1d4690\": container with ID starting with 24d6044ef2f76d9ded1531fd7df89001921f816beeeade849ae2fd21eb1d4690 not found: ID does not exist" containerID="24d6044ef2f76d9ded1531fd7df89001921f816beeeade849ae2fd21eb1d4690" Feb 03 06:48:47 crc kubenswrapper[4998]: I0203 06:48:47.716848 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24d6044ef2f76d9ded1531fd7df89001921f816beeeade849ae2fd21eb1d4690"} err="failed to get container status \"24d6044ef2f76d9ded1531fd7df89001921f816beeeade849ae2fd21eb1d4690\": rpc error: code = NotFound desc = could not find container \"24d6044ef2f76d9ded1531fd7df89001921f816beeeade849ae2fd21eb1d4690\": container with ID starting with 24d6044ef2f76d9ded1531fd7df89001921f816beeeade849ae2fd21eb1d4690 not found: ID does not exist" Feb 03 06:48:47 crc kubenswrapper[4998]: I0203 06:48:47.716877 4998 scope.go:117] "RemoveContainer" containerID="d787a17619bfffaeb455fddc020273b8969b22376a0052d685ce0a0472850e0b" Feb 03 06:48:47 crc kubenswrapper[4998]: E0203 06:48:47.717189 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d787a17619bfffaeb455fddc020273b8969b22376a0052d685ce0a0472850e0b\": container with ID starting with d787a17619bfffaeb455fddc020273b8969b22376a0052d685ce0a0472850e0b not found: ID does not exist" containerID="d787a17619bfffaeb455fddc020273b8969b22376a0052d685ce0a0472850e0b" Feb 03 06:48:47 crc kubenswrapper[4998]: I0203 06:48:47.717219 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d787a17619bfffaeb455fddc020273b8969b22376a0052d685ce0a0472850e0b"} err="failed to get container status \"d787a17619bfffaeb455fddc020273b8969b22376a0052d685ce0a0472850e0b\": rpc error: code = NotFound desc = could not find container \"d787a17619bfffaeb455fddc020273b8969b22376a0052d685ce0a0472850e0b\": container with ID starting with d787a17619bfffaeb455fddc020273b8969b22376a0052d685ce0a0472850e0b not found: ID does not exist" Feb 03 06:48:47 crc kubenswrapper[4998]: I0203 06:48:47.717240 4998 scope.go:117] "RemoveContainer" containerID="f20b614c5a9480046315cf12ba4099415ff41a5f00cc36e661827510145c9e39" Feb 03 06:48:47 crc kubenswrapper[4998]: E0203 06:48:47.717532 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f20b614c5a9480046315cf12ba4099415ff41a5f00cc36e661827510145c9e39\": container with ID starting with f20b614c5a9480046315cf12ba4099415ff41a5f00cc36e661827510145c9e39 not found: ID does not exist" containerID="f20b614c5a9480046315cf12ba4099415ff41a5f00cc36e661827510145c9e39" Feb 03 06:48:47 crc kubenswrapper[4998]: I0203 06:48:47.717557 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f20b614c5a9480046315cf12ba4099415ff41a5f00cc36e661827510145c9e39"} err="failed to get container status \"f20b614c5a9480046315cf12ba4099415ff41a5f00cc36e661827510145c9e39\": rpc error: code = NotFound desc = could not find container \"f20b614c5a9480046315cf12ba4099415ff41a5f00cc36e661827510145c9e39\": container with ID starting with f20b614c5a9480046315cf12ba4099415ff41a5f00cc36e661827510145c9e39 not found: ID does not exist" Feb 03 06:48:48 crc kubenswrapper[4998]: I0203 06:48:48.435232 
4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ca9561b-1685-47eb-af5c-1eaf0266920d" path="/var/lib/kubelet/pods/6ca9561b-1685-47eb-af5c-1eaf0266920d/volumes" Feb 03 06:48:48 crc kubenswrapper[4998]: I0203 06:48:48.436033 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73c4b34c-ece7-46c1-bce9-daf4c661b302" path="/var/lib/kubelet/pods/73c4b34c-ece7-46c1-bce9-daf4c661b302/volumes" Feb 03 06:48:49 crc kubenswrapper[4998]: I0203 06:48:49.419735 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-95844" Feb 03 06:48:49 crc kubenswrapper[4998]: I0203 06:48:49.419806 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-95844" Feb 03 06:48:49 crc kubenswrapper[4998]: I0203 06:48:49.458753 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-95844" Feb 03 06:48:49 crc kubenswrapper[4998]: I0203 06:48:49.672860 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-zmxrc" Feb 03 06:48:49 crc kubenswrapper[4998]: I0203 06:48:49.673207 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-zmxrc" Feb 03 06:48:49 crc kubenswrapper[4998]: I0203 06:48:49.710525 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-zmxrc" Feb 03 06:48:49 crc kubenswrapper[4998]: I0203 06:48:49.760043 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-zmxrc" Feb 03 06:48:49 crc kubenswrapper[4998]: I0203 06:48:49.856322 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-zk44q" Feb 03 06:48:49 crc kubenswrapper[4998]: I0203 06:48:49.856358 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-zk44q" Feb 03 06:48:49 crc kubenswrapper[4998]: I0203 06:48:49.903408 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-zk44q" Feb 03 06:48:50 crc kubenswrapper[4998]: I0203 06:48:50.040753 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-z97nt" Feb 03 06:48:50 crc kubenswrapper[4998]: I0203 06:48:50.040825 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-z97nt" Feb 03 06:48:50 crc kubenswrapper[4998]: I0203 06:48:50.086838 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-z97nt" Feb 03 06:48:50 crc kubenswrapper[4998]: I0203 06:48:50.733597 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-zk44q" Feb 03 06:48:50 crc kubenswrapper[4998]: I0203 06:48:50.758672 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-z97nt" Feb 03 06:48:51 crc kubenswrapper[4998]: I0203 06:48:51.463943 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-xhhkb" Feb 03 06:48:51 crc kubenswrapper[4998]: I0203 06:48:51.464279 4998 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-xhhkb" Feb 03 06:48:51 crc kubenswrapper[4998]: I0203 06:48:51.498390 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-xhhkb" Feb 03 06:48:51 crc kubenswrapper[4998]: I0203 06:48:51.740262 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-xhhkb" Feb 03 06:48:52 crc kubenswrapper[4998]: I0203 06:48:52.837746 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-kqlxq" Feb 03 06:48:52 crc kubenswrapper[4998]: I0203 06:48:52.837819 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-kqlxq" Feb 03 06:48:52 crc kubenswrapper[4998]: I0203 06:48:52.891296 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-kqlxq" Feb 03 06:48:53 crc kubenswrapper[4998]: I0203 06:48:53.149497 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-z97nt"] Feb 03 06:48:53 crc kubenswrapper[4998]: I0203 06:48:53.149766 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-z97nt" podUID="84a91c75-202c-449d-b70a-569fafdf09fa" containerName="registry-server" containerID="cri-o://834ba1f164f2ee235971f9ad25ef418d7877a1a0119ce1ddff87524a4cfb565a" gracePeriod=2 Feb 03 06:48:53 crc kubenswrapper[4998]: I0203 06:48:53.348948 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zk44q"] Feb 03 06:48:53 crc kubenswrapper[4998]: I0203 06:48:53.349214 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-zk44q" podUID="f676cd7f-61b9-43e5-9ca0-4a3deddecff3" containerName="registry-server" containerID="cri-o://d5d763553f402056a1745129e08bfe5cd0f77516499e44edb41b54484706a5f6" gracePeriod=2 Feb 03 06:48:53 crc kubenswrapper[4998]: I0203 06:48:53.708855 4998 generic.go:334] "Generic (PLEG): container finished" podID="f676cd7f-61b9-43e5-9ca0-4a3deddecff3" containerID="d5d763553f402056a1745129e08bfe5cd0f77516499e44edb41b54484706a5f6" exitCode=0 Feb 03 06:48:53 crc kubenswrapper[4998]: I0203 06:48:53.708956 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zk44q" event={"ID":"f676cd7f-61b9-43e5-9ca0-4a3deddecff3","Type":"ContainerDied","Data":"d5d763553f402056a1745129e08bfe5cd0f77516499e44edb41b54484706a5f6"} Feb 03 06:48:53 crc kubenswrapper[4998]: I0203 06:48:53.711736 4998 generic.go:334] "Generic (PLEG): container finished" podID="84a91c75-202c-449d-b70a-569fafdf09fa" containerID="834ba1f164f2ee235971f9ad25ef418d7877a1a0119ce1ddff87524a4cfb565a" exitCode=0 Feb 03 06:48:53 crc kubenswrapper[4998]: I0203 06:48:53.711803 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z97nt" event={"ID":"84a91c75-202c-449d-b70a-569fafdf09fa","Type":"ContainerDied","Data":"834ba1f164f2ee235971f9ad25ef418d7877a1a0119ce1ddff87524a4cfb565a"} Feb 03 06:48:53 crc kubenswrapper[4998]: I0203 06:48:53.763919 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-kqlxq" Feb 03 06:48:54 crc kubenswrapper[4998]: I0203 
06:48:54.089556 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zk44q" Feb 03 06:48:54 crc kubenswrapper[4998]: I0203 06:48:54.274350 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q5zxx\" (UniqueName: \"kubernetes.io/projected/f676cd7f-61b9-43e5-9ca0-4a3deddecff3-kube-api-access-q5zxx\") pod \"f676cd7f-61b9-43e5-9ca0-4a3deddecff3\" (UID: \"f676cd7f-61b9-43e5-9ca0-4a3deddecff3\") " Feb 03 06:48:54 crc kubenswrapper[4998]: I0203 06:48:54.274480 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f676cd7f-61b9-43e5-9ca0-4a3deddecff3-catalog-content\") pod \"f676cd7f-61b9-43e5-9ca0-4a3deddecff3\" (UID: \"f676cd7f-61b9-43e5-9ca0-4a3deddecff3\") " Feb 03 06:48:54 crc kubenswrapper[4998]: I0203 06:48:54.274555 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f676cd7f-61b9-43e5-9ca0-4a3deddecff3-utilities\") pod \"f676cd7f-61b9-43e5-9ca0-4a3deddecff3\" (UID: \"f676cd7f-61b9-43e5-9ca0-4a3deddecff3\") " Feb 03 06:48:54 crc kubenswrapper[4998]: I0203 06:48:54.276053 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f676cd7f-61b9-43e5-9ca0-4a3deddecff3-utilities" (OuterVolumeSpecName: "utilities") pod "f676cd7f-61b9-43e5-9ca0-4a3deddecff3" (UID: "f676cd7f-61b9-43e5-9ca0-4a3deddecff3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 06:48:54 crc kubenswrapper[4998]: I0203 06:48:54.281079 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f676cd7f-61b9-43e5-9ca0-4a3deddecff3-kube-api-access-q5zxx" (OuterVolumeSpecName: "kube-api-access-q5zxx") pod "f676cd7f-61b9-43e5-9ca0-4a3deddecff3" (UID: "f676cd7f-61b9-43e5-9ca0-4a3deddecff3"). InnerVolumeSpecName "kube-api-access-q5zxx". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:48:54 crc kubenswrapper[4998]: I0203 06:48:54.300950 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-z97nt" Feb 03 06:48:54 crc kubenswrapper[4998]: I0203 06:48:54.376294 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f676cd7f-61b9-43e5-9ca0-4a3deddecff3-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 06:48:54 crc kubenswrapper[4998]: I0203 06:48:54.376339 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q5zxx\" (UniqueName: \"kubernetes.io/projected/f676cd7f-61b9-43e5-9ca0-4a3deddecff3-kube-api-access-q5zxx\") on node \"crc\" DevicePath \"\"" Feb 03 06:48:54 crc kubenswrapper[4998]: I0203 06:48:54.476824 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vl62j\" (UniqueName: \"kubernetes.io/projected/84a91c75-202c-449d-b70a-569fafdf09fa-kube-api-access-vl62j\") pod \"84a91c75-202c-449d-b70a-569fafdf09fa\" (UID: \"84a91c75-202c-449d-b70a-569fafdf09fa\") " Feb 03 06:48:54 crc kubenswrapper[4998]: I0203 06:48:54.476899 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84a91c75-202c-449d-b70a-569fafdf09fa-utilities\") pod \"84a91c75-202c-449d-b70a-569fafdf09fa\" (UID: \"84a91c75-202c-449d-b70a-569fafdf09fa\") " Feb 03 06:48:54 crc kubenswrapper[4998]: I0203 06:48:54.476962 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84a91c75-202c-449d-b70a-569fafdf09fa-catalog-content\") pod \"84a91c75-202c-449d-b70a-569fafdf09fa\" (UID: \"84a91c75-202c-449d-b70a-569fafdf09fa\") " Feb 03 06:48:54 crc kubenswrapper[4998]: I0203 06:48:54.478853 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84a91c75-202c-449d-b70a-569fafdf09fa-utilities" (OuterVolumeSpecName: "utilities") pod "84a91c75-202c-449d-b70a-569fafdf09fa" (UID: "84a91c75-202c-449d-b70a-569fafdf09fa"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 06:48:54 crc kubenswrapper[4998]: I0203 06:48:54.484343 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84a91c75-202c-449d-b70a-569fafdf09fa-kube-api-access-vl62j" (OuterVolumeSpecName: "kube-api-access-vl62j") pod "84a91c75-202c-449d-b70a-569fafdf09fa" (UID: "84a91c75-202c-449d-b70a-569fafdf09fa"). InnerVolumeSpecName "kube-api-access-vl62j". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:48:54 crc kubenswrapper[4998]: I0203 06:48:54.578752 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vl62j\" (UniqueName: \"kubernetes.io/projected/84a91c75-202c-449d-b70a-569fafdf09fa-kube-api-access-vl62j\") on node \"crc\" DevicePath \"\"" Feb 03 06:48:54 crc kubenswrapper[4998]: I0203 06:48:54.578803 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84a91c75-202c-449d-b70a-569fafdf09fa-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 06:48:54 crc kubenswrapper[4998]: I0203 06:48:54.693169 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84a91c75-202c-449d-b70a-569fafdf09fa-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "84a91c75-202c-449d-b70a-569fafdf09fa" (UID: "84a91c75-202c-449d-b70a-569fafdf09fa"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 06:48:54 crc kubenswrapper[4998]: I0203 06:48:54.717960 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z97nt" event={"ID":"84a91c75-202c-449d-b70a-569fafdf09fa","Type":"ContainerDied","Data":"2b74b359d44a5e0d5f485850ba74fc4a20d94c860f5d3eb541a2303aabcebf2e"} Feb 03 06:48:54 crc kubenswrapper[4998]: I0203 06:48:54.718010 4998 scope.go:117] "RemoveContainer" containerID="834ba1f164f2ee235971f9ad25ef418d7877a1a0119ce1ddff87524a4cfb565a" Feb 03 06:48:54 crc kubenswrapper[4998]: I0203 06:48:54.718060 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-z97nt" Feb 03 06:48:54 crc kubenswrapper[4998]: I0203 06:48:54.718571 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f676cd7f-61b9-43e5-9ca0-4a3deddecff3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f676cd7f-61b9-43e5-9ca0-4a3deddecff3" (UID: "f676cd7f-61b9-43e5-9ca0-4a3deddecff3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 06:48:54 crc kubenswrapper[4998]: I0203 06:48:54.721627 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zk44q" Feb 03 06:48:54 crc kubenswrapper[4998]: I0203 06:48:54.721898 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zk44q" event={"ID":"f676cd7f-61b9-43e5-9ca0-4a3deddecff3","Type":"ContainerDied","Data":"43be0648867f8bae82185861b40ea418b507bdfa4238624c219d7c4a1db6aebe"} Feb 03 06:48:54 crc kubenswrapper[4998]: I0203 06:48:54.737899 4998 scope.go:117] "RemoveContainer" containerID="e270093bf7c337ebb29e2174176841dd30a631f77f9d194a7e87d6c6ec08ed10" Feb 03 06:48:54 crc kubenswrapper[4998]: I0203 06:48:54.752157 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-z97nt"] Feb 03 06:48:54 crc kubenswrapper[4998]: I0203 06:48:54.760287 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-z97nt"] Feb 03 06:48:54 crc kubenswrapper[4998]: I0203 06:48:54.764020 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zk44q"] Feb 03 06:48:54 crc kubenswrapper[4998]: I0203 06:48:54.767625 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-zk44q"] Feb 03 06:48:54 crc kubenswrapper[4998]: I0203 06:48:54.781867 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84a91c75-202c-449d-b70a-569fafdf09fa-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 06:48:54 crc kubenswrapper[4998]: I0203 06:48:54.781901 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f676cd7f-61b9-43e5-9ca0-4a3deddecff3-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 06:48:54 crc kubenswrapper[4998]: I0203 06:48:54.786128 4998 scope.go:117] "RemoveContainer" containerID="dc02a2dca2616cdee6b614207023258868caaa98794edee6b9325a55087d2fd5" Feb 03 06:48:54 crc kubenswrapper[4998]: I0203 06:48:54.802069 4998 scope.go:117] "RemoveContainer" containerID="d5d763553f402056a1745129e08bfe5cd0f77516499e44edb41b54484706a5f6" Feb 03 06:48:54 crc kubenswrapper[4998]: I0203 06:48:54.820008 
4998 scope.go:117] "RemoveContainer" containerID="f9e88d4aa9b6a0f05db89e38e6fa188c9bf9a8ea2f7a748912c0b6fcc1c4a198" Feb 03 06:48:54 crc kubenswrapper[4998]: I0203 06:48:54.837837 4998 scope.go:117] "RemoveContainer" containerID="bf5f761cde0063825dd9829609e2b4e05b4c049f03fcd9c503882452c9991553" Feb 03 06:48:56 crc kubenswrapper[4998]: I0203 06:48:56.435389 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84a91c75-202c-449d-b70a-569fafdf09fa" path="/var/lib/kubelet/pods/84a91c75-202c-449d-b70a-569fafdf09fa/volumes" Feb 03 06:48:56 crc kubenswrapper[4998]: I0203 06:48:56.439809 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f676cd7f-61b9-43e5-9ca0-4a3deddecff3" path="/var/lib/kubelet/pods/f676cd7f-61b9-43e5-9ca0-4a3deddecff3/volumes" Feb 03 06:48:59 crc kubenswrapper[4998]: I0203 06:48:59.467049 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-95844" Feb 03 06:49:00 crc kubenswrapper[4998]: I0203 06:49:00.458083 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 06:49:01 crc kubenswrapper[4998]: I0203 06:49:01.510669 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-zx8sw"] Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.308247 4998 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Feb 03 06:49:06 crc kubenswrapper[4998]: E0203 06:49:06.309462 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73c4b34c-ece7-46c1-bce9-daf4c661b302" containerName="extract-content" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.309534 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="73c4b34c-ece7-46c1-bce9-daf4c661b302" containerName="extract-content" Feb 03 06:49:06 crc kubenswrapper[4998]: E0203 06:49:06.309625 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84a91c75-202c-449d-b70a-569fafdf09fa" containerName="registry-server" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.309677 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="84a91c75-202c-449d-b70a-569fafdf09fa" containerName="registry-server" Feb 03 06:49:06 crc kubenswrapper[4998]: E0203 06:49:06.309736 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f676cd7f-61b9-43e5-9ca0-4a3deddecff3" containerName="registry-server" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.309801 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f676cd7f-61b9-43e5-9ca0-4a3deddecff3" containerName="registry-server" Feb 03 06:49:06 crc kubenswrapper[4998]: E0203 06:49:06.309868 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a47b8c4a-b3ea-45da-9a08-2d96ab32e3b0" containerName="pruner" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.309924 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="a47b8c4a-b3ea-45da-9a08-2d96ab32e3b0" containerName="pruner" Feb 03 06:49:06 crc kubenswrapper[4998]: E0203 06:49:06.309980 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84a91c75-202c-449d-b70a-569fafdf09fa" containerName="extract-content" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.310029 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="84a91c75-202c-449d-b70a-569fafdf09fa" containerName="extract-content" Feb 03 06:49:06 crc 
kubenswrapper[4998]: E0203 06:49:06.310085 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84a91c75-202c-449d-b70a-569fafdf09fa" containerName="extract-utilities" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.310231 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="84a91c75-202c-449d-b70a-569fafdf09fa" containerName="extract-utilities" Feb 03 06:49:06 crc kubenswrapper[4998]: E0203 06:49:06.310287 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73c4b34c-ece7-46c1-bce9-daf4c661b302" containerName="extract-utilities" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.310336 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="73c4b34c-ece7-46c1-bce9-daf4c661b302" containerName="extract-utilities" Feb 03 06:49:06 crc kubenswrapper[4998]: E0203 06:49:06.310392 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f676cd7f-61b9-43e5-9ca0-4a3deddecff3" containerName="extract-utilities" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.310443 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f676cd7f-61b9-43e5-9ca0-4a3deddecff3" containerName="extract-utilities" Feb 03 06:49:06 crc kubenswrapper[4998]: E0203 06:49:06.310501 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ca9561b-1685-47eb-af5c-1eaf0266920d" containerName="extract-utilities" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.310557 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ca9561b-1685-47eb-af5c-1eaf0266920d" containerName="extract-utilities" Feb 03 06:49:06 crc kubenswrapper[4998]: E0203 06:49:06.310609 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73c4b34c-ece7-46c1-bce9-daf4c661b302" containerName="registry-server" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.310658 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="73c4b34c-ece7-46c1-bce9-daf4c661b302" containerName="registry-server" Feb 03 06:49:06 crc kubenswrapper[4998]: E0203 06:49:06.310713 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ca9561b-1685-47eb-af5c-1eaf0266920d" containerName="registry-server" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.310797 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ca9561b-1685-47eb-af5c-1eaf0266920d" containerName="registry-server" Feb 03 06:49:06 crc kubenswrapper[4998]: E0203 06:49:06.310874 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ca9561b-1685-47eb-af5c-1eaf0266920d" containerName="extract-content" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.310940 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ca9561b-1685-47eb-af5c-1eaf0266920d" containerName="extract-content" Feb 03 06:49:06 crc kubenswrapper[4998]: E0203 06:49:06.311019 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f676cd7f-61b9-43e5-9ca0-4a3deddecff3" containerName="extract-content" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.311081 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f676cd7f-61b9-43e5-9ca0-4a3deddecff3" containerName="extract-content" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.311216 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="a47b8c4a-b3ea-45da-9a08-2d96ab32e3b0" containerName="pruner" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.311282 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="73c4b34c-ece7-46c1-bce9-daf4c661b302" 
containerName="registry-server" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.311339 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="f676cd7f-61b9-43e5-9ca0-4a3deddecff3" containerName="registry-server" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.311391 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ca9561b-1685-47eb-af5c-1eaf0266920d" containerName="registry-server" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.311451 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="84a91c75-202c-449d-b70a-569fafdf09fa" containerName="registry-server" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.311889 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.334969 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.335046 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.335092 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.335116 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.335161 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.355004 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.367486 4998 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.367989 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" 
containerName="kube-apiserver" containerID="cri-o://db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b" gracePeriod=15 Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.368049 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf" gracePeriod=15 Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.368051 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972" gracePeriod=15 Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.368092 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9" gracePeriod=15 Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.368058 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731" gracePeriod=15 Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.369149 4998 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Feb 03 06:49:06 crc kubenswrapper[4998]: E0203 06:49:06.369477 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.369515 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Feb 03 06:49:06 crc kubenswrapper[4998]: E0203 06:49:06.369540 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.369556 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Feb 03 06:49:06 crc kubenswrapper[4998]: E0203 06:49:06.369581 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.369599 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Feb 03 06:49:06 crc kubenswrapper[4998]: E0203 06:49:06.369624 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.369643 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 03 06:49:06 crc kubenswrapper[4998]: E0203 06:49:06.369672 4998 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.369691 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Feb 03 06:49:06 crc kubenswrapper[4998]: E0203 06:49:06.369714 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.369734 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.370035 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.370069 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.370094 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.370125 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.370152 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Feb 03 06:49:06 crc kubenswrapper[4998]: E0203 06:49:06.370398 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.370422 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.370639 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.439867 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.439942 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.439966 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " 
pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.440002 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.440044 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.440032 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.440094 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.440127 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.440198 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.440211 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.440399 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.440486 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: 
\"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.440580 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.541430 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.541485 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.541566 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.541622 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.541642 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.541689 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.650931 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 06:49:06 crc kubenswrapper[4998]: E0203 06:49:06.670610 4998 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.129:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.1890a9cbe0a9c493 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-03 06:49:06.669282451 +0000 UTC m=+184.955976257,LastTimestamp:2026-02-03 06:49:06.669282451 +0000 UTC m=+184.955976257,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.792194 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.794149 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.795127 4998 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf" exitCode=0 Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.795156 4998 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731" exitCode=0 Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.795166 4998 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972" exitCode=0 Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.795176 4998 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9" exitCode=2 Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.795247 4998 scope.go:117] "RemoveContainer" containerID="35ab5e2fc89d8e658f04c9a5ed234dad845dcf70d39e89bd52fed8f84b6c8c68" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.797133 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"a77d3720f3d59076c1ccab57941a13ba6c1d1a06792c7e935d076890d4fd7046"} Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.798903 4998 generic.go:334] "Generic (PLEG): container finished" podID="55150a43-6d33-4ac4-b65e-04c8e15a8376" containerID="c61928c2032bdb02b0da4ca31bf3f630c6602b64b9f89f980ce7de6dfde5b074" 
exitCode=0 Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.798931 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"55150a43-6d33-4ac4-b65e-04c8e15a8376","Type":"ContainerDied","Data":"c61928c2032bdb02b0da4ca31bf3f630c6602b64b9f89f980ce7de6dfde5b074"} Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.799527 4998 status_manager.go:851] "Failed to get status for pod" podUID="55150a43-6d33-4ac4-b65e-04c8e15a8376" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.129:6443: connect: connection refused" Feb 03 06:49:06 crc kubenswrapper[4998]: I0203 06:49:06.799741 4998 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.129:6443: connect: connection refused" Feb 03 06:49:07 crc kubenswrapper[4998]: I0203 06:49:07.806576 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"a671fd8230490a2a795d53a0e895ceb3aac62782cad0a36c7ca98696b3a079b4"} Feb 03 06:49:07 crc kubenswrapper[4998]: I0203 06:49:07.807587 4998 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.129:6443: connect: connection refused" Feb 03 06:49:07 crc kubenswrapper[4998]: I0203 06:49:07.807982 4998 status_manager.go:851] "Failed to get status for pod" podUID="55150a43-6d33-4ac4-b65e-04c8e15a8376" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.129:6443: connect: connection refused" Feb 03 06:49:07 crc kubenswrapper[4998]: I0203 06:49:07.811119 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.048765 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.049614 4998 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.129:6443: connect: connection refused" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.050058 4998 status_manager.go:851] "Failed to get status for pod" podUID="55150a43-6d33-4ac4-b65e-04c8e15a8376" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.129:6443: connect: connection refused" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.064396 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/55150a43-6d33-4ac4-b65e-04c8e15a8376-kube-api-access\") pod \"55150a43-6d33-4ac4-b65e-04c8e15a8376\" (UID: \"55150a43-6d33-4ac4-b65e-04c8e15a8376\") " Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.064503 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/55150a43-6d33-4ac4-b65e-04c8e15a8376-var-lock\") pod \"55150a43-6d33-4ac4-b65e-04c8e15a8376\" (UID: \"55150a43-6d33-4ac4-b65e-04c8e15a8376\") " Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.064565 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/55150a43-6d33-4ac4-b65e-04c8e15a8376-kubelet-dir\") pod \"55150a43-6d33-4ac4-b65e-04c8e15a8376\" (UID: \"55150a43-6d33-4ac4-b65e-04c8e15a8376\") " Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.064617 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/55150a43-6d33-4ac4-b65e-04c8e15a8376-var-lock" (OuterVolumeSpecName: "var-lock") pod "55150a43-6d33-4ac4-b65e-04c8e15a8376" (UID: "55150a43-6d33-4ac4-b65e-04c8e15a8376"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.064759 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/55150a43-6d33-4ac4-b65e-04c8e15a8376-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "55150a43-6d33-4ac4-b65e-04c8e15a8376" (UID: "55150a43-6d33-4ac4-b65e-04c8e15a8376"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.064956 4998 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/55150a43-6d33-4ac4-b65e-04c8e15a8376-var-lock\") on node \"crc\" DevicePath \"\"" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.064985 4998 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/55150a43-6d33-4ac4-b65e-04c8e15a8376-kubelet-dir\") on node \"crc\" DevicePath \"\"" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.069476 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55150a43-6d33-4ac4-b65e-04c8e15a8376-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "55150a43-6d33-4ac4-b65e-04c8e15a8376" (UID: "55150a43-6d33-4ac4-b65e-04c8e15a8376"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.167057 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/55150a43-6d33-4ac4-b65e-04c8e15a8376-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 03 06:49:08 crc kubenswrapper[4998]: E0203 06:49:08.664441 4998 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.129:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.1890a9cbe0a9c493 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-03 06:49:06.669282451 +0000 UTC m=+184.955976257,LastTimestamp:2026-02-03 06:49:06.669282451 +0000 UTC m=+184.955976257,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.734246 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.735193 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.735990 4998 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.129:6443: connect: connection refused" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.736697 4998 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.129:6443: connect: connection refused" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.737264 4998 status_manager.go:851] "Failed to get status for pod" podUID="55150a43-6d33-4ac4-b65e-04c8e15a8376" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.129:6443: connect: connection refused" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.825628 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.826885 4998 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b" exitCode=0 Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.827008 4998 scope.go:117] "RemoveContainer" containerID="71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.827028 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.829757 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.829970 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"55150a43-6d33-4ac4-b65e-04c8e15a8376","Type":"ContainerDied","Data":"de5ca64e8513bc4e227873f25181c6f0d7c0bfa86696c7c9c1c4697861207caf"} Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.830050 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="de5ca64e8513bc4e227873f25181c6f0d7c0bfa86696c7c9c1c4697861207caf" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.841192 4998 status_manager.go:851] "Failed to get status for pod" podUID="55150a43-6d33-4ac4-b65e-04c8e15a8376" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.129:6443: connect: connection refused" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.841854 4998 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.129:6443: connect: connection refused" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.842428 4998 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.129:6443: connect: connection refused" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.852275 4998 scope.go:117] "RemoveContainer" containerID="f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.871684 4998 scope.go:117] "RemoveContainer" containerID="5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.887663 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.887757 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.887914 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.887983 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). 
InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.888300 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.888445 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.888665 4998 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.888712 4998 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.888737 4998 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.891346 4998 scope.go:117] "RemoveContainer" containerID="83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.912950 4998 scope.go:117] "RemoveContainer" containerID="db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.935372 4998 scope.go:117] "RemoveContainer" containerID="09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.965219 4998 scope.go:117] "RemoveContainer" containerID="71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf" Feb 03 06:49:08 crc kubenswrapper[4998]: E0203 06:49:08.965889 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\": container with ID starting with 71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf not found: ID does not exist" containerID="71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.965963 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf"} err="failed to get container status \"71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\": rpc error: code = NotFound desc = could not find container \"71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf\": container with ID starting with 71f1515029d6cab73375dd9f74bc1282f2e0209a50ac805b4a283f5321013cdf not found: ID does not exist" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.966007 4998 scope.go:117] "RemoveContainer" 
containerID="f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731" Feb 03 06:49:08 crc kubenswrapper[4998]: E0203 06:49:08.966773 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\": container with ID starting with f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731 not found: ID does not exist" containerID="f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.966877 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731"} err="failed to get container status \"f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\": rpc error: code = NotFound desc = could not find container \"f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731\": container with ID starting with f202ee2ad98c1fa18e86de355958ae24804a9377dcb8ecb2305696125d36e731 not found: ID does not exist" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.966936 4998 scope.go:117] "RemoveContainer" containerID="5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972" Feb 03 06:49:08 crc kubenswrapper[4998]: E0203 06:49:08.967530 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\": container with ID starting with 5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972 not found: ID does not exist" containerID="5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.967581 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972"} err="failed to get container status \"5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\": rpc error: code = NotFound desc = could not find container \"5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972\": container with ID starting with 5fafee50dcea20b6bb6642a9df64ff5814e5109c3ffdeebc6e567b1bd466d972 not found: ID does not exist" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.967622 4998 scope.go:117] "RemoveContainer" containerID="83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9" Feb 03 06:49:08 crc kubenswrapper[4998]: E0203 06:49:08.981400 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\": container with ID starting with 83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9 not found: ID does not exist" containerID="83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.981511 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9"} err="failed to get container status \"83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\": rpc error: code = NotFound desc = could not find container \"83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9\": container with ID starting with 
83c1671ca47fb177b94aa339d451e2e7da43d0b570365de85dd93a5608e2ebc9 not found: ID does not exist" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.981555 4998 scope.go:117] "RemoveContainer" containerID="db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b" Feb 03 06:49:08 crc kubenswrapper[4998]: E0203 06:49:08.982547 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\": container with ID starting with db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b not found: ID does not exist" containerID="db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.982587 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b"} err="failed to get container status \"db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\": rpc error: code = NotFound desc = could not find container \"db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b\": container with ID starting with db6508ce9c679e0803e3fc2184c1632dd0d208398177e9c2e607f26cee18857b not found: ID does not exist" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.982612 4998 scope.go:117] "RemoveContainer" containerID="09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02" Feb 03 06:49:08 crc kubenswrapper[4998]: E0203 06:49:08.983799 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\": container with ID starting with 09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02 not found: ID does not exist" containerID="09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02" Feb 03 06:49:08 crc kubenswrapper[4998]: I0203 06:49:08.983844 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02"} err="failed to get container status \"09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\": rpc error: code = NotFound desc = could not find container \"09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02\": container with ID starting with 09b9caaf7cfd029eb2f2d3797665b027e6d9f787f9fb86bd01696e31a1e4fc02 not found: ID does not exist" Feb 03 06:49:09 crc kubenswrapper[4998]: I0203 06:49:09.141717 4998 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.129:6443: connect: connection refused" Feb 03 06:49:09 crc kubenswrapper[4998]: I0203 06:49:09.142161 4998 status_manager.go:851] "Failed to get status for pod" podUID="55150a43-6d33-4ac4-b65e-04c8e15a8376" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.129:6443: connect: connection refused" Feb 03 06:49:09 crc kubenswrapper[4998]: I0203 06:49:09.142443 4998 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" 
pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.129:6443: connect: connection refused" Feb 03 06:49:10 crc kubenswrapper[4998]: I0203 06:49:10.437408 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Feb 03 06:49:11 crc kubenswrapper[4998]: E0203 06:49:11.037772 4998 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.129:6443: connect: connection refused" Feb 03 06:49:11 crc kubenswrapper[4998]: E0203 06:49:11.038727 4998 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.129:6443: connect: connection refused" Feb 03 06:49:11 crc kubenswrapper[4998]: E0203 06:49:11.039297 4998 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.129:6443: connect: connection refused" Feb 03 06:49:11 crc kubenswrapper[4998]: E0203 06:49:11.042315 4998 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.129:6443: connect: connection refused" Feb 03 06:49:11 crc kubenswrapper[4998]: E0203 06:49:11.043100 4998 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.129:6443: connect: connection refused" Feb 03 06:49:11 crc kubenswrapper[4998]: I0203 06:49:11.043148 4998 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Feb 03 06:49:11 crc kubenswrapper[4998]: E0203 06:49:11.043456 4998 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.129:6443: connect: connection refused" interval="200ms" Feb 03 06:49:11 crc kubenswrapper[4998]: E0203 06:49:11.244930 4998 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.129:6443: connect: connection refused" interval="400ms" Feb 03 06:49:11 crc kubenswrapper[4998]: E0203 06:49:11.646259 4998 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.129:6443: connect: connection refused" interval="800ms" Feb 03 06:49:12 crc kubenswrapper[4998]: I0203 06:49:12.430099 4998 status_manager.go:851] "Failed to get status for pod" podUID="55150a43-6d33-4ac4-b65e-04c8e15a8376" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.129:6443: connect: connection refused" Feb 03 06:49:12 crc kubenswrapper[4998]: 
I0203 06:49:12.430514 4998 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.129:6443: connect: connection refused" Feb 03 06:49:12 crc kubenswrapper[4998]: E0203 06:49:12.447290 4998 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.129:6443: connect: connection refused" interval="1.6s" Feb 03 06:49:12 crc kubenswrapper[4998]: I0203 06:49:12.754411 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 06:49:12 crc kubenswrapper[4998]: I0203 06:49:12.754481 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 06:49:14 crc kubenswrapper[4998]: E0203 06:49:14.048629 4998 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.129:6443: connect: connection refused" interval="3.2s" Feb 03 06:49:14 crc kubenswrapper[4998]: E0203 06:49:14.572394 4998 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.129:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" volumeName="registry-storage" Feb 03 06:49:17 crc kubenswrapper[4998]: E0203 06:49:17.249943 4998 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.129:6443: connect: connection refused" interval="6.4s" Feb 03 06:49:18 crc kubenswrapper[4998]: E0203 06:49:18.666138 4998 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.129:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.1890a9cbe0a9c493 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on 
machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-03 06:49:06.669282451 +0000 UTC m=+184.955976257,LastTimestamp:2026-02-03 06:49:06.669282451 +0000 UTC m=+184.955976257,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 03 06:49:20 crc kubenswrapper[4998]: I0203 06:49:20.427433 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 06:49:20 crc kubenswrapper[4998]: I0203 06:49:20.428429 4998 status_manager.go:851] "Failed to get status for pod" podUID="55150a43-6d33-4ac4-b65e-04c8e15a8376" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.129:6443: connect: connection refused" Feb 03 06:49:20 crc kubenswrapper[4998]: I0203 06:49:20.428937 4998 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.129:6443: connect: connection refused" Feb 03 06:49:20 crc kubenswrapper[4998]: I0203 06:49:20.445880 4998 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="2c8c93fe-ba86-4899-a018-d24fb324de5c" Feb 03 06:49:20 crc kubenswrapper[4998]: I0203 06:49:20.445916 4998 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="2c8c93fe-ba86-4899-a018-d24fb324de5c" Feb 03 06:49:20 crc kubenswrapper[4998]: E0203 06:49:20.446295 4998 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.129:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 06:49:20 crc kubenswrapper[4998]: I0203 06:49:20.446674 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 06:49:20 crc kubenswrapper[4998]: W0203 06:49:20.466564 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-0822ba339b945c5b7b643bcde81395dbcd0d1148470f63a50e679bdda088b37f WatchSource:0}: Error finding container 0822ba339b945c5b7b643bcde81395dbcd0d1148470f63a50e679bdda088b37f: Status 404 returned error can't find the container with id 0822ba339b945c5b7b643bcde81395dbcd0d1148470f63a50e679bdda088b37f Feb 03 06:49:20 crc kubenswrapper[4998]: I0203 06:49:20.904511 4998 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="610b5a8e380a1ab799afea325a60e4d1a7adfd40f493e566a0bd0319b43d5919" exitCode=0 Feb 03 06:49:20 crc kubenswrapper[4998]: I0203 06:49:20.904628 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"610b5a8e380a1ab799afea325a60e4d1a7adfd40f493e566a0bd0319b43d5919"} Feb 03 06:49:20 crc kubenswrapper[4998]: I0203 06:49:20.904769 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"0822ba339b945c5b7b643bcde81395dbcd0d1148470f63a50e679bdda088b37f"} Feb 03 06:49:20 crc kubenswrapper[4998]: I0203 06:49:20.905166 4998 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="2c8c93fe-ba86-4899-a018-d24fb324de5c" Feb 03 06:49:20 crc kubenswrapper[4998]: I0203 06:49:20.905195 4998 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="2c8c93fe-ba86-4899-a018-d24fb324de5c" Feb 03 06:49:20 crc kubenswrapper[4998]: E0203 06:49:20.905854 4998 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.129:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 06:49:20 crc kubenswrapper[4998]: I0203 06:49:20.905913 4998 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.129:6443: connect: connection refused" Feb 03 06:49:20 crc kubenswrapper[4998]: I0203 06:49:20.906333 4998 status_manager.go:851] "Failed to get status for pod" podUID="55150a43-6d33-4ac4-b65e-04c8e15a8376" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.129:6443: connect: connection refused" Feb 03 06:49:21 crc kubenswrapper[4998]: I0203 06:49:21.917947 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Feb 03 06:49:21 crc kubenswrapper[4998]: I0203 06:49:21.918376 4998 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="3631ce704525fba5e76870d8f108ed0e5d21e9b061d2ffc99b84c1c4755aa0e2" 
exitCode=1 Feb 03 06:49:21 crc kubenswrapper[4998]: I0203 06:49:21.918436 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"3631ce704525fba5e76870d8f108ed0e5d21e9b061d2ffc99b84c1c4755aa0e2"} Feb 03 06:49:21 crc kubenswrapper[4998]: I0203 06:49:21.918901 4998 scope.go:117] "RemoveContainer" containerID="3631ce704525fba5e76870d8f108ed0e5d21e9b061d2ffc99b84c1c4755aa0e2" Feb 03 06:49:21 crc kubenswrapper[4998]: I0203 06:49:21.923318 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"eb9428a29c3964e08071c7842b096a8edf1d6e8ea4374db42b4811909a4c25bc"} Feb 03 06:49:21 crc kubenswrapper[4998]: I0203 06:49:21.923345 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"fc0527bbca293d34e0a5a9dd5f28858fcf50571480f663ffb0f0a23f9eb61a20"} Feb 03 06:49:21 crc kubenswrapper[4998]: I0203 06:49:21.923357 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"0dc7bc96d9a49c3ed0dbf2dcabccc98014bb40208497ef083833cdcb7b79741c"} Feb 03 06:49:21 crc kubenswrapper[4998]: I0203 06:49:21.923367 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"2240d0262e19ce98e7d68e8f0e344d0ae3e19d475370305931ba5eadadbfe8fd"} Feb 03 06:49:22 crc kubenswrapper[4998]: I0203 06:49:22.930919 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"f506fa6ddc1b6918b51189985b878d225a27fc639ee172ca6bb233a351a132e4"} Feb 03 06:49:22 crc kubenswrapper[4998]: I0203 06:49:22.931170 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 06:49:22 crc kubenswrapper[4998]: I0203 06:49:22.931211 4998 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="2c8c93fe-ba86-4899-a018-d24fb324de5c" Feb 03 06:49:22 crc kubenswrapper[4998]: I0203 06:49:22.931245 4998 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="2c8c93fe-ba86-4899-a018-d24fb324de5c" Feb 03 06:49:22 crc kubenswrapper[4998]: I0203 06:49:22.935362 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Feb 03 06:49:22 crc kubenswrapper[4998]: I0203 06:49:22.935414 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"79950478fff712297d3577b15a3ea105bf2902e9230679ab5357ff57a00c3aca"} Feb 03 06:49:25 crc kubenswrapper[4998]: I0203 06:49:25.447607 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 06:49:25 crc kubenswrapper[4998]: I0203 06:49:25.447886 4998 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 06:49:25 crc kubenswrapper[4998]: I0203 06:49:25.452454 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 06:49:26 crc kubenswrapper[4998]: I0203 06:49:26.542450 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw" podUID="263a6d98-1027-4782-8d2b-1b7274f389ea" containerName="oauth-openshift" containerID="cri-o://a41e006ef9230706aa008053cde8632ea1fa0cd66df8de82383b06da1ef90527" gracePeriod=15 Feb 03 06:49:26 crc kubenswrapper[4998]: I0203 06:49:26.873404 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw" Feb 03 06:49:26 crc kubenswrapper[4998]: I0203 06:49:26.955374 4998 generic.go:334] "Generic (PLEG): container finished" podID="263a6d98-1027-4782-8d2b-1b7274f389ea" containerID="a41e006ef9230706aa008053cde8632ea1fa0cd66df8de82383b06da1ef90527" exitCode=0 Feb 03 06:49:26 crc kubenswrapper[4998]: I0203 06:49:26.955419 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw" event={"ID":"263a6d98-1027-4782-8d2b-1b7274f389ea","Type":"ContainerDied","Data":"a41e006ef9230706aa008053cde8632ea1fa0cd66df8de82383b06da1ef90527"} Feb 03 06:49:26 crc kubenswrapper[4998]: I0203 06:49:26.955483 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw" event={"ID":"263a6d98-1027-4782-8d2b-1b7274f389ea","Type":"ContainerDied","Data":"aa33c98daf25020fb519f53af7525d0c0f3673d74714922012bbe6bf8e2a7321"} Feb 03 06:49:26 crc kubenswrapper[4998]: I0203 06:49:26.955505 4998 scope.go:117] "RemoveContainer" containerID="a41e006ef9230706aa008053cde8632ea1fa0cd66df8de82383b06da1ef90527" Feb 03 06:49:26 crc kubenswrapper[4998]: I0203 06:49:26.955425 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-zx8sw" Feb 03 06:49:26 crc kubenswrapper[4998]: I0203 06:49:26.971398 4998 scope.go:117] "RemoveContainer" containerID="a41e006ef9230706aa008053cde8632ea1fa0cd66df8de82383b06da1ef90527" Feb 03 06:49:26 crc kubenswrapper[4998]: E0203 06:49:26.971844 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a41e006ef9230706aa008053cde8632ea1fa0cd66df8de82383b06da1ef90527\": container with ID starting with a41e006ef9230706aa008053cde8632ea1fa0cd66df8de82383b06da1ef90527 not found: ID does not exist" containerID="a41e006ef9230706aa008053cde8632ea1fa0cd66df8de82383b06da1ef90527" Feb 03 06:49:26 crc kubenswrapper[4998]: I0203 06:49:26.971877 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a41e006ef9230706aa008053cde8632ea1fa0cd66df8de82383b06da1ef90527"} err="failed to get container status \"a41e006ef9230706aa008053cde8632ea1fa0cd66df8de82383b06da1ef90527\": rpc error: code = NotFound desc = could not find container \"a41e006ef9230706aa008053cde8632ea1fa0cd66df8de82383b06da1ef90527\": container with ID starting with a41e006ef9230706aa008053cde8632ea1fa0cd66df8de82383b06da1ef90527 not found: ID does not exist" Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.038262 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.039947 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/263a6d98-1027-4782-8d2b-1b7274f389ea-audit-dir\") pod \"263a6d98-1027-4782-8d2b-1b7274f389ea\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.040002 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-user-template-login\") pod \"263a6d98-1027-4782-8d2b-1b7274f389ea\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.040250 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-router-certs\") pod \"263a6d98-1027-4782-8d2b-1b7274f389ea\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.040306 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-user-template-provider-selection\") pod \"263a6d98-1027-4782-8d2b-1b7274f389ea\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.040332 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-session\") pod \"263a6d98-1027-4782-8d2b-1b7274f389ea\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.040338 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded 
for volume "kubernetes.io/host-path/263a6d98-1027-4782-8d2b-1b7274f389ea-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "263a6d98-1027-4782-8d2b-1b7274f389ea" (UID: "263a6d98-1027-4782-8d2b-1b7274f389ea"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.040358 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-serving-cert\") pod \"263a6d98-1027-4782-8d2b-1b7274f389ea\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.040384 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/263a6d98-1027-4782-8d2b-1b7274f389ea-audit-policies\") pod \"263a6d98-1027-4782-8d2b-1b7274f389ea\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.040416 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-cliconfig\") pod \"263a6d98-1027-4782-8d2b-1b7274f389ea\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.040437 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-user-template-error\") pod \"263a6d98-1027-4782-8d2b-1b7274f389ea\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.040464 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jjbxk\" (UniqueName: \"kubernetes.io/projected/263a6d98-1027-4782-8d2b-1b7274f389ea-kube-api-access-jjbxk\") pod \"263a6d98-1027-4782-8d2b-1b7274f389ea\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.040490 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-trusted-ca-bundle\") pod \"263a6d98-1027-4782-8d2b-1b7274f389ea\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.040516 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-ocp-branding-template\") pod \"263a6d98-1027-4782-8d2b-1b7274f389ea\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.040541 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-user-idp-0-file-data\") pod \"263a6d98-1027-4782-8d2b-1b7274f389ea\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.040573 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-service-ca\") pod \"263a6d98-1027-4782-8d2b-1b7274f389ea\" (UID: \"263a6d98-1027-4782-8d2b-1b7274f389ea\") " Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.040742 4998 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/263a6d98-1027-4782-8d2b-1b7274f389ea-audit-dir\") on node \"crc\" DevicePath \"\"" Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.041331 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "263a6d98-1027-4782-8d2b-1b7274f389ea" (UID: "263a6d98-1027-4782-8d2b-1b7274f389ea"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.041857 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/263a6d98-1027-4782-8d2b-1b7274f389ea-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "263a6d98-1027-4782-8d2b-1b7274f389ea" (UID: "263a6d98-1027-4782-8d2b-1b7274f389ea"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.042141 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "263a6d98-1027-4782-8d2b-1b7274f389ea" (UID: "263a6d98-1027-4782-8d2b-1b7274f389ea"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.042203 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "263a6d98-1027-4782-8d2b-1b7274f389ea" (UID: "263a6d98-1027-4782-8d2b-1b7274f389ea"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.046943 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "263a6d98-1027-4782-8d2b-1b7274f389ea" (UID: "263a6d98-1027-4782-8d2b-1b7274f389ea"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.047125 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "263a6d98-1027-4782-8d2b-1b7274f389ea" (UID: "263a6d98-1027-4782-8d2b-1b7274f389ea"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.047205 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "263a6d98-1027-4782-8d2b-1b7274f389ea" (UID: "263a6d98-1027-4782-8d2b-1b7274f389ea"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.047217 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/263a6d98-1027-4782-8d2b-1b7274f389ea-kube-api-access-jjbxk" (OuterVolumeSpecName: "kube-api-access-jjbxk") pod "263a6d98-1027-4782-8d2b-1b7274f389ea" (UID: "263a6d98-1027-4782-8d2b-1b7274f389ea"). InnerVolumeSpecName "kube-api-access-jjbxk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.047389 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "263a6d98-1027-4782-8d2b-1b7274f389ea" (UID: "263a6d98-1027-4782-8d2b-1b7274f389ea"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.047706 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "263a6d98-1027-4782-8d2b-1b7274f389ea" (UID: "263a6d98-1027-4782-8d2b-1b7274f389ea"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.047728 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "263a6d98-1027-4782-8d2b-1b7274f389ea" (UID: "263a6d98-1027-4782-8d2b-1b7274f389ea"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.048620 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "263a6d98-1027-4782-8d2b-1b7274f389ea" (UID: "263a6d98-1027-4782-8d2b-1b7274f389ea"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.051953 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "263a6d98-1027-4782-8d2b-1b7274f389ea" (UID: "263a6d98-1027-4782-8d2b-1b7274f389ea"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.143385 4998 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.143446 4998 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.143468 4998 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.143483 4998 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/263a6d98-1027-4782-8d2b-1b7274f389ea-audit-policies\") on node \"crc\" DevicePath \"\"" Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.143497 4998 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.143510 4998 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.143523 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jjbxk\" (UniqueName: \"kubernetes.io/projected/263a6d98-1027-4782-8d2b-1b7274f389ea-kube-api-access-jjbxk\") on node \"crc\" DevicePath \"\"" Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.143536 4998 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.143551 4998 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.143565 4998 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.143579 4998 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.143593 4998 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: 
\"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.143606 4998 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/263a6d98-1027-4782-8d2b-1b7274f389ea-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.941049 4998 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.962603 4998 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="2c8c93fe-ba86-4899-a018-d24fb324de5c" Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.962631 4998 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="2c8c93fe-ba86-4899-a018-d24fb324de5c" Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.967528 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 06:49:27 crc kubenswrapper[4998]: I0203 06:49:27.971185 4998 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="24cffacc-f8c8-469b-b4e4-527c99b60635" Feb 03 06:49:28 crc kubenswrapper[4998]: E0203 06:49:28.040445 4998 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: unknown (get configmaps)" logger="UnhandledError" Feb 03 06:49:28 crc kubenswrapper[4998]: E0203 06:49:28.466677 4998 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"audit\": Failed to watch *v1.ConfigMap: unknown (get configmaps)" logger="UnhandledError" Feb 03 06:49:28 crc kubenswrapper[4998]: I0203 06:49:28.686688 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 03 06:49:28 crc kubenswrapper[4998]: I0203 06:49:28.692086 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 03 06:49:28 crc kubenswrapper[4998]: E0203 06:49:28.725984 4998 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"v4-0-config-system-ocp-branding-template\": Failed to watch *v1.Secret: unknown (get secrets)" logger="UnhandledError" Feb 03 06:49:28 crc kubenswrapper[4998]: I0203 06:49:28.968247 4998 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="2c8c93fe-ba86-4899-a018-d24fb324de5c" Feb 03 06:49:28 crc kubenswrapper[4998]: I0203 06:49:28.968285 4998 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="2c8c93fe-ba86-4899-a018-d24fb324de5c" Feb 03 06:49:32 crc kubenswrapper[4998]: I0203 06:49:32.449332 4998 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="24cffacc-f8c8-469b-b4e4-527c99b60635" Feb 03 06:49:37 crc kubenswrapper[4998]: I0203 06:49:37.046674 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 03 06:49:37 crc kubenswrapper[4998]: I0203 06:49:37.307677 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Feb 03 06:49:37 crc kubenswrapper[4998]: I0203 06:49:37.357450 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Feb 03 06:49:38 crc kubenswrapper[4998]: I0203 06:49:38.537967 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Feb 03 06:49:38 crc kubenswrapper[4998]: I0203 06:49:38.721302 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 03 06:49:38 crc kubenswrapper[4998]: I0203 06:49:38.785520 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Feb 03 06:49:38 crc kubenswrapper[4998]: I0203 06:49:38.906973 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Feb 03 06:49:38 crc kubenswrapper[4998]: I0203 06:49:38.989749 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Feb 03 06:49:39 crc kubenswrapper[4998]: I0203 06:49:39.086003 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Feb 03 06:49:39 crc kubenswrapper[4998]: I0203 06:49:39.117101 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 03 06:49:39 crc kubenswrapper[4998]: I0203 06:49:39.151657 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Feb 03 06:49:39 crc kubenswrapper[4998]: I0203 06:49:39.243474 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Feb 03 06:49:39 crc kubenswrapper[4998]: I0203 06:49:39.276477 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Feb 03 06:49:39 crc kubenswrapper[4998]: I0203 06:49:39.379343 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Feb 03 06:49:39 crc kubenswrapper[4998]: I0203 06:49:39.427395 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Feb 03 06:49:39 crc kubenswrapper[4998]: I0203 06:49:39.445349 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Feb 03 06:49:39 crc kubenswrapper[4998]: I0203 06:49:39.484857 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Feb 03 06:49:39 crc kubenswrapper[4998]: I0203 06:49:39.496746 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Feb 03 06:49:39 crc kubenswrapper[4998]: I0203 06:49:39.600188 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Feb 03 06:49:39 crc kubenswrapper[4998]: I0203 06:49:39.689620 4998 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Feb 03 06:49:39 crc kubenswrapper[4998]: I0203 06:49:39.693747 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Feb 03 06:49:39 crc kubenswrapper[4998]: I0203 06:49:39.745290 4998 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Feb 03 06:49:39 crc kubenswrapper[4998]: I0203 06:49:39.746994 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Feb 03 06:49:39 crc kubenswrapper[4998]: I0203 06:49:39.747646 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=33.747629268 podStartE2EDuration="33.747629268s" podCreationTimestamp="2026-02-03 06:49:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:49:27.671496095 +0000 UTC m=+205.958189901" watchObservedRunningTime="2026-02-03 06:49:39.747629268 +0000 UTC m=+218.034323074" Feb 03 06:49:39 crc kubenswrapper[4998]: I0203 06:49:39.749429 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-authentication/oauth-openshift-558db77b4-zx8sw"] Feb 03 06:49:39 crc kubenswrapper[4998]: I0203 06:49:39.749478 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Feb 03 06:49:39 crc kubenswrapper[4998]: I0203 06:49:39.753342 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 06:49:39 crc kubenswrapper[4998]: I0203 06:49:39.767316 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=12.767295136 podStartE2EDuration="12.767295136s" podCreationTimestamp="2026-02-03 06:49:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:49:39.764147007 +0000 UTC m=+218.050840833" watchObservedRunningTime="2026-02-03 06:49:39.767295136 +0000 UTC m=+218.053988942" Feb 03 06:49:39 crc kubenswrapper[4998]: I0203 06:49:39.886202 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Feb 03 06:49:40 crc kubenswrapper[4998]: I0203 06:49:40.094191 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Feb 03 06:49:40 crc kubenswrapper[4998]: I0203 06:49:40.115946 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Feb 03 06:49:40 crc kubenswrapper[4998]: I0203 06:49:40.152344 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Feb 03 06:49:40 crc kubenswrapper[4998]: I0203 06:49:40.153014 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Feb 03 06:49:40 crc kubenswrapper[4998]: I0203 06:49:40.196499 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 03 06:49:40 crc kubenswrapper[4998]: I0203 06:49:40.436318 4998 
Feb 03 06:49:40 crc kubenswrapper[4998]: I0203 06:49:40.436318 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="263a6d98-1027-4782-8d2b-1b7274f389ea" path="/var/lib/kubelet/pods/263a6d98-1027-4782-8d2b-1b7274f389ea/volumes"
Feb 03 06:49:40 crc kubenswrapper[4998]: I0203 06:49:40.490996 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default"
Feb 03 06:49:40 crc kubenswrapper[4998]: I0203 06:49:40.610122 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq"
Feb 03 06:49:40 crc kubenswrapper[4998]: I0203 06:49:40.650882 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Feb 03 06:49:40 crc kubenswrapper[4998]: I0203 06:49:40.796921 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt"
Feb 03 06:49:40 crc kubenswrapper[4998]: I0203 06:49:40.843291 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Feb 03 06:49:40 crc kubenswrapper[4998]: I0203 06:49:40.865325 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Feb 03 06:49:40 crc kubenswrapper[4998]: I0203 06:49:40.913137 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Feb 03 06:49:40 crc kubenswrapper[4998]: I0203 06:49:40.936757 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Feb 03 06:49:41 crc kubenswrapper[4998]: I0203 06:49:41.014423 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Feb 03 06:49:41 crc kubenswrapper[4998]: I0203 06:49:41.022698 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config"
Feb 03 06:49:41 crc kubenswrapper[4998]: I0203 06:49:41.176030 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Feb 03 06:49:41 crc kubenswrapper[4998]: I0203 06:49:41.297265 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client"
Feb 03 06:49:41 crc kubenswrapper[4998]: I0203 06:49:41.404144 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert"
Feb 03 06:49:41 crc kubenswrapper[4998]: I0203 06:49:41.575858 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Feb 03 06:49:41 crc kubenswrapper[4998]: I0203 06:49:41.577975 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls"
Feb 03 06:49:41 crc kubenswrapper[4998]: I0203 06:49:41.658672 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1"
Feb 03 06:49:41 crc kubenswrapper[4998]: I0203 06:49:41.815257 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt"
Feb 03 06:49:41 crc kubenswrapper[4998]: I0203 06:49:41.859261 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt"
Feb 03 06:49:41 crc
kubenswrapper[4998]: I0203 06:49:41.930820 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Feb 03 06:49:41 crc kubenswrapper[4998]: I0203 06:49:41.964743 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Feb 03 06:49:41 crc kubenswrapper[4998]: I0203 06:49:41.988899 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Feb 03 06:49:42 crc kubenswrapper[4998]: I0203 06:49:42.133040 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Feb 03 06:49:42 crc kubenswrapper[4998]: I0203 06:49:42.141667 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Feb 03 06:49:42 crc kubenswrapper[4998]: I0203 06:49:42.208068 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Feb 03 06:49:42 crc kubenswrapper[4998]: I0203 06:49:42.225948 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Feb 03 06:49:42 crc kubenswrapper[4998]: I0203 06:49:42.319632 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Feb 03 06:49:42 crc kubenswrapper[4998]: I0203 06:49:42.398949 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Feb 03 06:49:42 crc kubenswrapper[4998]: I0203 06:49:42.447981 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Feb 03 06:49:42 crc kubenswrapper[4998]: I0203 06:49:42.508613 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Feb 03 06:49:42 crc kubenswrapper[4998]: I0203 06:49:42.525931 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Feb 03 06:49:42 crc kubenswrapper[4998]: I0203 06:49:42.554444 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Feb 03 06:49:42 crc kubenswrapper[4998]: I0203 06:49:42.571549 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Feb 03 06:49:42 crc kubenswrapper[4998]: I0203 06:49:42.582230 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 03 06:49:42 crc kubenswrapper[4998]: I0203 06:49:42.592979 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Feb 03 06:49:42 crc kubenswrapper[4998]: I0203 06:49:42.702051 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Feb 03 06:49:42 crc kubenswrapper[4998]: I0203 06:49:42.730273 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Feb 03 06:49:42 crc kubenswrapper[4998]: I0203 06:49:42.754484 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x 
container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 03 06:49:42 crc kubenswrapper[4998]: I0203 06:49:42.754534 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 03 06:49:42 crc kubenswrapper[4998]: I0203 06:49:42.754570 4998 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x"
Feb 03 06:49:42 crc kubenswrapper[4998]: I0203 06:49:42.755053 4998 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8ab581feac753b27611d84de9a135dada6c09b508339e915247f806c6d69db1e"} pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Feb 03 06:49:42 crc kubenswrapper[4998]: I0203 06:49:42.755102 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" containerID="cri-o://8ab581feac753b27611d84de9a135dada6c09b508339e915247f806c6d69db1e" gracePeriod=600
Feb 03 06:49:42 crc kubenswrapper[4998]: I0203 06:49:42.783460 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Feb 03 06:49:42 crc kubenswrapper[4998]: I0203 06:49:42.820564 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf"
Feb 03 06:49:42 crc kubenswrapper[4998]: I0203 06:49:42.848223 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt"
Feb 03 06:49:42 crc kubenswrapper[4998]: I0203 06:49:42.866101 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca"
Feb 03 06:49:42 crc kubenswrapper[4998]: I0203 06:49:42.867611 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt"
Feb 03 06:49:43 crc kubenswrapper[4998]: I0203 06:49:43.025959 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw"
Feb 03 06:49:43 crc kubenswrapper[4998]: I0203 06:49:43.051122 4998 generic.go:334] "Generic (PLEG): container finished" podID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerID="8ab581feac753b27611d84de9a135dada6c09b508339e915247f806c6d69db1e" exitCode=0
Feb 03 06:49:43 crc kubenswrapper[4998]: I0203 06:49:43.051167 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerDied","Data":"8ab581feac753b27611d84de9a135dada6c09b508339e915247f806c6d69db1e"}
Feb 03 06:49:43 crc kubenswrapper[4998]: I0203 06:49:43.051202 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerStarted","Data":"e7b04cf1ae37e7960e02eb60e4973e5642ac44c2988a51dc4f455c758b1cbe4b"}
Feb 03 06:49:43 crc kubenswrapper[4998]: I0203 06:49:43.068876 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Feb 03 06:49:43 crc kubenswrapper[4998]: I0203 06:49:43.117542 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt"
Feb 03 06:49:43 crc kubenswrapper[4998]: I0203 06:49:43.143816 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Feb 03 06:49:43 crc kubenswrapper[4998]: I0203 06:49:43.168190 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt"
Feb 03 06:49:43 crc kubenswrapper[4998]: I0203 06:49:43.236746 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Feb 03 06:49:43 crc kubenswrapper[4998]: I0203 06:49:43.275958 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Feb 03 06:49:43 crc kubenswrapper[4998]: I0203 06:49:43.293769 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config"
Feb 03 06:49:43 crc kubenswrapper[4998]: I0203 06:49:43.317287 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx"
Feb 03 06:49:43 crc kubenswrapper[4998]: I0203 06:49:43.398099 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt"
Feb 03 06:49:43 crc kubenswrapper[4998]: I0203 06:49:43.507207 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt"
Feb 03 06:49:43 crc kubenswrapper[4998]: I0203 06:49:43.516933 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Feb 03 06:49:43 crc kubenswrapper[4998]: I0203 06:49:43.800546 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Feb 03 06:49:43 crc kubenswrapper[4998]: I0203 06:49:43.880057 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates"
Feb 03 06:49:43 crc kubenswrapper[4998]: I0203 06:49:43.933697 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert"
Feb 03 06:49:44 crc kubenswrapper[4998]: I0203 06:49:44.013486 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Feb 03 06:49:44 crc kubenswrapper[4998]: I0203 06:49:44.027459 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Feb 03 06:49:44 crc kubenswrapper[4998]: I0203 06:49:44.107074 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Feb 03 06:49:44 crc kubenswrapper[4998]: I0203 06:49:44.182842 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 03 06:49:44 crc kubenswrapper[4998]: I0203 06:49:44.305950 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Feb 03 06:49:44 crc kubenswrapper[4998]: I0203 06:49:44.332735 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Feb 03 06:49:44 crc kubenswrapper[4998]: I0203 06:49:44.352351 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Feb 03 06:49:44 crc kubenswrapper[4998]: I0203 06:49:44.382507 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Feb 03 06:49:44 crc kubenswrapper[4998]: I0203 06:49:44.608812 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Feb 03 06:49:44 crc kubenswrapper[4998]: I0203 06:49:44.773965 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Feb 03 06:49:44 crc kubenswrapper[4998]: I0203 06:49:44.790413 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Feb 03 06:49:44 crc kubenswrapper[4998]: I0203 06:49:44.826922 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Feb 03 06:49:44 crc kubenswrapper[4998]: I0203 06:49:44.884428 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Feb 03 06:49:44 crc kubenswrapper[4998]: I0203 06:49:44.952023 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Feb 03 06:49:44 crc kubenswrapper[4998]: I0203 06:49:44.979656 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Feb 03 06:49:45 crc kubenswrapper[4998]: I0203 06:49:45.118991 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Feb 03 06:49:45 crc kubenswrapper[4998]: I0203 06:49:45.193458 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Feb 03 06:49:45 crc kubenswrapper[4998]: I0203 06:49:45.239034 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Feb 03 06:49:45 crc kubenswrapper[4998]: I0203 06:49:45.288768 4998 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Feb 03 06:49:45 crc kubenswrapper[4998]: I0203 06:49:45.360647 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Feb 03 06:49:45 crc kubenswrapper[4998]: I0203 06:49:45.407659 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Feb 03 06:49:45 crc kubenswrapper[4998]: I0203 06:49:45.456021 4998 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Feb 03 06:49:45 crc kubenswrapper[4998]: I0203 06:49:45.465708 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Feb 03 06:49:45 crc kubenswrapper[4998]: I0203 06:49:45.494446 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Feb 03 06:49:45 crc kubenswrapper[4998]: I0203 06:49:45.547125 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Feb 03 06:49:45 crc kubenswrapper[4998]: I0203 06:49:45.598173 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Feb 03 06:49:45 crc kubenswrapper[4998]: I0203 06:49:45.644477 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Feb 03 06:49:45 crc kubenswrapper[4998]: I0203 06:49:45.668163 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Feb 03 06:49:45 crc kubenswrapper[4998]: I0203 06:49:45.715012 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Feb 03 06:49:45 crc kubenswrapper[4998]: I0203 06:49:45.745238 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Feb 03 06:49:45 crc kubenswrapper[4998]: I0203 06:49:45.755428 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Feb 03 06:49:45 crc kubenswrapper[4998]: I0203 06:49:45.802560 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Feb 03 06:49:45 crc kubenswrapper[4998]: I0203 06:49:45.919574 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Feb 03 06:49:45 crc kubenswrapper[4998]: I0203 06:49:45.923587 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Feb 03 06:49:46 crc kubenswrapper[4998]: I0203 06:49:46.061426 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Feb 03 06:49:46 crc kubenswrapper[4998]: I0203 06:49:46.085628 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Feb 03 06:49:46 crc kubenswrapper[4998]: I0203 06:49:46.146325 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Feb 03 06:49:46 crc kubenswrapper[4998]: I0203 06:49:46.148390 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Feb 03 06:49:46 crc kubenswrapper[4998]: I0203 06:49:46.172242 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Feb 03 06:49:46 crc kubenswrapper[4998]: I0203 06:49:46.191473 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Feb 03 06:49:46 crc kubenswrapper[4998]: I0203 06:49:46.198881 4998 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Feb 03 06:49:46 crc kubenswrapper[4998]: I0203 06:49:46.207203 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Feb 03 06:49:46 crc kubenswrapper[4998]: I0203 06:49:46.300007 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Feb 03 06:49:46 crc kubenswrapper[4998]: I0203 06:49:46.310613 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Feb 03 06:49:46 crc kubenswrapper[4998]: I0203 06:49:46.329376 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Feb 03 06:49:46 crc kubenswrapper[4998]: I0203 06:49:46.360009 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Feb 03 06:49:46 crc kubenswrapper[4998]: I0203 06:49:46.438121 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Feb 03 06:49:46 crc kubenswrapper[4998]: I0203 06:49:46.470403 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Feb 03 06:49:46 crc kubenswrapper[4998]: I0203 06:49:46.477970 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Feb 03 06:49:46 crc kubenswrapper[4998]: I0203 06:49:46.537598 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Feb 03 06:49:46 crc kubenswrapper[4998]: I0203 06:49:46.632756 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Feb 03 06:49:46 crc kubenswrapper[4998]: I0203 06:49:46.786142 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Feb 03 06:49:46 crc kubenswrapper[4998]: I0203 06:49:46.788378 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Feb 03 06:49:46 crc kubenswrapper[4998]: I0203 06:49:46.793826 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Feb 03 06:49:46 crc kubenswrapper[4998]: I0203 06:49:46.899469 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Feb 03 06:49:46 crc kubenswrapper[4998]: I0203 06:49:46.973708 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 03 06:49:47 crc kubenswrapper[4998]: I0203 06:49:47.063256 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Feb 03 06:49:47 crc kubenswrapper[4998]: I0203 06:49:47.144591 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Feb 03 06:49:47 crc kubenswrapper[4998]: I0203 06:49:47.179124 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Feb 03 06:49:47 crc 
kubenswrapper[4998]: I0203 06:49:47.209596 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 03 06:49:47 crc kubenswrapper[4998]: I0203 06:49:47.262967 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Feb 03 06:49:47 crc kubenswrapper[4998]: I0203 06:49:47.322847 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Feb 03 06:49:47 crc kubenswrapper[4998]: I0203 06:49:47.339070 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Feb 03 06:49:47 crc kubenswrapper[4998]: I0203 06:49:47.347793 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Feb 03 06:49:47 crc kubenswrapper[4998]: I0203 06:49:47.400150 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 03 06:49:47 crc kubenswrapper[4998]: I0203 06:49:47.542810 4998 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Feb 03 06:49:47 crc kubenswrapper[4998]: I0203 06:49:47.581252 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Feb 03 06:49:47 crc kubenswrapper[4998]: I0203 06:49:47.672683 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Feb 03 06:49:47 crc kubenswrapper[4998]: I0203 06:49:47.727480 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Feb 03 06:49:47 crc kubenswrapper[4998]: I0203 06:49:47.831312 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Feb 03 06:49:47 crc kubenswrapper[4998]: I0203 06:49:47.865515 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 03 06:49:47 crc kubenswrapper[4998]: I0203 06:49:47.915590 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Feb 03 06:49:47 crc kubenswrapper[4998]: I0203 06:49:47.918152 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Feb 03 06:49:48 crc kubenswrapper[4998]: I0203 06:49:48.170564 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Feb 03 06:49:48 crc kubenswrapper[4998]: I0203 06:49:48.253345 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Feb 03 06:49:48 crc kubenswrapper[4998]: I0203 06:49:48.337842 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Feb 03 06:49:48 crc kubenswrapper[4998]: I0203 06:49:48.382064 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Feb 03 06:49:48 crc kubenswrapper[4998]: I0203 06:49:48.477770 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Feb 03 06:49:48 crc 
kubenswrapper[4998]: I0203 06:49:48.558152 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Feb 03 06:49:48 crc kubenswrapper[4998]: I0203 06:49:48.634655 4998 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Feb 03 06:49:48 crc kubenswrapper[4998]: I0203 06:49:48.651966 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Feb 03 06:49:48 crc kubenswrapper[4998]: I0203 06:49:48.747830 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Feb 03 06:49:48 crc kubenswrapper[4998]: I0203 06:49:48.865163 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Feb 03 06:49:48 crc kubenswrapper[4998]: I0203 06:49:48.870242 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Feb 03 06:49:48 crc kubenswrapper[4998]: I0203 06:49:48.927106 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Feb 03 06:49:48 crc kubenswrapper[4998]: I0203 06:49:48.927699 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Feb 03 06:49:48 crc kubenswrapper[4998]: I0203 06:49:48.930735 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Feb 03 06:49:48 crc kubenswrapper[4998]: I0203 06:49:48.935311 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Feb 03 06:49:48 crc kubenswrapper[4998]: I0203 06:49:48.953676 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Feb 03 06:49:48 crc kubenswrapper[4998]: I0203 06:49:48.971610 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Feb 03 06:49:49 crc kubenswrapper[4998]: I0203 06:49:49.014322 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Feb 03 06:49:49 crc kubenswrapper[4998]: I0203 06:49:49.054362 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Feb 03 06:49:49 crc kubenswrapper[4998]: I0203 06:49:49.096331 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Feb 03 06:49:49 crc kubenswrapper[4998]: I0203 06:49:49.110648 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Feb 03 06:49:49 crc kubenswrapper[4998]: I0203 06:49:49.144997 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Feb 03 06:49:49 crc kubenswrapper[4998]: I0203 06:49:49.206128 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Feb 03 06:49:49 crc kubenswrapper[4998]: I0203 06:49:49.297994 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 03 06:49:49 crc kubenswrapper[4998]: I0203 06:49:49.463818 4998 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Feb 03 06:49:49 crc kubenswrapper[4998]: I0203 06:49:49.525505 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Feb 03 06:49:49 crc kubenswrapper[4998]: I0203 06:49:49.616873 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Feb 03 06:49:49 crc kubenswrapper[4998]: I0203 06:49:49.660082 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Feb 03 06:49:49 crc kubenswrapper[4998]: I0203 06:49:49.695223 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Feb 03 06:49:49 crc kubenswrapper[4998]: I0203 06:49:49.703966 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Feb 03 06:49:49 crc kubenswrapper[4998]: I0203 06:49:49.808916 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Feb 03 06:49:49 crc kubenswrapper[4998]: I0203 06:49:49.824398 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Feb 03 06:49:49 crc kubenswrapper[4998]: I0203 06:49:49.866684 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Feb 03 06:49:49 crc kubenswrapper[4998]: I0203 06:49:49.894312 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Feb 03 06:49:49 crc kubenswrapper[4998]: I0203 06:49:49.937176 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Feb 03 06:49:49 crc kubenswrapper[4998]: I0203 06:49:49.964069 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Feb 03 06:49:49 crc kubenswrapper[4998]: I0203 06:49:49.968565 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.280023 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.311357 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.312181 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.327956 4998 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.328165 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://a671fd8230490a2a795d53a0e895ceb3aac62782cad0a36c7ca98696b3a079b4" gracePeriod=5 Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.359156 4998 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-console-operator"/"kube-root-ca.crt" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.384771 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.407527 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-5678f9c799-l7rnc"] Feb 03 06:49:50 crc kubenswrapper[4998]: E0203 06:49:50.408048 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="263a6d98-1027-4782-8d2b-1b7274f389ea" containerName="oauth-openshift" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.408197 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="263a6d98-1027-4782-8d2b-1b7274f389ea" containerName="oauth-openshift" Feb 03 06:49:50 crc kubenswrapper[4998]: E0203 06:49:50.408292 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55150a43-6d33-4ac4-b65e-04c8e15a8376" containerName="installer" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.408372 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="55150a43-6d33-4ac4-b65e-04c8e15a8376" containerName="installer" Feb 03 06:49:50 crc kubenswrapper[4998]: E0203 06:49:50.409062 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.409150 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.409332 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.409425 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="55150a43-6d33-4ac4-b65e-04c8e15a8376" containerName="installer" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.409507 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="263a6d98-1027-4782-8d2b-1b7274f389ea" containerName="oauth-openshift" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.410056 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.412269 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.412912 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.413506 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.416582 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.416993 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.417263 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.417577 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.417893 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.418217 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.418701 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.419008 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.419193 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.422006 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.426307 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.427265 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.427295 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzlcq\" (UniqueName: \"kubernetes.io/projected/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-kube-api-access-kzlcq\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: 
\"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.427321 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-audit-policies\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.427342 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-v4-0-config-system-service-ca\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.427373 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.427415 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-v4-0-config-user-template-login\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.427447 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.427483 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-v4-0-config-system-router-certs\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.427503 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.427521 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: 
\"kubernetes.io/secret/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-v4-0-config-user-template-error\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.427540 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-v4-0-config-system-session\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.427566 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-audit-dir\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.427581 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.427638 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.432106 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.435302 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-5678f9c799-l7rnc"] Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.519416 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.529210 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.529268 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 
06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.529298 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzlcq\" (UniqueName: \"kubernetes.io/projected/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-kube-api-access-kzlcq\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.529329 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-audit-policies\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.529352 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-v4-0-config-system-service-ca\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.529380 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.529426 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-v4-0-config-user-template-login\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.529470 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.529501 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-v4-0-config-system-router-certs\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.529524 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: 
I0203 06:49:50.529549 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-v4-0-config-user-template-error\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.529570 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-v4-0-config-system-session\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.529591 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-audit-dir\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.529609 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.530458 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.530937 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-audit-policies\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.531044 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.531154 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-v4-0-config-system-service-ca\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.533476 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-audit-dir\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.535557 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-v4-0-config-system-session\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.536342 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.536532 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.536858 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-v4-0-config-user-template-error\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.538758 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-v4-0-config-system-router-certs\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.539252 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.540369 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-v4-0-config-user-template-login\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.543413 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.556988 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzlcq\" (UniqueName: \"kubernetes.io/projected/8c545b5d-659a-4acb-ad50-ddb81cf8ebd4-kube-api-access-kzlcq\") pod \"oauth-openshift-5678f9c799-l7rnc\" (UID: \"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4\") " pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.682389 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.737623 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:50 crc kubenswrapper[4998]: I0203 06:49:50.767356 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Feb 03 06:49:51 crc kubenswrapper[4998]: I0203 06:49:51.011269 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-5678f9c799-l7rnc"] Feb 03 06:49:51 crc kubenswrapper[4998]: I0203 06:49:51.100261 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" event={"ID":"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4","Type":"ContainerStarted","Data":"c799c008023d1fe830e117bcb017b8d0244f3ec211b35ff5697157202747da24"} Feb 03 06:49:51 crc kubenswrapper[4998]: I0203 06:49:51.131863 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Feb 03 06:49:51 crc kubenswrapper[4998]: I0203 06:49:51.188338 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Feb 03 06:49:51 crc kubenswrapper[4998]: I0203 06:49:51.213514 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Feb 03 06:49:51 crc kubenswrapper[4998]: I0203 06:49:51.287970 4998 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Feb 03 06:49:51 crc kubenswrapper[4998]: I0203 06:49:51.314269 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Feb 03 06:49:51 crc kubenswrapper[4998]: I0203 06:49:51.512256 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Feb 03 06:49:51 crc kubenswrapper[4998]: I0203 06:49:51.603278 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Feb 03 06:49:51 crc kubenswrapper[4998]: I0203 06:49:51.696956 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Feb 03 06:49:51 crc kubenswrapper[4998]: I0203 06:49:51.744550 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Feb 03 06:49:51 crc kubenswrapper[4998]: I0203 06:49:51.778898 4998 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Feb 03 06:49:51 crc kubenswrapper[4998]: I0203 06:49:51.835020 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Feb 03 06:49:51 crc kubenswrapper[4998]: I0203 06:49:51.962889 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Feb 03 06:49:51 crc kubenswrapper[4998]: I0203 06:49:51.968116 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Feb 03 06:49:52 crc kubenswrapper[4998]: I0203 06:49:52.008656 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Feb 03 06:49:52 crc kubenswrapper[4998]: I0203 06:49:52.061531 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Feb 03 06:49:52 crc kubenswrapper[4998]: I0203 06:49:52.107153 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" event={"ID":"8c545b5d-659a-4acb-ad50-ddb81cf8ebd4","Type":"ContainerStarted","Data":"59160efbf252ed2d6095fc796835d3be587207afc764f48209e92dc8e910ef1b"} Feb 03 06:49:52 crc kubenswrapper[4998]: I0203 06:49:52.107437 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:52 crc kubenswrapper[4998]: I0203 06:49:52.113500 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" Feb 03 06:49:52 crc kubenswrapper[4998]: I0203 06:49:52.129908 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-5678f9c799-l7rnc" podStartSLOduration=51.129894168 podStartE2EDuration="51.129894168s" podCreationTimestamp="2026-02-03 06:49:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:49:52.126258984 +0000 UTC m=+230.412952810" watchObservedRunningTime="2026-02-03 06:49:52.129894168 +0000 UTC m=+230.416587974" Feb 03 06:49:52 crc kubenswrapper[4998]: I0203 06:49:52.291288 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Feb 03 06:49:52 crc kubenswrapper[4998]: I0203 06:49:52.391921 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Feb 03 06:49:52 crc kubenswrapper[4998]: I0203 06:49:52.405257 4998 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Feb 03 06:49:52 crc kubenswrapper[4998]: I0203 06:49:52.446763 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Feb 03 06:49:52 crc kubenswrapper[4998]: I0203 06:49:52.500070 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Feb 03 06:49:52 crc kubenswrapper[4998]: I0203 06:49:52.608579 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Feb 03 06:49:52 crc kubenswrapper[4998]: I0203 06:49:52.757747 4998 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-apiserver"/"openshift-service-ca.crt" Feb 03 06:49:52 crc kubenswrapper[4998]: I0203 06:49:52.837267 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Feb 03 06:49:53 crc kubenswrapper[4998]: I0203 06:49:53.075098 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Feb 03 06:49:53 crc kubenswrapper[4998]: I0203 06:49:53.188554 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Feb 03 06:49:53 crc kubenswrapper[4998]: I0203 06:49:53.485465 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Feb 03 06:49:53 crc kubenswrapper[4998]: I0203 06:49:53.555835 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 03 06:49:53 crc kubenswrapper[4998]: I0203 06:49:53.738851 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Feb 03 06:49:54 crc kubenswrapper[4998]: I0203 06:49:54.229462 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Feb 03 06:49:54 crc kubenswrapper[4998]: I0203 06:49:54.299342 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Feb 03 06:49:54 crc kubenswrapper[4998]: I0203 06:49:54.372847 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Feb 03 06:49:54 crc kubenswrapper[4998]: I0203 06:49:54.514460 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Feb 03 06:49:54 crc kubenswrapper[4998]: I0203 06:49:54.841475 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Feb 03 06:49:55 crc kubenswrapper[4998]: I0203 06:49:55.897190 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Feb 03 06:49:55 crc kubenswrapper[4998]: I0203 06:49:55.897492 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 06:49:56 crc kubenswrapper[4998]: I0203 06:49:56.064225 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Feb 03 06:49:56 crc kubenswrapper[4998]: I0203 06:49:56.099726 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 03 06:49:56 crc kubenswrapper[4998]: I0203 06:49:56.099797 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 03 06:49:56 crc kubenswrapper[4998]: I0203 06:49:56.099837 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 03 06:49:56 crc kubenswrapper[4998]: I0203 06:49:56.099874 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 03 06:49:56 crc kubenswrapper[4998]: I0203 06:49:56.099922 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 03 06:49:56 crc kubenswrapper[4998]: I0203 06:49:56.099939 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 06:49:56 crc kubenswrapper[4998]: I0203 06:49:56.099972 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 06:49:56 crc kubenswrapper[4998]: I0203 06:49:56.099999 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 06:49:56 crc kubenswrapper[4998]: I0203 06:49:56.100139 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). 
InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 06:49:56 crc kubenswrapper[4998]: I0203 06:49:56.100166 4998 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Feb 03 06:49:56 crc kubenswrapper[4998]: I0203 06:49:56.100232 4998 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Feb 03 06:49:56 crc kubenswrapper[4998]: I0203 06:49:56.100254 4998 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Feb 03 06:49:56 crc kubenswrapper[4998]: I0203 06:49:56.114776 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 06:49:56 crc kubenswrapper[4998]: I0203 06:49:56.133734 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Feb 03 06:49:56 crc kubenswrapper[4998]: I0203 06:49:56.133842 4998 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="a671fd8230490a2a795d53a0e895ceb3aac62782cad0a36c7ca98696b3a079b4" exitCode=137 Feb 03 06:49:56 crc kubenswrapper[4998]: I0203 06:49:56.133904 4998 scope.go:117] "RemoveContainer" containerID="a671fd8230490a2a795d53a0e895ceb3aac62782cad0a36c7ca98696b3a079b4" Feb 03 06:49:56 crc kubenswrapper[4998]: I0203 06:49:56.134065 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 06:49:56 crc kubenswrapper[4998]: I0203 06:49:56.148094 4998 scope.go:117] "RemoveContainer" containerID="a671fd8230490a2a795d53a0e895ceb3aac62782cad0a36c7ca98696b3a079b4" Feb 03 06:49:56 crc kubenswrapper[4998]: E0203 06:49:56.148707 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a671fd8230490a2a795d53a0e895ceb3aac62782cad0a36c7ca98696b3a079b4\": container with ID starting with a671fd8230490a2a795d53a0e895ceb3aac62782cad0a36c7ca98696b3a079b4 not found: ID does not exist" containerID="a671fd8230490a2a795d53a0e895ceb3aac62782cad0a36c7ca98696b3a079b4" Feb 03 06:49:56 crc kubenswrapper[4998]: I0203 06:49:56.148772 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a671fd8230490a2a795d53a0e895ceb3aac62782cad0a36c7ca98696b3a079b4"} err="failed to get container status \"a671fd8230490a2a795d53a0e895ceb3aac62782cad0a36c7ca98696b3a079b4\": rpc error: code = NotFound desc = could not find container \"a671fd8230490a2a795d53a0e895ceb3aac62782cad0a36c7ca98696b3a079b4\": container with ID starting with a671fd8230490a2a795d53a0e895ceb3aac62782cad0a36c7ca98696b3a079b4 not found: ID does not exist" Feb 03 06:49:56 crc kubenswrapper[4998]: I0203 06:49:56.201466 4998 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Feb 03 06:49:56 crc kubenswrapper[4998]: I0203 06:49:56.201507 4998 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Feb 03 06:49:56 crc kubenswrapper[4998]: I0203 06:49:56.438418 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Feb 03 06:49:56 crc kubenswrapper[4998]: I0203 06:49:56.438934 4998 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="" Feb 03 06:49:56 crc kubenswrapper[4998]: I0203 06:49:56.454106 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Feb 03 06:49:56 crc kubenswrapper[4998]: I0203 06:49:56.454162 4998 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="b2335abf-4c09-4a15-826f-f06c2155228a" Feb 03 06:49:56 crc kubenswrapper[4998]: I0203 06:49:56.460142 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Feb 03 06:49:56 crc kubenswrapper[4998]: I0203 06:49:56.460190 4998 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="b2335abf-4c09-4a15-826f-f06c2155228a" Feb 03 06:50:10 crc kubenswrapper[4998]: I0203 06:50:10.219124 4998 generic.go:334] "Generic (PLEG): container finished" podID="8d0e557a-d9eb-4b35-a6a7-e6ab993d5464" containerID="3a64151fe912b987d3600efafaf1ba6a364f80d47a503b4ddf6cecedf9d90c0b" exitCode=0 Feb 03 06:50:10 crc kubenswrapper[4998]: I0203 06:50:10.219907 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/marketplace-operator-79b997595-6d7sj" event={"ID":"8d0e557a-d9eb-4b35-a6a7-e6ab993d5464","Type":"ContainerDied","Data":"3a64151fe912b987d3600efafaf1ba6a364f80d47a503b4ddf6cecedf9d90c0b"} Feb 03 06:50:10 crc kubenswrapper[4998]: I0203 06:50:10.220577 4998 scope.go:117] "RemoveContainer" containerID="3a64151fe912b987d3600efafaf1ba6a364f80d47a503b4ddf6cecedf9d90c0b" Feb 03 06:50:11 crc kubenswrapper[4998]: I0203 06:50:11.226115 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-6d7sj" event={"ID":"8d0e557a-d9eb-4b35-a6a7-e6ab993d5464","Type":"ContainerStarted","Data":"8ad4b8327e071d15241f172ee52e86c4403a6249e1c9ca5ce255f1192c9b29fc"} Feb 03 06:50:11 crc kubenswrapper[4998]: I0203 06:50:11.226897 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-6d7sj" Feb 03 06:50:11 crc kubenswrapper[4998]: I0203 06:50:11.228198 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-6d7sj" Feb 03 06:50:21 crc kubenswrapper[4998]: I0203 06:50:21.890299 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-cv476"] Feb 03 06:50:21 crc kubenswrapper[4998]: I0203 06:50:21.890996 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-cv476" podUID="943dc95b-dbe5-4561-bbe2-b2bf1680eb60" containerName="controller-manager" containerID="cri-o://8b7c1807439db6f4eea73a6694de57844b5e8d72234f280850f916433fc10c0c" gracePeriod=30 Feb 03 06:50:21 crc kubenswrapper[4998]: I0203 06:50:21.989418 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-f2w4s"] Feb 03 06:50:21 crc kubenswrapper[4998]: I0203 06:50:21.989645 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-f2w4s" podUID="2f8c8aee-306f-4517-80a8-61eb2ee886d7" containerName="route-controller-manager" containerID="cri-o://05d7aa1fc95a78195cb2dcb1006ad4356f8fe5455330ffee2e473975f9597125" gracePeriod=30 Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.243455 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-cv476" Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.298589 4998 generic.go:334] "Generic (PLEG): container finished" podID="943dc95b-dbe5-4561-bbe2-b2bf1680eb60" containerID="8b7c1807439db6f4eea73a6694de57844b5e8d72234f280850f916433fc10c0c" exitCode=0 Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.298661 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-cv476" event={"ID":"943dc95b-dbe5-4561-bbe2-b2bf1680eb60","Type":"ContainerDied","Data":"8b7c1807439db6f4eea73a6694de57844b5e8d72234f280850f916433fc10c0c"} Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.298696 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-cv476" event={"ID":"943dc95b-dbe5-4561-bbe2-b2bf1680eb60","Type":"ContainerDied","Data":"2f3e4aff546911b56fd78c407bcf2a71607acfa0ff82b6e7c3d5d42da128eb9a"} Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.298716 4998 scope.go:117] "RemoveContainer" containerID="8b7c1807439db6f4eea73a6694de57844b5e8d72234f280850f916433fc10c0c" Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.298856 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-cv476" Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.299582 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-f2w4s" Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.301647 4998 generic.go:334] "Generic (PLEG): container finished" podID="2f8c8aee-306f-4517-80a8-61eb2ee886d7" containerID="05d7aa1fc95a78195cb2dcb1006ad4356f8fe5455330ffee2e473975f9597125" exitCode=0 Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.301692 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-f2w4s" event={"ID":"2f8c8aee-306f-4517-80a8-61eb2ee886d7","Type":"ContainerDied","Data":"05d7aa1fc95a78195cb2dcb1006ad4356f8fe5455330ffee2e473975f9597125"} Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.301721 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-f2w4s" event={"ID":"2f8c8aee-306f-4517-80a8-61eb2ee886d7","Type":"ContainerDied","Data":"965237083f7e5a2433ab438390949660413195fe93bc11e0e83a7053287d1a56"} Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.323807 4998 scope.go:117] "RemoveContainer" containerID="8b7c1807439db6f4eea73a6694de57844b5e8d72234f280850f916433fc10c0c" Feb 03 06:50:22 crc kubenswrapper[4998]: E0203 06:50:22.333185 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b7c1807439db6f4eea73a6694de57844b5e8d72234f280850f916433fc10c0c\": container with ID starting with 8b7c1807439db6f4eea73a6694de57844b5e8d72234f280850f916433fc10c0c not found: ID does not exist" containerID="8b7c1807439db6f4eea73a6694de57844b5e8d72234f280850f916433fc10c0c" Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.333225 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b7c1807439db6f4eea73a6694de57844b5e8d72234f280850f916433fc10c0c"} err="failed to get container status 
\"8b7c1807439db6f4eea73a6694de57844b5e8d72234f280850f916433fc10c0c\": rpc error: code = NotFound desc = could not find container \"8b7c1807439db6f4eea73a6694de57844b5e8d72234f280850f916433fc10c0c\": container with ID starting with 8b7c1807439db6f4eea73a6694de57844b5e8d72234f280850f916433fc10c0c not found: ID does not exist" Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.333254 4998 scope.go:117] "RemoveContainer" containerID="05d7aa1fc95a78195cb2dcb1006ad4356f8fe5455330ffee2e473975f9597125" Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.348612 4998 scope.go:117] "RemoveContainer" containerID="05d7aa1fc95a78195cb2dcb1006ad4356f8fe5455330ffee2e473975f9597125" Feb 03 06:50:22 crc kubenswrapper[4998]: E0203 06:50:22.349056 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"05d7aa1fc95a78195cb2dcb1006ad4356f8fe5455330ffee2e473975f9597125\": container with ID starting with 05d7aa1fc95a78195cb2dcb1006ad4356f8fe5455330ffee2e473975f9597125 not found: ID does not exist" containerID="05d7aa1fc95a78195cb2dcb1006ad4356f8fe5455330ffee2e473975f9597125" Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.349089 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05d7aa1fc95a78195cb2dcb1006ad4356f8fe5455330ffee2e473975f9597125"} err="failed to get container status \"05d7aa1fc95a78195cb2dcb1006ad4356f8fe5455330ffee2e473975f9597125\": rpc error: code = NotFound desc = could not find container \"05d7aa1fc95a78195cb2dcb1006ad4356f8fe5455330ffee2e473975f9597125\": container with ID starting with 05d7aa1fc95a78195cb2dcb1006ad4356f8fe5455330ffee2e473975f9597125 not found: ID does not exist" Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.430905 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9m6dq\" (UniqueName: \"kubernetes.io/projected/943dc95b-dbe5-4561-bbe2-b2bf1680eb60-kube-api-access-9m6dq\") pod \"943dc95b-dbe5-4561-bbe2-b2bf1680eb60\" (UID: \"943dc95b-dbe5-4561-bbe2-b2bf1680eb60\") " Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.430954 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5ptqw\" (UniqueName: \"kubernetes.io/projected/2f8c8aee-306f-4517-80a8-61eb2ee886d7-kube-api-access-5ptqw\") pod \"2f8c8aee-306f-4517-80a8-61eb2ee886d7\" (UID: \"2f8c8aee-306f-4517-80a8-61eb2ee886d7\") " Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.430972 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f8c8aee-306f-4517-80a8-61eb2ee886d7-config\") pod \"2f8c8aee-306f-4517-80a8-61eb2ee886d7\" (UID: \"2f8c8aee-306f-4517-80a8-61eb2ee886d7\") " Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.431018 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/943dc95b-dbe5-4561-bbe2-b2bf1680eb60-serving-cert\") pod \"943dc95b-dbe5-4561-bbe2-b2bf1680eb60\" (UID: \"943dc95b-dbe5-4561-bbe2-b2bf1680eb60\") " Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.431033 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2f8c8aee-306f-4517-80a8-61eb2ee886d7-client-ca\") pod \"2f8c8aee-306f-4517-80a8-61eb2ee886d7\" (UID: \"2f8c8aee-306f-4517-80a8-61eb2ee886d7\") " Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 
06:50:22.431120 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2f8c8aee-306f-4517-80a8-61eb2ee886d7-serving-cert\") pod \"2f8c8aee-306f-4517-80a8-61eb2ee886d7\" (UID: \"2f8c8aee-306f-4517-80a8-61eb2ee886d7\") " Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.431141 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/943dc95b-dbe5-4561-bbe2-b2bf1680eb60-config\") pod \"943dc95b-dbe5-4561-bbe2-b2bf1680eb60\" (UID: \"943dc95b-dbe5-4561-bbe2-b2bf1680eb60\") " Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.431367 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/943dc95b-dbe5-4561-bbe2-b2bf1680eb60-client-ca\") pod \"943dc95b-dbe5-4561-bbe2-b2bf1680eb60\" (UID: \"943dc95b-dbe5-4561-bbe2-b2bf1680eb60\") " Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.431389 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/943dc95b-dbe5-4561-bbe2-b2bf1680eb60-proxy-ca-bundles\") pod \"943dc95b-dbe5-4561-bbe2-b2bf1680eb60\" (UID: \"943dc95b-dbe5-4561-bbe2-b2bf1680eb60\") " Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.432105 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f8c8aee-306f-4517-80a8-61eb2ee886d7-client-ca" (OuterVolumeSpecName: "client-ca") pod "2f8c8aee-306f-4517-80a8-61eb2ee886d7" (UID: "2f8c8aee-306f-4517-80a8-61eb2ee886d7"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.432333 4998 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2f8c8aee-306f-4517-80a8-61eb2ee886d7-client-ca\") on node \"crc\" DevicePath \"\"" Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.432465 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f8c8aee-306f-4517-80a8-61eb2ee886d7-config" (OuterVolumeSpecName: "config") pod "2f8c8aee-306f-4517-80a8-61eb2ee886d7" (UID: "2f8c8aee-306f-4517-80a8-61eb2ee886d7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.432448 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/943dc95b-dbe5-4561-bbe2-b2bf1680eb60-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "943dc95b-dbe5-4561-bbe2-b2bf1680eb60" (UID: "943dc95b-dbe5-4561-bbe2-b2bf1680eb60"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.432508 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/943dc95b-dbe5-4561-bbe2-b2bf1680eb60-config" (OuterVolumeSpecName: "config") pod "943dc95b-dbe5-4561-bbe2-b2bf1680eb60" (UID: "943dc95b-dbe5-4561-bbe2-b2bf1680eb60"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.433079 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/943dc95b-dbe5-4561-bbe2-b2bf1680eb60-client-ca" (OuterVolumeSpecName: "client-ca") pod "943dc95b-dbe5-4561-bbe2-b2bf1680eb60" (UID: "943dc95b-dbe5-4561-bbe2-b2bf1680eb60"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.437246 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f8c8aee-306f-4517-80a8-61eb2ee886d7-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "2f8c8aee-306f-4517-80a8-61eb2ee886d7" (UID: "2f8c8aee-306f-4517-80a8-61eb2ee886d7"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.437267 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f8c8aee-306f-4517-80a8-61eb2ee886d7-kube-api-access-5ptqw" (OuterVolumeSpecName: "kube-api-access-5ptqw") pod "2f8c8aee-306f-4517-80a8-61eb2ee886d7" (UID: "2f8c8aee-306f-4517-80a8-61eb2ee886d7"). InnerVolumeSpecName "kube-api-access-5ptqw". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.437413 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/943dc95b-dbe5-4561-bbe2-b2bf1680eb60-kube-api-access-9m6dq" (OuterVolumeSpecName: "kube-api-access-9m6dq") pod "943dc95b-dbe5-4561-bbe2-b2bf1680eb60" (UID: "943dc95b-dbe5-4561-bbe2-b2bf1680eb60"). InnerVolumeSpecName "kube-api-access-9m6dq". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.438676 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/943dc95b-dbe5-4561-bbe2-b2bf1680eb60-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "943dc95b-dbe5-4561-bbe2-b2bf1680eb60" (UID: "943dc95b-dbe5-4561-bbe2-b2bf1680eb60"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.533969 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9m6dq\" (UniqueName: \"kubernetes.io/projected/943dc95b-dbe5-4561-bbe2-b2bf1680eb60-kube-api-access-9m6dq\") on node \"crc\" DevicePath \"\"" Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.534012 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5ptqw\" (UniqueName: \"kubernetes.io/projected/2f8c8aee-306f-4517-80a8-61eb2ee886d7-kube-api-access-5ptqw\") on node \"crc\" DevicePath \"\"" Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.534030 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f8c8aee-306f-4517-80a8-61eb2ee886d7-config\") on node \"crc\" DevicePath \"\"" Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.534043 4998 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/943dc95b-dbe5-4561-bbe2-b2bf1680eb60-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.534056 4998 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2f8c8aee-306f-4517-80a8-61eb2ee886d7-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.534067 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/943dc95b-dbe5-4561-bbe2-b2bf1680eb60-config\") on node \"crc\" DevicePath \"\"" Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.534077 4998 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/943dc95b-dbe5-4561-bbe2-b2bf1680eb60-client-ca\") on node \"crc\" DevicePath \"\"" Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.534088 4998 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/943dc95b-dbe5-4561-bbe2-b2bf1680eb60-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.634659 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-cv476"] Feb 03 06:50:22 crc kubenswrapper[4998]: I0203 06:50:22.639232 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-cv476"] Feb 03 06:50:23 crc kubenswrapper[4998]: I0203 06:50:23.315312 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-f2w4s" Feb 03 06:50:23 crc kubenswrapper[4998]: I0203 06:50:23.353325 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-f2w4s"] Feb 03 06:50:23 crc kubenswrapper[4998]: I0203 06:50:23.357437 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-f2w4s"] Feb 03 06:50:23 crc kubenswrapper[4998]: I0203 06:50:23.872983 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-ffb978cbf-q7f7f"] Feb 03 06:50:23 crc kubenswrapper[4998]: E0203 06:50:23.873306 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f8c8aee-306f-4517-80a8-61eb2ee886d7" containerName="route-controller-manager" Feb 03 06:50:23 crc kubenswrapper[4998]: I0203 06:50:23.873332 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f8c8aee-306f-4517-80a8-61eb2ee886d7" containerName="route-controller-manager" Feb 03 06:50:23 crc kubenswrapper[4998]: E0203 06:50:23.873363 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="943dc95b-dbe5-4561-bbe2-b2bf1680eb60" containerName="controller-manager" Feb 03 06:50:23 crc kubenswrapper[4998]: I0203 06:50:23.873376 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="943dc95b-dbe5-4561-bbe2-b2bf1680eb60" containerName="controller-manager" Feb 03 06:50:23 crc kubenswrapper[4998]: I0203 06:50:23.873554 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="943dc95b-dbe5-4561-bbe2-b2bf1680eb60" containerName="controller-manager" Feb 03 06:50:23 crc kubenswrapper[4998]: I0203 06:50:23.873589 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f8c8aee-306f-4517-80a8-61eb2ee886d7" containerName="route-controller-manager" Feb 03 06:50:23 crc kubenswrapper[4998]: I0203 06:50:23.874126 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-ffb978cbf-q7f7f" Feb 03 06:50:23 crc kubenswrapper[4998]: I0203 06:50:23.877089 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-56c799745b-mpvhf"] Feb 03 06:50:23 crc kubenswrapper[4998]: I0203 06:50:23.880161 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 03 06:50:23 crc kubenswrapper[4998]: I0203 06:50:23.880252 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 03 06:50:23 crc kubenswrapper[4998]: I0203 06:50:23.880999 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 03 06:50:23 crc kubenswrapper[4998]: I0203 06:50:23.881337 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 03 06:50:23 crc kubenswrapper[4998]: I0203 06:50:23.881334 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 03 06:50:23 crc kubenswrapper[4998]: I0203 06:50:23.882249 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 03 06:50:23 crc kubenswrapper[4998]: I0203 06:50:23.882914 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-56c799745b-mpvhf" Feb 03 06:50:23 crc kubenswrapper[4998]: I0203 06:50:23.889553 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 03 06:50:23 crc kubenswrapper[4998]: I0203 06:50:23.889585 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 03 06:50:23 crc kubenswrapper[4998]: I0203 06:50:23.890406 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 03 06:50:23 crc kubenswrapper[4998]: I0203 06:50:23.890491 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 03 06:50:23 crc kubenswrapper[4998]: I0203 06:50:23.890536 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 03 06:50:23 crc kubenswrapper[4998]: I0203 06:50:23.890755 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 03 06:50:23 crc kubenswrapper[4998]: I0203 06:50:23.897892 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-56c799745b-mpvhf"] Feb 03 06:50:23 crc kubenswrapper[4998]: I0203 06:50:23.909643 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 03 06:50:23 crc kubenswrapper[4998]: I0203 06:50:23.915407 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-ffb978cbf-q7f7f"] Feb 03 06:50:24 crc kubenswrapper[4998]: I0203 06:50:24.050054 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/ca3975e5-3fb8-499f-af7a-dd7b5be6690a-serving-cert\") pod \"route-controller-manager-ffb978cbf-q7f7f\" (UID: \"ca3975e5-3fb8-499f-af7a-dd7b5be6690a\") " pod="openshift-route-controller-manager/route-controller-manager-ffb978cbf-q7f7f" Feb 03 06:50:24 crc kubenswrapper[4998]: I0203 06:50:24.050105 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7f4392aa-8f30-4159-92e3-82cf856455b2-serving-cert\") pod \"controller-manager-56c799745b-mpvhf\" (UID: \"7f4392aa-8f30-4159-92e3-82cf856455b2\") " pod="openshift-controller-manager/controller-manager-56c799745b-mpvhf" Feb 03 06:50:24 crc kubenswrapper[4998]: I0203 06:50:24.050138 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ca3975e5-3fb8-499f-af7a-dd7b5be6690a-client-ca\") pod \"route-controller-manager-ffb978cbf-q7f7f\" (UID: \"ca3975e5-3fb8-499f-af7a-dd7b5be6690a\") " pod="openshift-route-controller-manager/route-controller-manager-ffb978cbf-q7f7f" Feb 03 06:50:24 crc kubenswrapper[4998]: I0203 06:50:24.050156 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xx826\" (UniqueName: \"kubernetes.io/projected/ca3975e5-3fb8-499f-af7a-dd7b5be6690a-kube-api-access-xx826\") pod \"route-controller-manager-ffb978cbf-q7f7f\" (UID: \"ca3975e5-3fb8-499f-af7a-dd7b5be6690a\") " pod="openshift-route-controller-manager/route-controller-manager-ffb978cbf-q7f7f" Feb 03 06:50:24 crc kubenswrapper[4998]: I0203 06:50:24.050179 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca3975e5-3fb8-499f-af7a-dd7b5be6690a-config\") pod \"route-controller-manager-ffb978cbf-q7f7f\" (UID: \"ca3975e5-3fb8-499f-af7a-dd7b5be6690a\") " pod="openshift-route-controller-manager/route-controller-manager-ffb978cbf-q7f7f" Feb 03 06:50:24 crc kubenswrapper[4998]: I0203 06:50:24.050373 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f4392aa-8f30-4159-92e3-82cf856455b2-config\") pod \"controller-manager-56c799745b-mpvhf\" (UID: \"7f4392aa-8f30-4159-92e3-82cf856455b2\") " pod="openshift-controller-manager/controller-manager-56c799745b-mpvhf" Feb 03 06:50:24 crc kubenswrapper[4998]: I0203 06:50:24.050454 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qs8qm\" (UniqueName: \"kubernetes.io/projected/7f4392aa-8f30-4159-92e3-82cf856455b2-kube-api-access-qs8qm\") pod \"controller-manager-56c799745b-mpvhf\" (UID: \"7f4392aa-8f30-4159-92e3-82cf856455b2\") " pod="openshift-controller-manager/controller-manager-56c799745b-mpvhf" Feb 03 06:50:24 crc kubenswrapper[4998]: I0203 06:50:24.050540 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7f4392aa-8f30-4159-92e3-82cf856455b2-proxy-ca-bundles\") pod \"controller-manager-56c799745b-mpvhf\" (UID: \"7f4392aa-8f30-4159-92e3-82cf856455b2\") " pod="openshift-controller-manager/controller-manager-56c799745b-mpvhf" Feb 03 06:50:24 crc kubenswrapper[4998]: I0203 06:50:24.050582 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" 
(UniqueName: \"kubernetes.io/configmap/7f4392aa-8f30-4159-92e3-82cf856455b2-client-ca\") pod \"controller-manager-56c799745b-mpvhf\" (UID: \"7f4392aa-8f30-4159-92e3-82cf856455b2\") " pod="openshift-controller-manager/controller-manager-56c799745b-mpvhf" Feb 03 06:50:24 crc kubenswrapper[4998]: I0203 06:50:24.151842 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7f4392aa-8f30-4159-92e3-82cf856455b2-serving-cert\") pod \"controller-manager-56c799745b-mpvhf\" (UID: \"7f4392aa-8f30-4159-92e3-82cf856455b2\") " pod="openshift-controller-manager/controller-manager-56c799745b-mpvhf" Feb 03 06:50:24 crc kubenswrapper[4998]: I0203 06:50:24.151936 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ca3975e5-3fb8-499f-af7a-dd7b5be6690a-client-ca\") pod \"route-controller-manager-ffb978cbf-q7f7f\" (UID: \"ca3975e5-3fb8-499f-af7a-dd7b5be6690a\") " pod="openshift-route-controller-manager/route-controller-manager-ffb978cbf-q7f7f" Feb 03 06:50:24 crc kubenswrapper[4998]: I0203 06:50:24.151968 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xx826\" (UniqueName: \"kubernetes.io/projected/ca3975e5-3fb8-499f-af7a-dd7b5be6690a-kube-api-access-xx826\") pod \"route-controller-manager-ffb978cbf-q7f7f\" (UID: \"ca3975e5-3fb8-499f-af7a-dd7b5be6690a\") " pod="openshift-route-controller-manager/route-controller-manager-ffb978cbf-q7f7f" Feb 03 06:50:24 crc kubenswrapper[4998]: I0203 06:50:24.151993 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca3975e5-3fb8-499f-af7a-dd7b5be6690a-config\") pod \"route-controller-manager-ffb978cbf-q7f7f\" (UID: \"ca3975e5-3fb8-499f-af7a-dd7b5be6690a\") " pod="openshift-route-controller-manager/route-controller-manager-ffb978cbf-q7f7f" Feb 03 06:50:24 crc kubenswrapper[4998]: I0203 06:50:24.152036 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f4392aa-8f30-4159-92e3-82cf856455b2-config\") pod \"controller-manager-56c799745b-mpvhf\" (UID: \"7f4392aa-8f30-4159-92e3-82cf856455b2\") " pod="openshift-controller-manager/controller-manager-56c799745b-mpvhf" Feb 03 06:50:24 crc kubenswrapper[4998]: I0203 06:50:24.152058 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qs8qm\" (UniqueName: \"kubernetes.io/projected/7f4392aa-8f30-4159-92e3-82cf856455b2-kube-api-access-qs8qm\") pod \"controller-manager-56c799745b-mpvhf\" (UID: \"7f4392aa-8f30-4159-92e3-82cf856455b2\") " pod="openshift-controller-manager/controller-manager-56c799745b-mpvhf" Feb 03 06:50:24 crc kubenswrapper[4998]: I0203 06:50:24.152096 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7f4392aa-8f30-4159-92e3-82cf856455b2-proxy-ca-bundles\") pod \"controller-manager-56c799745b-mpvhf\" (UID: \"7f4392aa-8f30-4159-92e3-82cf856455b2\") " pod="openshift-controller-manager/controller-manager-56c799745b-mpvhf" Feb 03 06:50:24 crc kubenswrapper[4998]: I0203 06:50:24.152135 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7f4392aa-8f30-4159-92e3-82cf856455b2-client-ca\") pod \"controller-manager-56c799745b-mpvhf\" (UID: 
\"7f4392aa-8f30-4159-92e3-82cf856455b2\") " pod="openshift-controller-manager/controller-manager-56c799745b-mpvhf" Feb 03 06:50:24 crc kubenswrapper[4998]: I0203 06:50:24.152157 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca3975e5-3fb8-499f-af7a-dd7b5be6690a-serving-cert\") pod \"route-controller-manager-ffb978cbf-q7f7f\" (UID: \"ca3975e5-3fb8-499f-af7a-dd7b5be6690a\") " pod="openshift-route-controller-manager/route-controller-manager-ffb978cbf-q7f7f" Feb 03 06:50:24 crc kubenswrapper[4998]: I0203 06:50:24.153001 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ca3975e5-3fb8-499f-af7a-dd7b5be6690a-client-ca\") pod \"route-controller-manager-ffb978cbf-q7f7f\" (UID: \"ca3975e5-3fb8-499f-af7a-dd7b5be6690a\") " pod="openshift-route-controller-manager/route-controller-manager-ffb978cbf-q7f7f" Feb 03 06:50:24 crc kubenswrapper[4998]: I0203 06:50:24.154331 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7f4392aa-8f30-4159-92e3-82cf856455b2-proxy-ca-bundles\") pod \"controller-manager-56c799745b-mpvhf\" (UID: \"7f4392aa-8f30-4159-92e3-82cf856455b2\") " pod="openshift-controller-manager/controller-manager-56c799745b-mpvhf" Feb 03 06:50:24 crc kubenswrapper[4998]: I0203 06:50:24.155271 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f4392aa-8f30-4159-92e3-82cf856455b2-config\") pod \"controller-manager-56c799745b-mpvhf\" (UID: \"7f4392aa-8f30-4159-92e3-82cf856455b2\") " pod="openshift-controller-manager/controller-manager-56c799745b-mpvhf" Feb 03 06:50:24 crc kubenswrapper[4998]: I0203 06:50:24.155487 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7f4392aa-8f30-4159-92e3-82cf856455b2-client-ca\") pod \"controller-manager-56c799745b-mpvhf\" (UID: \"7f4392aa-8f30-4159-92e3-82cf856455b2\") " pod="openshift-controller-manager/controller-manager-56c799745b-mpvhf" Feb 03 06:50:24 crc kubenswrapper[4998]: I0203 06:50:24.156650 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca3975e5-3fb8-499f-af7a-dd7b5be6690a-config\") pod \"route-controller-manager-ffb978cbf-q7f7f\" (UID: \"ca3975e5-3fb8-499f-af7a-dd7b5be6690a\") " pod="openshift-route-controller-manager/route-controller-manager-ffb978cbf-q7f7f" Feb 03 06:50:24 crc kubenswrapper[4998]: I0203 06:50:24.158422 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7f4392aa-8f30-4159-92e3-82cf856455b2-serving-cert\") pod \"controller-manager-56c799745b-mpvhf\" (UID: \"7f4392aa-8f30-4159-92e3-82cf856455b2\") " pod="openshift-controller-manager/controller-manager-56c799745b-mpvhf" Feb 03 06:50:24 crc kubenswrapper[4998]: I0203 06:50:24.158445 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca3975e5-3fb8-499f-af7a-dd7b5be6690a-serving-cert\") pod \"route-controller-manager-ffb978cbf-q7f7f\" (UID: \"ca3975e5-3fb8-499f-af7a-dd7b5be6690a\") " pod="openshift-route-controller-manager/route-controller-manager-ffb978cbf-q7f7f" Feb 03 06:50:24 crc kubenswrapper[4998]: I0203 06:50:24.179951 4998 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"kube-api-access-xx826\" (UniqueName: \"kubernetes.io/projected/ca3975e5-3fb8-499f-af7a-dd7b5be6690a-kube-api-access-xx826\") pod \"route-controller-manager-ffb978cbf-q7f7f\" (UID: \"ca3975e5-3fb8-499f-af7a-dd7b5be6690a\") " pod="openshift-route-controller-manager/route-controller-manager-ffb978cbf-q7f7f" Feb 03 06:50:24 crc kubenswrapper[4998]: I0203 06:50:24.183810 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qs8qm\" (UniqueName: \"kubernetes.io/projected/7f4392aa-8f30-4159-92e3-82cf856455b2-kube-api-access-qs8qm\") pod \"controller-manager-56c799745b-mpvhf\" (UID: \"7f4392aa-8f30-4159-92e3-82cf856455b2\") " pod="openshift-controller-manager/controller-manager-56c799745b-mpvhf" Feb 03 06:50:24 crc kubenswrapper[4998]: I0203 06:50:24.212252 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-ffb978cbf-q7f7f" Feb 03 06:50:24 crc kubenswrapper[4998]: I0203 06:50:24.224286 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-56c799745b-mpvhf" Feb 03 06:50:24 crc kubenswrapper[4998]: I0203 06:50:24.372868 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-ffb978cbf-q7f7f"] Feb 03 06:50:24 crc kubenswrapper[4998]: W0203 06:50:24.379271 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podca3975e5_3fb8_499f_af7a_dd7b5be6690a.slice/crio-0035106b892b6e10cd64319c7ede0f2a8c54fa9af9733f793234eafaa96a2359 WatchSource:0}: Error finding container 0035106b892b6e10cd64319c7ede0f2a8c54fa9af9733f793234eafaa96a2359: Status 404 returned error can't find the container with id 0035106b892b6e10cd64319c7ede0f2a8c54fa9af9733f793234eafaa96a2359 Feb 03 06:50:24 crc kubenswrapper[4998]: I0203 06:50:24.411021 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-56c799745b-mpvhf"] Feb 03 06:50:24 crc kubenswrapper[4998]: W0203 06:50:24.419694 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7f4392aa_8f30_4159_92e3_82cf856455b2.slice/crio-78924a85d7718bab4523e416ff2188e1fb026fc4ab8b1da4f8d495524f75b4a9 WatchSource:0}: Error finding container 78924a85d7718bab4523e416ff2188e1fb026fc4ab8b1da4f8d495524f75b4a9: Status 404 returned error can't find the container with id 78924a85d7718bab4523e416ff2188e1fb026fc4ab8b1da4f8d495524f75b4a9 Feb 03 06:50:24 crc kubenswrapper[4998]: I0203 06:50:24.433991 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f8c8aee-306f-4517-80a8-61eb2ee886d7" path="/var/lib/kubelet/pods/2f8c8aee-306f-4517-80a8-61eb2ee886d7/volumes" Feb 03 06:50:24 crc kubenswrapper[4998]: I0203 06:50:24.434626 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="943dc95b-dbe5-4561-bbe2-b2bf1680eb60" path="/var/lib/kubelet/pods/943dc95b-dbe5-4561-bbe2-b2bf1680eb60/volumes" Feb 03 06:50:25 crc kubenswrapper[4998]: I0203 06:50:25.329424 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-56c799745b-mpvhf" event={"ID":"7f4392aa-8f30-4159-92e3-82cf856455b2","Type":"ContainerStarted","Data":"0dcb07e074985f320e638bdbff6f011bf755a1f502029709bbc0c485adc2d3d2"} Feb 03 06:50:25 crc kubenswrapper[4998]: I0203 06:50:25.329685 
4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-56c799745b-mpvhf" event={"ID":"7f4392aa-8f30-4159-92e3-82cf856455b2","Type":"ContainerStarted","Data":"78924a85d7718bab4523e416ff2188e1fb026fc4ab8b1da4f8d495524f75b4a9"} Feb 03 06:50:25 crc kubenswrapper[4998]: I0203 06:50:25.332081 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-56c799745b-mpvhf" Feb 03 06:50:25 crc kubenswrapper[4998]: I0203 06:50:25.336362 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-56c799745b-mpvhf" Feb 03 06:50:25 crc kubenswrapper[4998]: I0203 06:50:25.336492 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-ffb978cbf-q7f7f" event={"ID":"ca3975e5-3fb8-499f-af7a-dd7b5be6690a","Type":"ContainerStarted","Data":"ea1acaa1a688f307459419b3951fdbe32bfee88d6266e5bc2746e0044cae157e"} Feb 03 06:50:25 crc kubenswrapper[4998]: I0203 06:50:25.336516 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-ffb978cbf-q7f7f" event={"ID":"ca3975e5-3fb8-499f-af7a-dd7b5be6690a","Type":"ContainerStarted","Data":"0035106b892b6e10cd64319c7ede0f2a8c54fa9af9733f793234eafaa96a2359"} Feb 03 06:50:25 crc kubenswrapper[4998]: I0203 06:50:25.336835 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-ffb978cbf-q7f7f" Feb 03 06:50:25 crc kubenswrapper[4998]: I0203 06:50:25.341430 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-ffb978cbf-q7f7f" Feb 03 06:50:25 crc kubenswrapper[4998]: I0203 06:50:25.359045 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-56c799745b-mpvhf" podStartSLOduration=4.359021185 podStartE2EDuration="4.359021185s" podCreationTimestamp="2026-02-03 06:50:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:50:25.354910744 +0000 UTC m=+263.641604640" watchObservedRunningTime="2026-02-03 06:50:25.359021185 +0000 UTC m=+263.645715001" Feb 03 06:50:25 crc kubenswrapper[4998]: I0203 06:50:25.417752 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-ffb978cbf-q7f7f" podStartSLOduration=3.417578632 podStartE2EDuration="3.417578632s" podCreationTimestamp="2026-02-03 06:50:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:50:25.414255247 +0000 UTC m=+263.700949093" watchObservedRunningTime="2026-02-03 06:50:25.417578632 +0000 UTC m=+263.704272478" Feb 03 06:50:27 crc kubenswrapper[4998]: I0203 06:50:27.651688 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-56c799745b-mpvhf"] Feb 03 06:50:27 crc kubenswrapper[4998]: I0203 06:50:27.654996 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-ffb978cbf-q7f7f"] Feb 03 06:50:28 crc kubenswrapper[4998]: I0203 06:50:28.351290 4998 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-controller-manager/controller-manager-56c799745b-mpvhf" podUID="7f4392aa-8f30-4159-92e3-82cf856455b2" containerName="controller-manager" containerID="cri-o://0dcb07e074985f320e638bdbff6f011bf755a1f502029709bbc0c485adc2d3d2" gracePeriod=30 Feb 03 06:50:28 crc kubenswrapper[4998]: I0203 06:50:28.351399 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-ffb978cbf-q7f7f" podUID="ca3975e5-3fb8-499f-af7a-dd7b5be6690a" containerName="route-controller-manager" containerID="cri-o://ea1acaa1a688f307459419b3951fdbe32bfee88d6266e5bc2746e0044cae157e" gracePeriod=30 Feb 03 06:50:28 crc kubenswrapper[4998]: I0203 06:50:28.887482 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-ffb978cbf-q7f7f" Feb 03 06:50:28 crc kubenswrapper[4998]: I0203 06:50:28.914323 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-644d86f9bc-hzgnv"] Feb 03 06:50:28 crc kubenswrapper[4998]: E0203 06:50:28.914532 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca3975e5-3fb8-499f-af7a-dd7b5be6690a" containerName="route-controller-manager" Feb 03 06:50:28 crc kubenswrapper[4998]: I0203 06:50:28.914543 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca3975e5-3fb8-499f-af7a-dd7b5be6690a" containerName="route-controller-manager" Feb 03 06:50:28 crc kubenswrapper[4998]: I0203 06:50:28.914636 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca3975e5-3fb8-499f-af7a-dd7b5be6690a" containerName="route-controller-manager" Feb 03 06:50:28 crc kubenswrapper[4998]: I0203 06:50:28.915013 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-hzgnv" Feb 03 06:50:28 crc kubenswrapper[4998]: I0203 06:50:28.924912 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-644d86f9bc-hzgnv"] Feb 03 06:50:28 crc kubenswrapper[4998]: I0203 06:50:28.929292 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-56c799745b-mpvhf" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.016734 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca3975e5-3fb8-499f-af7a-dd7b5be6690a-serving-cert\") pod \"ca3975e5-3fb8-499f-af7a-dd7b5be6690a\" (UID: \"ca3975e5-3fb8-499f-af7a-dd7b5be6690a\") " Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.016812 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xx826\" (UniqueName: \"kubernetes.io/projected/ca3975e5-3fb8-499f-af7a-dd7b5be6690a-kube-api-access-xx826\") pod \"ca3975e5-3fb8-499f-af7a-dd7b5be6690a\" (UID: \"ca3975e5-3fb8-499f-af7a-dd7b5be6690a\") " Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.016856 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca3975e5-3fb8-499f-af7a-dd7b5be6690a-config\") pod \"ca3975e5-3fb8-499f-af7a-dd7b5be6690a\" (UID: \"ca3975e5-3fb8-499f-af7a-dd7b5be6690a\") " Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.016930 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ca3975e5-3fb8-499f-af7a-dd7b5be6690a-client-ca\") pod \"ca3975e5-3fb8-499f-af7a-dd7b5be6690a\" (UID: \"ca3975e5-3fb8-499f-af7a-dd7b5be6690a\") " Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.017087 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/965ed13f-051e-4ae2-9669-d4eede4b70ee-serving-cert\") pod \"route-controller-manager-644d86f9bc-hzgnv\" (UID: \"965ed13f-051e-4ae2-9669-d4eede4b70ee\") " pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-hzgnv" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.017146 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqh86\" (UniqueName: \"kubernetes.io/projected/965ed13f-051e-4ae2-9669-d4eede4b70ee-kube-api-access-fqh86\") pod \"route-controller-manager-644d86f9bc-hzgnv\" (UID: \"965ed13f-051e-4ae2-9669-d4eede4b70ee\") " pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-hzgnv" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.017186 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/965ed13f-051e-4ae2-9669-d4eede4b70ee-config\") pod \"route-controller-manager-644d86f9bc-hzgnv\" (UID: \"965ed13f-051e-4ae2-9669-d4eede4b70ee\") " pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-hzgnv" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.017247 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/965ed13f-051e-4ae2-9669-d4eede4b70ee-client-ca\") pod \"route-controller-manager-644d86f9bc-hzgnv\" (UID: \"965ed13f-051e-4ae2-9669-d4eede4b70ee\") " pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-hzgnv" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.017798 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ca3975e5-3fb8-499f-af7a-dd7b5be6690a-client-ca" (OuterVolumeSpecName: 
"client-ca") pod "ca3975e5-3fb8-499f-af7a-dd7b5be6690a" (UID: "ca3975e5-3fb8-499f-af7a-dd7b5be6690a"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.018354 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ca3975e5-3fb8-499f-af7a-dd7b5be6690a-config" (OuterVolumeSpecName: "config") pod "ca3975e5-3fb8-499f-af7a-dd7b5be6690a" (UID: "ca3975e5-3fb8-499f-af7a-dd7b5be6690a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.022646 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca3975e5-3fb8-499f-af7a-dd7b5be6690a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "ca3975e5-3fb8-499f-af7a-dd7b5be6690a" (UID: "ca3975e5-3fb8-499f-af7a-dd7b5be6690a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.023226 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca3975e5-3fb8-499f-af7a-dd7b5be6690a-kube-api-access-xx826" (OuterVolumeSpecName: "kube-api-access-xx826") pod "ca3975e5-3fb8-499f-af7a-dd7b5be6690a" (UID: "ca3975e5-3fb8-499f-af7a-dd7b5be6690a"). InnerVolumeSpecName "kube-api-access-xx826". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.118354 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7f4392aa-8f30-4159-92e3-82cf856455b2-proxy-ca-bundles\") pod \"7f4392aa-8f30-4159-92e3-82cf856455b2\" (UID: \"7f4392aa-8f30-4159-92e3-82cf856455b2\") " Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.118413 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs8qm\" (UniqueName: \"kubernetes.io/projected/7f4392aa-8f30-4159-92e3-82cf856455b2-kube-api-access-qs8qm\") pod \"7f4392aa-8f30-4159-92e3-82cf856455b2\" (UID: \"7f4392aa-8f30-4159-92e3-82cf856455b2\") " Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.118466 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7f4392aa-8f30-4159-92e3-82cf856455b2-serving-cert\") pod \"7f4392aa-8f30-4159-92e3-82cf856455b2\" (UID: \"7f4392aa-8f30-4159-92e3-82cf856455b2\") " Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.118504 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7f4392aa-8f30-4159-92e3-82cf856455b2-client-ca\") pod \"7f4392aa-8f30-4159-92e3-82cf856455b2\" (UID: \"7f4392aa-8f30-4159-92e3-82cf856455b2\") " Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.118546 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f4392aa-8f30-4159-92e3-82cf856455b2-config\") pod \"7f4392aa-8f30-4159-92e3-82cf856455b2\" (UID: \"7f4392aa-8f30-4159-92e3-82cf856455b2\") " Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.118692 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/965ed13f-051e-4ae2-9669-d4eede4b70ee-client-ca\") pod \"route-controller-manager-644d86f9bc-hzgnv\" 
(UID: \"965ed13f-051e-4ae2-9669-d4eede4b70ee\") " pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-hzgnv" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.118734 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/965ed13f-051e-4ae2-9669-d4eede4b70ee-serving-cert\") pod \"route-controller-manager-644d86f9bc-hzgnv\" (UID: \"965ed13f-051e-4ae2-9669-d4eede4b70ee\") " pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-hzgnv" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.118799 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqh86\" (UniqueName: \"kubernetes.io/projected/965ed13f-051e-4ae2-9669-d4eede4b70ee-kube-api-access-fqh86\") pod \"route-controller-manager-644d86f9bc-hzgnv\" (UID: \"965ed13f-051e-4ae2-9669-d4eede4b70ee\") " pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-hzgnv" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.118843 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/965ed13f-051e-4ae2-9669-d4eede4b70ee-config\") pod \"route-controller-manager-644d86f9bc-hzgnv\" (UID: \"965ed13f-051e-4ae2-9669-d4eede4b70ee\") " pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-hzgnv" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.118926 4998 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca3975e5-3fb8-499f-af7a-dd7b5be6690a-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.118942 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xx826\" (UniqueName: \"kubernetes.io/projected/ca3975e5-3fb8-499f-af7a-dd7b5be6690a-kube-api-access-xx826\") on node \"crc\" DevicePath \"\"" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.118955 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca3975e5-3fb8-499f-af7a-dd7b5be6690a-config\") on node \"crc\" DevicePath \"\"" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.118964 4998 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ca3975e5-3fb8-499f-af7a-dd7b5be6690a-client-ca\") on node \"crc\" DevicePath \"\"" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.119207 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7f4392aa-8f30-4159-92e3-82cf856455b2-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7f4392aa-8f30-4159-92e3-82cf856455b2" (UID: "7f4392aa-8f30-4159-92e3-82cf856455b2"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.119223 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7f4392aa-8f30-4159-92e3-82cf856455b2-client-ca" (OuterVolumeSpecName: "client-ca") pod "7f4392aa-8f30-4159-92e3-82cf856455b2" (UID: "7f4392aa-8f30-4159-92e3-82cf856455b2"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.119368 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7f4392aa-8f30-4159-92e3-82cf856455b2-config" (OuterVolumeSpecName: "config") pod "7f4392aa-8f30-4159-92e3-82cf856455b2" (UID: "7f4392aa-8f30-4159-92e3-82cf856455b2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.120149 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/965ed13f-051e-4ae2-9669-d4eede4b70ee-client-ca\") pod \"route-controller-manager-644d86f9bc-hzgnv\" (UID: \"965ed13f-051e-4ae2-9669-d4eede4b70ee\") " pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-hzgnv" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.120180 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/965ed13f-051e-4ae2-9669-d4eede4b70ee-config\") pod \"route-controller-manager-644d86f9bc-hzgnv\" (UID: \"965ed13f-051e-4ae2-9669-d4eede4b70ee\") " pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-hzgnv" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.123653 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f4392aa-8f30-4159-92e3-82cf856455b2-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7f4392aa-8f30-4159-92e3-82cf856455b2" (UID: "7f4392aa-8f30-4159-92e3-82cf856455b2"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.123655 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f4392aa-8f30-4159-92e3-82cf856455b2-kube-api-access-qs8qm" (OuterVolumeSpecName: "kube-api-access-qs8qm") pod "7f4392aa-8f30-4159-92e3-82cf856455b2" (UID: "7f4392aa-8f30-4159-92e3-82cf856455b2"). InnerVolumeSpecName "kube-api-access-qs8qm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.124420 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/965ed13f-051e-4ae2-9669-d4eede4b70ee-serving-cert\") pod \"route-controller-manager-644d86f9bc-hzgnv\" (UID: \"965ed13f-051e-4ae2-9669-d4eede4b70ee\") " pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-hzgnv" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.135300 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqh86\" (UniqueName: \"kubernetes.io/projected/965ed13f-051e-4ae2-9669-d4eede4b70ee-kube-api-access-fqh86\") pod \"route-controller-manager-644d86f9bc-hzgnv\" (UID: \"965ed13f-051e-4ae2-9669-d4eede4b70ee\") " pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-hzgnv" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.220368 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f4392aa-8f30-4159-92e3-82cf856455b2-config\") on node \"crc\" DevicePath \"\"" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.220404 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs8qm\" (UniqueName: \"kubernetes.io/projected/7f4392aa-8f30-4159-92e3-82cf856455b2-kube-api-access-qs8qm\") on node \"crc\" DevicePath \"\"" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.220417 4998 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7f4392aa-8f30-4159-92e3-82cf856455b2-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.220425 4998 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7f4392aa-8f30-4159-92e3-82cf856455b2-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.220434 4998 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7f4392aa-8f30-4159-92e3-82cf856455b2-client-ca\") on node \"crc\" DevicePath \"\"" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.244728 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-hzgnv" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.358209 4998 generic.go:334] "Generic (PLEG): container finished" podID="ca3975e5-3fb8-499f-af7a-dd7b5be6690a" containerID="ea1acaa1a688f307459419b3951fdbe32bfee88d6266e5bc2746e0044cae157e" exitCode=0 Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.358273 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-ffb978cbf-q7f7f" event={"ID":"ca3975e5-3fb8-499f-af7a-dd7b5be6690a","Type":"ContainerDied","Data":"ea1acaa1a688f307459419b3951fdbe32bfee88d6266e5bc2746e0044cae157e"} Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.358300 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-ffb978cbf-q7f7f" event={"ID":"ca3975e5-3fb8-499f-af7a-dd7b5be6690a","Type":"ContainerDied","Data":"0035106b892b6e10cd64319c7ede0f2a8c54fa9af9733f793234eafaa96a2359"} Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.358320 4998 scope.go:117] "RemoveContainer" containerID="ea1acaa1a688f307459419b3951fdbe32bfee88d6266e5bc2746e0044cae157e" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.358420 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-ffb978cbf-q7f7f" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.366702 4998 generic.go:334] "Generic (PLEG): container finished" podID="7f4392aa-8f30-4159-92e3-82cf856455b2" containerID="0dcb07e074985f320e638bdbff6f011bf755a1f502029709bbc0c485adc2d3d2" exitCode=0 Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.366739 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-56c799745b-mpvhf" event={"ID":"7f4392aa-8f30-4159-92e3-82cf856455b2","Type":"ContainerDied","Data":"0dcb07e074985f320e638bdbff6f011bf755a1f502029709bbc0c485adc2d3d2"} Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.366767 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-56c799745b-mpvhf" event={"ID":"7f4392aa-8f30-4159-92e3-82cf856455b2","Type":"ContainerDied","Data":"78924a85d7718bab4523e416ff2188e1fb026fc4ab8b1da4f8d495524f75b4a9"} Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.366924 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-56c799745b-mpvhf" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.401181 4998 scope.go:117] "RemoveContainer" containerID="ea1acaa1a688f307459419b3951fdbe32bfee88d6266e5bc2746e0044cae157e" Feb 03 06:50:29 crc kubenswrapper[4998]: E0203 06:50:29.401892 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea1acaa1a688f307459419b3951fdbe32bfee88d6266e5bc2746e0044cae157e\": container with ID starting with ea1acaa1a688f307459419b3951fdbe32bfee88d6266e5bc2746e0044cae157e not found: ID does not exist" containerID="ea1acaa1a688f307459419b3951fdbe32bfee88d6266e5bc2746e0044cae157e" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.401967 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea1acaa1a688f307459419b3951fdbe32bfee88d6266e5bc2746e0044cae157e"} err="failed to get container status \"ea1acaa1a688f307459419b3951fdbe32bfee88d6266e5bc2746e0044cae157e\": rpc error: code = NotFound desc = could not find container \"ea1acaa1a688f307459419b3951fdbe32bfee88d6266e5bc2746e0044cae157e\": container with ID starting with ea1acaa1a688f307459419b3951fdbe32bfee88d6266e5bc2746e0044cae157e not found: ID does not exist" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.401993 4998 scope.go:117] "RemoveContainer" containerID="0dcb07e074985f320e638bdbff6f011bf755a1f502029709bbc0c485adc2d3d2" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.406106 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-ffb978cbf-q7f7f"] Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.411644 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-ffb978cbf-q7f7f"] Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.419556 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-56c799745b-mpvhf"] Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.421730 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-56c799745b-mpvhf"] Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.425010 4998 scope.go:117] "RemoveContainer" containerID="0dcb07e074985f320e638bdbff6f011bf755a1f502029709bbc0c485adc2d3d2" Feb 03 06:50:29 crc kubenswrapper[4998]: E0203 06:50:29.425466 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0dcb07e074985f320e638bdbff6f011bf755a1f502029709bbc0c485adc2d3d2\": container with ID starting with 0dcb07e074985f320e638bdbff6f011bf755a1f502029709bbc0c485adc2d3d2 not found: ID does not exist" containerID="0dcb07e074985f320e638bdbff6f011bf755a1f502029709bbc0c485adc2d3d2" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.425532 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0dcb07e074985f320e638bdbff6f011bf755a1f502029709bbc0c485adc2d3d2"} err="failed to get container status \"0dcb07e074985f320e638bdbff6f011bf755a1f502029709bbc0c485adc2d3d2\": rpc error: code = NotFound desc = could not find container \"0dcb07e074985f320e638bdbff6f011bf755a1f502029709bbc0c485adc2d3d2\": container with ID starting with 0dcb07e074985f320e638bdbff6f011bf755a1f502029709bbc0c485adc2d3d2 not found: ID does not exist" Feb 03 06:50:29 crc 
kubenswrapper[4998]: I0203 06:50:29.475895 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-644d86f9bc-hzgnv"] Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.731136 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-644d86f9bc-hzgnv"] Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.738133 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-85d898fc-rvbjf"] Feb 03 06:50:29 crc kubenswrapper[4998]: E0203 06:50:29.738311 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f4392aa-8f30-4159-92e3-82cf856455b2" containerName="controller-manager" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.738324 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f4392aa-8f30-4159-92e3-82cf856455b2" containerName="controller-manager" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.738414 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f4392aa-8f30-4159-92e3-82cf856455b2" containerName="controller-manager" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.738799 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-85d898fc-rvbjf" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.741681 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.741767 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.742955 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.743557 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.744889 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.745514 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.757304 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.758425 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-85d898fc-rvbjf"] Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.927431 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ea3764b6-1bb7-4884-aa73-2e29fad5b33e-client-ca\") pod \"controller-manager-85d898fc-rvbjf\" (UID: \"ea3764b6-1bb7-4884-aa73-2e29fad5b33e\") " pod="openshift-controller-manager/controller-manager-85d898fc-rvbjf" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.927478 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/ea3764b6-1bb7-4884-aa73-2e29fad5b33e-serving-cert\") pod \"controller-manager-85d898fc-rvbjf\" (UID: \"ea3764b6-1bb7-4884-aa73-2e29fad5b33e\") " pod="openshift-controller-manager/controller-manager-85d898fc-rvbjf" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.927502 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ea3764b6-1bb7-4884-aa73-2e29fad5b33e-proxy-ca-bundles\") pod \"controller-manager-85d898fc-rvbjf\" (UID: \"ea3764b6-1bb7-4884-aa73-2e29fad5b33e\") " pod="openshift-controller-manager/controller-manager-85d898fc-rvbjf" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.927522 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-md75x\" (UniqueName: \"kubernetes.io/projected/ea3764b6-1bb7-4884-aa73-2e29fad5b33e-kube-api-access-md75x\") pod \"controller-manager-85d898fc-rvbjf\" (UID: \"ea3764b6-1bb7-4884-aa73-2e29fad5b33e\") " pod="openshift-controller-manager/controller-manager-85d898fc-rvbjf" Feb 03 06:50:29 crc kubenswrapper[4998]: I0203 06:50:29.927562 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea3764b6-1bb7-4884-aa73-2e29fad5b33e-config\") pod \"controller-manager-85d898fc-rvbjf\" (UID: \"ea3764b6-1bb7-4884-aa73-2e29fad5b33e\") " pod="openshift-controller-manager/controller-manager-85d898fc-rvbjf" Feb 03 06:50:30 crc kubenswrapper[4998]: I0203 06:50:30.028322 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ea3764b6-1bb7-4884-aa73-2e29fad5b33e-client-ca\") pod \"controller-manager-85d898fc-rvbjf\" (UID: \"ea3764b6-1bb7-4884-aa73-2e29fad5b33e\") " pod="openshift-controller-manager/controller-manager-85d898fc-rvbjf" Feb 03 06:50:30 crc kubenswrapper[4998]: I0203 06:50:30.028613 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ea3764b6-1bb7-4884-aa73-2e29fad5b33e-serving-cert\") pod \"controller-manager-85d898fc-rvbjf\" (UID: \"ea3764b6-1bb7-4884-aa73-2e29fad5b33e\") " pod="openshift-controller-manager/controller-manager-85d898fc-rvbjf" Feb 03 06:50:30 crc kubenswrapper[4998]: I0203 06:50:30.028692 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ea3764b6-1bb7-4884-aa73-2e29fad5b33e-proxy-ca-bundles\") pod \"controller-manager-85d898fc-rvbjf\" (UID: \"ea3764b6-1bb7-4884-aa73-2e29fad5b33e\") " pod="openshift-controller-manager/controller-manager-85d898fc-rvbjf" Feb 03 06:50:30 crc kubenswrapper[4998]: I0203 06:50:30.028798 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-md75x\" (UniqueName: \"kubernetes.io/projected/ea3764b6-1bb7-4884-aa73-2e29fad5b33e-kube-api-access-md75x\") pod \"controller-manager-85d898fc-rvbjf\" (UID: \"ea3764b6-1bb7-4884-aa73-2e29fad5b33e\") " pod="openshift-controller-manager/controller-manager-85d898fc-rvbjf" Feb 03 06:50:30 crc kubenswrapper[4998]: I0203 06:50:30.028896 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea3764b6-1bb7-4884-aa73-2e29fad5b33e-config\") pod \"controller-manager-85d898fc-rvbjf\" (UID: \"ea3764b6-1bb7-4884-aa73-2e29fad5b33e\") 
" pod="openshift-controller-manager/controller-manager-85d898fc-rvbjf" Feb 03 06:50:30 crc kubenswrapper[4998]: I0203 06:50:30.030913 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea3764b6-1bb7-4884-aa73-2e29fad5b33e-config\") pod \"controller-manager-85d898fc-rvbjf\" (UID: \"ea3764b6-1bb7-4884-aa73-2e29fad5b33e\") " pod="openshift-controller-manager/controller-manager-85d898fc-rvbjf" Feb 03 06:50:30 crc kubenswrapper[4998]: I0203 06:50:30.031123 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ea3764b6-1bb7-4884-aa73-2e29fad5b33e-proxy-ca-bundles\") pod \"controller-manager-85d898fc-rvbjf\" (UID: \"ea3764b6-1bb7-4884-aa73-2e29fad5b33e\") " pod="openshift-controller-manager/controller-manager-85d898fc-rvbjf" Feb 03 06:50:30 crc kubenswrapper[4998]: I0203 06:50:30.031723 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ea3764b6-1bb7-4884-aa73-2e29fad5b33e-client-ca\") pod \"controller-manager-85d898fc-rvbjf\" (UID: \"ea3764b6-1bb7-4884-aa73-2e29fad5b33e\") " pod="openshift-controller-manager/controller-manager-85d898fc-rvbjf" Feb 03 06:50:30 crc kubenswrapper[4998]: I0203 06:50:30.034008 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ea3764b6-1bb7-4884-aa73-2e29fad5b33e-serving-cert\") pod \"controller-manager-85d898fc-rvbjf\" (UID: \"ea3764b6-1bb7-4884-aa73-2e29fad5b33e\") " pod="openshift-controller-manager/controller-manager-85d898fc-rvbjf" Feb 03 06:50:30 crc kubenswrapper[4998]: I0203 06:50:30.047159 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-md75x\" (UniqueName: \"kubernetes.io/projected/ea3764b6-1bb7-4884-aa73-2e29fad5b33e-kube-api-access-md75x\") pod \"controller-manager-85d898fc-rvbjf\" (UID: \"ea3764b6-1bb7-4884-aa73-2e29fad5b33e\") " pod="openshift-controller-manager/controller-manager-85d898fc-rvbjf" Feb 03 06:50:30 crc kubenswrapper[4998]: I0203 06:50:30.053871 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-85d898fc-rvbjf" Feb 03 06:50:30 crc kubenswrapper[4998]: I0203 06:50:30.280141 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-85d898fc-rvbjf"] Feb 03 06:50:30 crc kubenswrapper[4998]: I0203 06:50:30.372075 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-85d898fc-rvbjf" event={"ID":"ea3764b6-1bb7-4884-aa73-2e29fad5b33e","Type":"ContainerStarted","Data":"16d49e3c0e28e135a3200ed0a2304e31f4a2b8cd69e03a2c6a2e4de342995264"} Feb 03 06:50:30 crc kubenswrapper[4998]: I0203 06:50:30.373730 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-hzgnv" event={"ID":"965ed13f-051e-4ae2-9669-d4eede4b70ee","Type":"ContainerStarted","Data":"b4a2c30d51dd61c55e39a3ddf0f274f153a0eba11a507013c4f20a8378f5cc47"} Feb 03 06:50:30 crc kubenswrapper[4998]: I0203 06:50:30.373793 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-hzgnv" event={"ID":"965ed13f-051e-4ae2-9669-d4eede4b70ee","Type":"ContainerStarted","Data":"dddac7557bf701e5caf18ea287db6b947d160254bf6992620ffe1b9829a302f1"} Feb 03 06:50:30 crc kubenswrapper[4998]: I0203 06:50:30.373827 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-hzgnv" Feb 03 06:50:30 crc kubenswrapper[4998]: I0203 06:50:30.373831 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-hzgnv" podUID="965ed13f-051e-4ae2-9669-d4eede4b70ee" containerName="route-controller-manager" containerID="cri-o://b4a2c30d51dd61c55e39a3ddf0f274f153a0eba11a507013c4f20a8378f5cc47" gracePeriod=30 Feb 03 06:50:30 crc kubenswrapper[4998]: I0203 06:50:30.393420 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-hzgnv" podStartSLOduration=3.3934047019999998 podStartE2EDuration="3.393404702s" podCreationTimestamp="2026-02-03 06:50:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:50:30.391864783 +0000 UTC m=+268.678558619" watchObservedRunningTime="2026-02-03 06:50:30.393404702 +0000 UTC m=+268.680098508" Feb 03 06:50:30 crc kubenswrapper[4998]: I0203 06:50:30.434957 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f4392aa-8f30-4159-92e3-82cf856455b2" path="/var/lib/kubelet/pods/7f4392aa-8f30-4159-92e3-82cf856455b2/volumes" Feb 03 06:50:30 crc kubenswrapper[4998]: I0203 06:50:30.435733 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca3975e5-3fb8-499f-af7a-dd7b5be6690a" path="/var/lib/kubelet/pods/ca3975e5-3fb8-499f-af7a-dd7b5be6690a/volumes" Feb 03 06:50:30 crc kubenswrapper[4998]: I0203 06:50:30.515254 4998 patch_prober.go:28] interesting pod/route-controller-manager-644d86f9bc-hzgnv container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.60:8443/healthz\": read tcp 10.217.0.2:47164->10.217.0.60:8443: read: connection reset by peer" start-of-body= Feb 03 06:50:30 crc kubenswrapper[4998]: I0203 06:50:30.515325 4998 prober.go:107] "Probe 
failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-hzgnv" podUID="965ed13f-051e-4ae2-9669-d4eede4b70ee" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.60:8443/healthz\": read tcp 10.217.0.2:47164->10.217.0.60:8443: read: connection reset by peer" Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.382303 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-85d898fc-rvbjf" event={"ID":"ea3764b6-1bb7-4884-aa73-2e29fad5b33e","Type":"ContainerStarted","Data":"b58feb02f1372fc68ff5fa5cd66f5843b0872e419447172ba749e98c5a30634e"} Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.382571 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-85d898fc-rvbjf" Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.384426 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-route-controller-manager_route-controller-manager-644d86f9bc-hzgnv_965ed13f-051e-4ae2-9669-d4eede4b70ee/route-controller-manager/0.log" Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.384476 4998 generic.go:334] "Generic (PLEG): container finished" podID="965ed13f-051e-4ae2-9669-d4eede4b70ee" containerID="b4a2c30d51dd61c55e39a3ddf0f274f153a0eba11a507013c4f20a8378f5cc47" exitCode=255 Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.384508 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-hzgnv" event={"ID":"965ed13f-051e-4ae2-9669-d4eede4b70ee","Type":"ContainerDied","Data":"b4a2c30d51dd61c55e39a3ddf0f274f153a0eba11a507013c4f20a8378f5cc47"} Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.387365 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-85d898fc-rvbjf" Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.408103 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-85d898fc-rvbjf" podStartSLOduration=2.408081344 podStartE2EDuration="2.408081344s" podCreationTimestamp="2026-02-03 06:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:50:31.404738058 +0000 UTC m=+269.691431874" watchObservedRunningTime="2026-02-03 06:50:31.408081344 +0000 UTC m=+269.694775160" Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.426138 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-route-controller-manager_route-controller-manager-644d86f9bc-hzgnv_965ed13f-051e-4ae2-9669-d4eede4b70ee/route-controller-manager/0.log" Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.426202 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-hzgnv" Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.454655 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-67855659d4-lpw9z"] Feb 03 06:50:31 crc kubenswrapper[4998]: E0203 06:50:31.454950 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="965ed13f-051e-4ae2-9669-d4eede4b70ee" containerName="route-controller-manager" Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.454973 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="965ed13f-051e-4ae2-9669-d4eede4b70ee" containerName="route-controller-manager" Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.455089 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="965ed13f-051e-4ae2-9669-d4eede4b70ee" containerName="route-controller-manager" Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.455520 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-67855659d4-lpw9z" Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.482357 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-67855659d4-lpw9z"] Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.560259 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/965ed13f-051e-4ae2-9669-d4eede4b70ee-client-ca\") pod \"965ed13f-051e-4ae2-9669-d4eede4b70ee\" (UID: \"965ed13f-051e-4ae2-9669-d4eede4b70ee\") " Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.560931 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqh86\" (UniqueName: \"kubernetes.io/projected/965ed13f-051e-4ae2-9669-d4eede4b70ee-kube-api-access-fqh86\") pod \"965ed13f-051e-4ae2-9669-d4eede4b70ee\" (UID: \"965ed13f-051e-4ae2-9669-d4eede4b70ee\") " Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.561060 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/965ed13f-051e-4ae2-9669-d4eede4b70ee-serving-cert\") pod \"965ed13f-051e-4ae2-9669-d4eede4b70ee\" (UID: \"965ed13f-051e-4ae2-9669-d4eede4b70ee\") " Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.561173 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/965ed13f-051e-4ae2-9669-d4eede4b70ee-config\") pod \"965ed13f-051e-4ae2-9669-d4eede4b70ee\" (UID: \"965ed13f-051e-4ae2-9669-d4eede4b70ee\") " Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.561415 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f0c28c3-5340-40bd-88be-2192d07ba4b5-config\") pod \"route-controller-manager-67855659d4-lpw9z\" (UID: \"2f0c28c3-5340-40bd-88be-2192d07ba4b5\") " pod="openshift-route-controller-manager/route-controller-manager-67855659d4-lpw9z" Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.561510 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2f0c28c3-5340-40bd-88be-2192d07ba4b5-client-ca\") pod \"route-controller-manager-67855659d4-lpw9z\" (UID: 
\"2f0c28c3-5340-40bd-88be-2192d07ba4b5\") " pod="openshift-route-controller-manager/route-controller-manager-67855659d4-lpw9z" Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.561856 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2f0c28c3-5340-40bd-88be-2192d07ba4b5-serving-cert\") pod \"route-controller-manager-67855659d4-lpw9z\" (UID: \"2f0c28c3-5340-40bd-88be-2192d07ba4b5\") " pod="openshift-route-controller-manager/route-controller-manager-67855659d4-lpw9z" Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.561981 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v49x6\" (UniqueName: \"kubernetes.io/projected/2f0c28c3-5340-40bd-88be-2192d07ba4b5-kube-api-access-v49x6\") pod \"route-controller-manager-67855659d4-lpw9z\" (UID: \"2f0c28c3-5340-40bd-88be-2192d07ba4b5\") " pod="openshift-route-controller-manager/route-controller-manager-67855659d4-lpw9z" Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.563968 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/965ed13f-051e-4ae2-9669-d4eede4b70ee-config" (OuterVolumeSpecName: "config") pod "965ed13f-051e-4ae2-9669-d4eede4b70ee" (UID: "965ed13f-051e-4ae2-9669-d4eede4b70ee"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.564242 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/965ed13f-051e-4ae2-9669-d4eede4b70ee-client-ca" (OuterVolumeSpecName: "client-ca") pod "965ed13f-051e-4ae2-9669-d4eede4b70ee" (UID: "965ed13f-051e-4ae2-9669-d4eede4b70ee"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.568237 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/965ed13f-051e-4ae2-9669-d4eede4b70ee-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "965ed13f-051e-4ae2-9669-d4eede4b70ee" (UID: "965ed13f-051e-4ae2-9669-d4eede4b70ee"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.568543 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/965ed13f-051e-4ae2-9669-d4eede4b70ee-kube-api-access-fqh86" (OuterVolumeSpecName: "kube-api-access-fqh86") pod "965ed13f-051e-4ae2-9669-d4eede4b70ee" (UID: "965ed13f-051e-4ae2-9669-d4eede4b70ee"). InnerVolumeSpecName "kube-api-access-fqh86". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.663045 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2f0c28c3-5340-40bd-88be-2192d07ba4b5-serving-cert\") pod \"route-controller-manager-67855659d4-lpw9z\" (UID: \"2f0c28c3-5340-40bd-88be-2192d07ba4b5\") " pod="openshift-route-controller-manager/route-controller-manager-67855659d4-lpw9z" Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.663102 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v49x6\" (UniqueName: \"kubernetes.io/projected/2f0c28c3-5340-40bd-88be-2192d07ba4b5-kube-api-access-v49x6\") pod \"route-controller-manager-67855659d4-lpw9z\" (UID: \"2f0c28c3-5340-40bd-88be-2192d07ba4b5\") " pod="openshift-route-controller-manager/route-controller-manager-67855659d4-lpw9z" Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.663141 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f0c28c3-5340-40bd-88be-2192d07ba4b5-config\") pod \"route-controller-manager-67855659d4-lpw9z\" (UID: \"2f0c28c3-5340-40bd-88be-2192d07ba4b5\") " pod="openshift-route-controller-manager/route-controller-manager-67855659d4-lpw9z" Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.663164 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2f0c28c3-5340-40bd-88be-2192d07ba4b5-client-ca\") pod \"route-controller-manager-67855659d4-lpw9z\" (UID: \"2f0c28c3-5340-40bd-88be-2192d07ba4b5\") " pod="openshift-route-controller-manager/route-controller-manager-67855659d4-lpw9z" Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.663212 4998 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/965ed13f-051e-4ae2-9669-d4eede4b70ee-client-ca\") on node \"crc\" DevicePath \"\"" Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.663228 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqh86\" (UniqueName: \"kubernetes.io/projected/965ed13f-051e-4ae2-9669-d4eede4b70ee-kube-api-access-fqh86\") on node \"crc\" DevicePath \"\"" Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.663241 4998 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/965ed13f-051e-4ae2-9669-d4eede4b70ee-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.663254 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/965ed13f-051e-4ae2-9669-d4eede4b70ee-config\") on node \"crc\" DevicePath \"\"" Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.664345 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2f0c28c3-5340-40bd-88be-2192d07ba4b5-client-ca\") pod \"route-controller-manager-67855659d4-lpw9z\" (UID: \"2f0c28c3-5340-40bd-88be-2192d07ba4b5\") " pod="openshift-route-controller-manager/route-controller-manager-67855659d4-lpw9z" Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.664996 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f0c28c3-5340-40bd-88be-2192d07ba4b5-config\") pod \"route-controller-manager-67855659d4-lpw9z\" 
(UID: \"2f0c28c3-5340-40bd-88be-2192d07ba4b5\") " pod="openshift-route-controller-manager/route-controller-manager-67855659d4-lpw9z" Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.666580 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2f0c28c3-5340-40bd-88be-2192d07ba4b5-serving-cert\") pod \"route-controller-manager-67855659d4-lpw9z\" (UID: \"2f0c28c3-5340-40bd-88be-2192d07ba4b5\") " pod="openshift-route-controller-manager/route-controller-manager-67855659d4-lpw9z" Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.692122 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v49x6\" (UniqueName: \"kubernetes.io/projected/2f0c28c3-5340-40bd-88be-2192d07ba4b5-kube-api-access-v49x6\") pod \"route-controller-manager-67855659d4-lpw9z\" (UID: \"2f0c28c3-5340-40bd-88be-2192d07ba4b5\") " pod="openshift-route-controller-manager/route-controller-manager-67855659d4-lpw9z" Feb 03 06:50:31 crc kubenswrapper[4998]: I0203 06:50:31.776729 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-67855659d4-lpw9z" Feb 03 06:50:32 crc kubenswrapper[4998]: I0203 06:50:32.183070 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-67855659d4-lpw9z"] Feb 03 06:50:32 crc kubenswrapper[4998]: W0203 06:50:32.192159 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2f0c28c3_5340_40bd_88be_2192d07ba4b5.slice/crio-4b82f465a3e809d6101e700455f11c18a2e7613c494615d21c6db445f305fbeb WatchSource:0}: Error finding container 4b82f465a3e809d6101e700455f11c18a2e7613c494615d21c6db445f305fbeb: Status 404 returned error can't find the container with id 4b82f465a3e809d6101e700455f11c18a2e7613c494615d21c6db445f305fbeb Feb 03 06:50:32 crc kubenswrapper[4998]: I0203 06:50:32.390713 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-route-controller-manager_route-controller-manager-644d86f9bc-hzgnv_965ed13f-051e-4ae2-9669-d4eede4b70ee/route-controller-manager/0.log" Feb 03 06:50:32 crc kubenswrapper[4998]: I0203 06:50:32.391200 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-hzgnv" Feb 03 06:50:32 crc kubenswrapper[4998]: I0203 06:50:32.391200 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-hzgnv" event={"ID":"965ed13f-051e-4ae2-9669-d4eede4b70ee","Type":"ContainerDied","Data":"dddac7557bf701e5caf18ea287db6b947d160254bf6992620ffe1b9829a302f1"} Feb 03 06:50:32 crc kubenswrapper[4998]: I0203 06:50:32.391275 4998 scope.go:117] "RemoveContainer" containerID="b4a2c30d51dd61c55e39a3ddf0f274f153a0eba11a507013c4f20a8378f5cc47" Feb 03 06:50:32 crc kubenswrapper[4998]: I0203 06:50:32.392473 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-67855659d4-lpw9z" event={"ID":"2f0c28c3-5340-40bd-88be-2192d07ba4b5","Type":"ContainerStarted","Data":"aff4ae788289eb29c1d08c2836bbadab26c769ceed8321eb681a627e786bbba2"} Feb 03 06:50:32 crc kubenswrapper[4998]: I0203 06:50:32.392493 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-67855659d4-lpw9z" event={"ID":"2f0c28c3-5340-40bd-88be-2192d07ba4b5","Type":"ContainerStarted","Data":"4b82f465a3e809d6101e700455f11c18a2e7613c494615d21c6db445f305fbeb"} Feb 03 06:50:32 crc kubenswrapper[4998]: I0203 06:50:32.413995 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-67855659d4-lpw9z" podStartSLOduration=3.4139817470000002 podStartE2EDuration="3.413981747s" podCreationTimestamp="2026-02-03 06:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:50:32.412431537 +0000 UTC m=+270.699125353" watchObservedRunningTime="2026-02-03 06:50:32.413981747 +0000 UTC m=+270.700675553" Feb 03 06:50:32 crc kubenswrapper[4998]: I0203 06:50:32.435523 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-644d86f9bc-hzgnv"] Feb 03 06:50:32 crc kubenswrapper[4998]: I0203 06:50:32.435566 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-644d86f9bc-hzgnv"] Feb 03 06:50:33 crc kubenswrapper[4998]: I0203 06:50:33.399681 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-67855659d4-lpw9z" Feb 03 06:50:33 crc kubenswrapper[4998]: I0203 06:50:33.404822 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-67855659d4-lpw9z" Feb 03 06:50:34 crc kubenswrapper[4998]: I0203 06:50:34.440971 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="965ed13f-051e-4ae2-9669-d4eede4b70ee" path="/var/lib/kubelet/pods/965ed13f-051e-4ae2-9669-d4eede4b70ee/volumes" Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.168539 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-85d898fc-rvbjf"] Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.169300 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-85d898fc-rvbjf" podUID="ea3764b6-1bb7-4884-aa73-2e29fad5b33e" containerName="controller-manager" 
containerID="cri-o://b58feb02f1372fc68ff5fa5cd66f5843b0872e419447172ba749e98c5a30634e" gracePeriod=30 Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.176990 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-67855659d4-lpw9z"] Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.177223 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-67855659d4-lpw9z" podUID="2f0c28c3-5340-40bd-88be-2192d07ba4b5" containerName="route-controller-manager" containerID="cri-o://aff4ae788289eb29c1d08c2836bbadab26c769ceed8321eb681a627e786bbba2" gracePeriod=30 Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.232427 4998 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.573462 4998 generic.go:334] "Generic (PLEG): container finished" podID="2f0c28c3-5340-40bd-88be-2192d07ba4b5" containerID="aff4ae788289eb29c1d08c2836bbadab26c769ceed8321eb681a627e786bbba2" exitCode=0 Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.573542 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-67855659d4-lpw9z" event={"ID":"2f0c28c3-5340-40bd-88be-2192d07ba4b5","Type":"ContainerDied","Data":"aff4ae788289eb29c1d08c2836bbadab26c769ceed8321eb681a627e786bbba2"} Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.575059 4998 generic.go:334] "Generic (PLEG): container finished" podID="ea3764b6-1bb7-4884-aa73-2e29fad5b33e" containerID="b58feb02f1372fc68ff5fa5cd66f5843b0872e419447172ba749e98c5a30634e" exitCode=0 Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.575086 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-85d898fc-rvbjf" event={"ID":"ea3764b6-1bb7-4884-aa73-2e29fad5b33e","Type":"ContainerDied","Data":"b58feb02f1372fc68ff5fa5cd66f5843b0872e419447172ba749e98c5a30634e"} Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.690308 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-67855659d4-lpw9z" Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.693873 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-85d898fc-rvbjf" Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.839245 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-md75x\" (UniqueName: \"kubernetes.io/projected/ea3764b6-1bb7-4884-aa73-2e29fad5b33e-kube-api-access-md75x\") pod \"ea3764b6-1bb7-4884-aa73-2e29fad5b33e\" (UID: \"ea3764b6-1bb7-4884-aa73-2e29fad5b33e\") " Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.839309 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ea3764b6-1bb7-4884-aa73-2e29fad5b33e-serving-cert\") pod \"ea3764b6-1bb7-4884-aa73-2e29fad5b33e\" (UID: \"ea3764b6-1bb7-4884-aa73-2e29fad5b33e\") " Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.839377 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f0c28c3-5340-40bd-88be-2192d07ba4b5-config\") pod \"2f0c28c3-5340-40bd-88be-2192d07ba4b5\" (UID: \"2f0c28c3-5340-40bd-88be-2192d07ba4b5\") " Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.839435 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2f0c28c3-5340-40bd-88be-2192d07ba4b5-serving-cert\") pod \"2f0c28c3-5340-40bd-88be-2192d07ba4b5\" (UID: \"2f0c28c3-5340-40bd-88be-2192d07ba4b5\") " Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.839473 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v49x6\" (UniqueName: \"kubernetes.io/projected/2f0c28c3-5340-40bd-88be-2192d07ba4b5-kube-api-access-v49x6\") pod \"2f0c28c3-5340-40bd-88be-2192d07ba4b5\" (UID: \"2f0c28c3-5340-40bd-88be-2192d07ba4b5\") " Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.839560 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2f0c28c3-5340-40bd-88be-2192d07ba4b5-client-ca\") pod \"2f0c28c3-5340-40bd-88be-2192d07ba4b5\" (UID: \"2f0c28c3-5340-40bd-88be-2192d07ba4b5\") " Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.839623 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ea3764b6-1bb7-4884-aa73-2e29fad5b33e-proxy-ca-bundles\") pod \"ea3764b6-1bb7-4884-aa73-2e29fad5b33e\" (UID: \"ea3764b6-1bb7-4884-aa73-2e29fad5b33e\") " Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.839655 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ea3764b6-1bb7-4884-aa73-2e29fad5b33e-client-ca\") pod \"ea3764b6-1bb7-4884-aa73-2e29fad5b33e\" (UID: \"ea3764b6-1bb7-4884-aa73-2e29fad5b33e\") " Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.839686 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea3764b6-1bb7-4884-aa73-2e29fad5b33e-config\") pod \"ea3764b6-1bb7-4884-aa73-2e29fad5b33e\" (UID: \"ea3764b6-1bb7-4884-aa73-2e29fad5b33e\") " Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.841061 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea3764b6-1bb7-4884-aa73-2e29fad5b33e-config" (OuterVolumeSpecName: "config") pod "ea3764b6-1bb7-4884-aa73-2e29fad5b33e" (UID: 
"ea3764b6-1bb7-4884-aa73-2e29fad5b33e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.842623 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f0c28c3-5340-40bd-88be-2192d07ba4b5-client-ca" (OuterVolumeSpecName: "client-ca") pod "2f0c28c3-5340-40bd-88be-2192d07ba4b5" (UID: "2f0c28c3-5340-40bd-88be-2192d07ba4b5"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.842654 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea3764b6-1bb7-4884-aa73-2e29fad5b33e-client-ca" (OuterVolumeSpecName: "client-ca") pod "ea3764b6-1bb7-4884-aa73-2e29fad5b33e" (UID: "ea3764b6-1bb7-4884-aa73-2e29fad5b33e"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.842767 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea3764b6-1bb7-4884-aa73-2e29fad5b33e-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "ea3764b6-1bb7-4884-aa73-2e29fad5b33e" (UID: "ea3764b6-1bb7-4884-aa73-2e29fad5b33e"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.843035 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f0c28c3-5340-40bd-88be-2192d07ba4b5-config" (OuterVolumeSpecName: "config") pod "2f0c28c3-5340-40bd-88be-2192d07ba4b5" (UID: "2f0c28c3-5340-40bd-88be-2192d07ba4b5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.844838 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f0c28c3-5340-40bd-88be-2192d07ba4b5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "2f0c28c3-5340-40bd-88be-2192d07ba4b5" (UID: "2f0c28c3-5340-40bd-88be-2192d07ba4b5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.844880 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea3764b6-1bb7-4884-aa73-2e29fad5b33e-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "ea3764b6-1bb7-4884-aa73-2e29fad5b33e" (UID: "ea3764b6-1bb7-4884-aa73-2e29fad5b33e"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.845440 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea3764b6-1bb7-4884-aa73-2e29fad5b33e-kube-api-access-md75x" (OuterVolumeSpecName: "kube-api-access-md75x") pod "ea3764b6-1bb7-4884-aa73-2e29fad5b33e" (UID: "ea3764b6-1bb7-4884-aa73-2e29fad5b33e"). InnerVolumeSpecName "kube-api-access-md75x". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.846572 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f0c28c3-5340-40bd-88be-2192d07ba4b5-kube-api-access-v49x6" (OuterVolumeSpecName: "kube-api-access-v49x6") pod "2f0c28c3-5340-40bd-88be-2192d07ba4b5" (UID: "2f0c28c3-5340-40bd-88be-2192d07ba4b5"). 
InnerVolumeSpecName "kube-api-access-v49x6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.941567 4998 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2f0c28c3-5340-40bd-88be-2192d07ba4b5-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.941604 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v49x6\" (UniqueName: \"kubernetes.io/projected/2f0c28c3-5340-40bd-88be-2192d07ba4b5-kube-api-access-v49x6\") on node \"crc\" DevicePath \"\"" Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.941613 4998 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2f0c28c3-5340-40bd-88be-2192d07ba4b5-client-ca\") on node \"crc\" DevicePath \"\"" Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.941622 4998 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ea3764b6-1bb7-4884-aa73-2e29fad5b33e-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.941630 4998 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ea3764b6-1bb7-4884-aa73-2e29fad5b33e-client-ca\") on node \"crc\" DevicePath \"\"" Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.941638 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea3764b6-1bb7-4884-aa73-2e29fad5b33e-config\") on node \"crc\" DevicePath \"\"" Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.941646 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-md75x\" (UniqueName: \"kubernetes.io/projected/ea3764b6-1bb7-4884-aa73-2e29fad5b33e-kube-api-access-md75x\") on node \"crc\" DevicePath \"\"" Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.941654 4998 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ea3764b6-1bb7-4884-aa73-2e29fad5b33e-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 03 06:51:02 crc kubenswrapper[4998]: I0203 06:51:02.941662 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f0c28c3-5340-40bd-88be-2192d07ba4b5-config\") on node \"crc\" DevicePath \"\"" Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.581276 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-67855659d4-lpw9z" Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.581304 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-67855659d4-lpw9z" event={"ID":"2f0c28c3-5340-40bd-88be-2192d07ba4b5","Type":"ContainerDied","Data":"4b82f465a3e809d6101e700455f11c18a2e7613c494615d21c6db445f305fbeb"} Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.581364 4998 scope.go:117] "RemoveContainer" containerID="aff4ae788289eb29c1d08c2836bbadab26c769ceed8321eb681a627e786bbba2" Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.584083 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-85d898fc-rvbjf" event={"ID":"ea3764b6-1bb7-4884-aa73-2e29fad5b33e","Type":"ContainerDied","Data":"16d49e3c0e28e135a3200ed0a2304e31f4a2b8cd69e03a2c6a2e4de342995264"} Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.584128 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-85d898fc-rvbjf" Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.595444 4998 scope.go:117] "RemoveContainer" containerID="b58feb02f1372fc68ff5fa5cd66f5843b0872e419447172ba749e98c5a30634e" Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.615554 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-67855659d4-lpw9z"] Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.621384 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-67855659d4-lpw9z"] Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.628705 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-85d898fc-rvbjf"] Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.631915 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-85d898fc-rvbjf"] Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.904059 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-c95b7c9f5-dwn7g"] Feb 03 06:51:03 crc kubenswrapper[4998]: E0203 06:51:03.904946 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f0c28c3-5340-40bd-88be-2192d07ba4b5" containerName="route-controller-manager" Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.904971 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f0c28c3-5340-40bd-88be-2192d07ba4b5" containerName="route-controller-manager" Feb 03 06:51:03 crc kubenswrapper[4998]: E0203 06:51:03.904988 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea3764b6-1bb7-4884-aa73-2e29fad5b33e" containerName="controller-manager" Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.904996 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea3764b6-1bb7-4884-aa73-2e29fad5b33e" containerName="controller-manager" Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.905141 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea3764b6-1bb7-4884-aa73-2e29fad5b33e" containerName="controller-manager" Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.905158 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f0c28c3-5340-40bd-88be-2192d07ba4b5" 
containerName="route-controller-manager" Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.905821 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-c95b7c9f5-dwn7g" Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.907809 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.908522 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.909201 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.909468 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.909585 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.910459 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-644d86f9bc-n8zmr"] Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.911549 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-n8zmr" Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.914171 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.914813 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.914915 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.915320 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.915718 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.915727 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.919332 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.919999 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-c95b7c9f5-dwn7g"] Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.921527 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.944672 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-644d86f9bc-n8zmr"] Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 
06:51:03.953750 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/288b0d80-7e77-4839-9473-61d816ea83e7-client-ca\") pod \"controller-manager-c95b7c9f5-dwn7g\" (UID: \"288b0d80-7e77-4839-9473-61d816ea83e7\") " pod="openshift-controller-manager/controller-manager-c95b7c9f5-dwn7g" Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.954189 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1f9ec9dc-142e-4ca1-a5d9-a83e1ac76ebe-client-ca\") pod \"route-controller-manager-644d86f9bc-n8zmr\" (UID: \"1f9ec9dc-142e-4ca1-a5d9-a83e1ac76ebe\") " pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-n8zmr" Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.954449 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vplmk\" (UniqueName: \"kubernetes.io/projected/1f9ec9dc-142e-4ca1-a5d9-a83e1ac76ebe-kube-api-access-vplmk\") pod \"route-controller-manager-644d86f9bc-n8zmr\" (UID: \"1f9ec9dc-142e-4ca1-a5d9-a83e1ac76ebe\") " pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-n8zmr" Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.954519 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/288b0d80-7e77-4839-9473-61d816ea83e7-serving-cert\") pod \"controller-manager-c95b7c9f5-dwn7g\" (UID: \"288b0d80-7e77-4839-9473-61d816ea83e7\") " pod="openshift-controller-manager/controller-manager-c95b7c9f5-dwn7g" Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.954607 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/288b0d80-7e77-4839-9473-61d816ea83e7-proxy-ca-bundles\") pod \"controller-manager-c95b7c9f5-dwn7g\" (UID: \"288b0d80-7e77-4839-9473-61d816ea83e7\") " pod="openshift-controller-manager/controller-manager-c95b7c9f5-dwn7g" Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.954652 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f9ec9dc-142e-4ca1-a5d9-a83e1ac76ebe-config\") pod \"route-controller-manager-644d86f9bc-n8zmr\" (UID: \"1f9ec9dc-142e-4ca1-a5d9-a83e1ac76ebe\") " pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-n8zmr" Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.954687 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/288b0d80-7e77-4839-9473-61d816ea83e7-config\") pod \"controller-manager-c95b7c9f5-dwn7g\" (UID: \"288b0d80-7e77-4839-9473-61d816ea83e7\") " pod="openshift-controller-manager/controller-manager-c95b7c9f5-dwn7g" Feb 03 06:51:03 crc kubenswrapper[4998]: I0203 06:51:03.954804 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f9ec9dc-142e-4ca1-a5d9-a83e1ac76ebe-serving-cert\") pod \"route-controller-manager-644d86f9bc-n8zmr\" (UID: \"1f9ec9dc-142e-4ca1-a5d9-a83e1ac76ebe\") " pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-n8zmr" Feb 03 06:51:04 crc kubenswrapper[4998]: I0203 
06:51:04.055311 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/288b0d80-7e77-4839-9473-61d816ea83e7-config\") pod \"controller-manager-c95b7c9f5-dwn7g\" (UID: \"288b0d80-7e77-4839-9473-61d816ea83e7\") " pod="openshift-controller-manager/controller-manager-c95b7c9f5-dwn7g" Feb 03 06:51:04 crc kubenswrapper[4998]: I0203 06:51:04.055366 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f9ec9dc-142e-4ca1-a5d9-a83e1ac76ebe-serving-cert\") pod \"route-controller-manager-644d86f9bc-n8zmr\" (UID: \"1f9ec9dc-142e-4ca1-a5d9-a83e1ac76ebe\") " pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-n8zmr" Feb 03 06:51:04 crc kubenswrapper[4998]: I0203 06:51:04.055386 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/288b0d80-7e77-4839-9473-61d816ea83e7-client-ca\") pod \"controller-manager-c95b7c9f5-dwn7g\" (UID: \"288b0d80-7e77-4839-9473-61d816ea83e7\") " pod="openshift-controller-manager/controller-manager-c95b7c9f5-dwn7g" Feb 03 06:51:04 crc kubenswrapper[4998]: I0203 06:51:04.055419 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1f9ec9dc-142e-4ca1-a5d9-a83e1ac76ebe-client-ca\") pod \"route-controller-manager-644d86f9bc-n8zmr\" (UID: \"1f9ec9dc-142e-4ca1-a5d9-a83e1ac76ebe\") " pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-n8zmr" Feb 03 06:51:04 crc kubenswrapper[4998]: I0203 06:51:04.055451 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nq8r4\" (UniqueName: \"kubernetes.io/projected/288b0d80-7e77-4839-9473-61d816ea83e7-kube-api-access-nq8r4\") pod \"controller-manager-c95b7c9f5-dwn7g\" (UID: \"288b0d80-7e77-4839-9473-61d816ea83e7\") " pod="openshift-controller-manager/controller-manager-c95b7c9f5-dwn7g" Feb 03 06:51:04 crc kubenswrapper[4998]: I0203 06:51:04.055478 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vplmk\" (UniqueName: \"kubernetes.io/projected/1f9ec9dc-142e-4ca1-a5d9-a83e1ac76ebe-kube-api-access-vplmk\") pod \"route-controller-manager-644d86f9bc-n8zmr\" (UID: \"1f9ec9dc-142e-4ca1-a5d9-a83e1ac76ebe\") " pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-n8zmr" Feb 03 06:51:04 crc kubenswrapper[4998]: I0203 06:51:04.055497 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/288b0d80-7e77-4839-9473-61d816ea83e7-serving-cert\") pod \"controller-manager-c95b7c9f5-dwn7g\" (UID: \"288b0d80-7e77-4839-9473-61d816ea83e7\") " pod="openshift-controller-manager/controller-manager-c95b7c9f5-dwn7g" Feb 03 06:51:04 crc kubenswrapper[4998]: I0203 06:51:04.055516 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/288b0d80-7e77-4839-9473-61d816ea83e7-proxy-ca-bundles\") pod \"controller-manager-c95b7c9f5-dwn7g\" (UID: \"288b0d80-7e77-4839-9473-61d816ea83e7\") " pod="openshift-controller-manager/controller-manager-c95b7c9f5-dwn7g" Feb 03 06:51:04 crc kubenswrapper[4998]: I0203 06:51:04.055536 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/1f9ec9dc-142e-4ca1-a5d9-a83e1ac76ebe-config\") pod \"route-controller-manager-644d86f9bc-n8zmr\" (UID: \"1f9ec9dc-142e-4ca1-a5d9-a83e1ac76ebe\") " pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-n8zmr" Feb 03 06:51:04 crc kubenswrapper[4998]: I0203 06:51:04.056461 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1f9ec9dc-142e-4ca1-a5d9-a83e1ac76ebe-client-ca\") pod \"route-controller-manager-644d86f9bc-n8zmr\" (UID: \"1f9ec9dc-142e-4ca1-a5d9-a83e1ac76ebe\") " pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-n8zmr" Feb 03 06:51:04 crc kubenswrapper[4998]: I0203 06:51:04.056683 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f9ec9dc-142e-4ca1-a5d9-a83e1ac76ebe-config\") pod \"route-controller-manager-644d86f9bc-n8zmr\" (UID: \"1f9ec9dc-142e-4ca1-a5d9-a83e1ac76ebe\") " pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-n8zmr" Feb 03 06:51:04 crc kubenswrapper[4998]: I0203 06:51:04.057804 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/288b0d80-7e77-4839-9473-61d816ea83e7-proxy-ca-bundles\") pod \"controller-manager-c95b7c9f5-dwn7g\" (UID: \"288b0d80-7e77-4839-9473-61d816ea83e7\") " pod="openshift-controller-manager/controller-manager-c95b7c9f5-dwn7g" Feb 03 06:51:04 crc kubenswrapper[4998]: I0203 06:51:04.058136 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/288b0d80-7e77-4839-9473-61d816ea83e7-config\") pod \"controller-manager-c95b7c9f5-dwn7g\" (UID: \"288b0d80-7e77-4839-9473-61d816ea83e7\") " pod="openshift-controller-manager/controller-manager-c95b7c9f5-dwn7g" Feb 03 06:51:04 crc kubenswrapper[4998]: I0203 06:51:04.058228 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/288b0d80-7e77-4839-9473-61d816ea83e7-client-ca\") pod \"controller-manager-c95b7c9f5-dwn7g\" (UID: \"288b0d80-7e77-4839-9473-61d816ea83e7\") " pod="openshift-controller-manager/controller-manager-c95b7c9f5-dwn7g" Feb 03 06:51:04 crc kubenswrapper[4998]: I0203 06:51:04.061143 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f9ec9dc-142e-4ca1-a5d9-a83e1ac76ebe-serving-cert\") pod \"route-controller-manager-644d86f9bc-n8zmr\" (UID: \"1f9ec9dc-142e-4ca1-a5d9-a83e1ac76ebe\") " pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-n8zmr" Feb 03 06:51:04 crc kubenswrapper[4998]: I0203 06:51:04.061148 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/288b0d80-7e77-4839-9473-61d816ea83e7-serving-cert\") pod \"controller-manager-c95b7c9f5-dwn7g\" (UID: \"288b0d80-7e77-4839-9473-61d816ea83e7\") " pod="openshift-controller-manager/controller-manager-c95b7c9f5-dwn7g" Feb 03 06:51:04 crc kubenswrapper[4998]: I0203 06:51:04.093926 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vplmk\" (UniqueName: \"kubernetes.io/projected/1f9ec9dc-142e-4ca1-a5d9-a83e1ac76ebe-kube-api-access-vplmk\") pod \"route-controller-manager-644d86f9bc-n8zmr\" (UID: \"1f9ec9dc-142e-4ca1-a5d9-a83e1ac76ebe\") " 
pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-n8zmr" Feb 03 06:51:04 crc kubenswrapper[4998]: I0203 06:51:04.156743 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nq8r4\" (UniqueName: \"kubernetes.io/projected/288b0d80-7e77-4839-9473-61d816ea83e7-kube-api-access-nq8r4\") pod \"controller-manager-c95b7c9f5-dwn7g\" (UID: \"288b0d80-7e77-4839-9473-61d816ea83e7\") " pod="openshift-controller-manager/controller-manager-c95b7c9f5-dwn7g" Feb 03 06:51:04 crc kubenswrapper[4998]: I0203 06:51:04.177675 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nq8r4\" (UniqueName: \"kubernetes.io/projected/288b0d80-7e77-4839-9473-61d816ea83e7-kube-api-access-nq8r4\") pod \"controller-manager-c95b7c9f5-dwn7g\" (UID: \"288b0d80-7e77-4839-9473-61d816ea83e7\") " pod="openshift-controller-manager/controller-manager-c95b7c9f5-dwn7g" Feb 03 06:51:04 crc kubenswrapper[4998]: I0203 06:51:04.228276 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-c95b7c9f5-dwn7g" Feb 03 06:51:04 crc kubenswrapper[4998]: I0203 06:51:04.238432 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-n8zmr" Feb 03 06:51:04 crc kubenswrapper[4998]: I0203 06:51:04.439560 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f0c28c3-5340-40bd-88be-2192d07ba4b5" path="/var/lib/kubelet/pods/2f0c28c3-5340-40bd-88be-2192d07ba4b5/volumes" Feb 03 06:51:04 crc kubenswrapper[4998]: I0203 06:51:04.440385 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea3764b6-1bb7-4884-aa73-2e29fad5b33e" path="/var/lib/kubelet/pods/ea3764b6-1bb7-4884-aa73-2e29fad5b33e/volumes" Feb 03 06:51:04 crc kubenswrapper[4998]: I0203 06:51:04.441223 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-c95b7c9f5-dwn7g"] Feb 03 06:51:04 crc kubenswrapper[4998]: I0203 06:51:04.489554 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-644d86f9bc-n8zmr"] Feb 03 06:51:04 crc kubenswrapper[4998]: W0203 06:51:04.497756 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1f9ec9dc_142e_4ca1_a5d9_a83e1ac76ebe.slice/crio-c45671c5e2f047d3f6c927484b56f351696d6d313627d51460a319fb7aa14f8d WatchSource:0}: Error finding container c45671c5e2f047d3f6c927484b56f351696d6d313627d51460a319fb7aa14f8d: Status 404 returned error can't find the container with id c45671c5e2f047d3f6c927484b56f351696d6d313627d51460a319fb7aa14f8d Feb 03 06:51:04 crc kubenswrapper[4998]: I0203 06:51:04.592946 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-c95b7c9f5-dwn7g" event={"ID":"288b0d80-7e77-4839-9473-61d816ea83e7","Type":"ContainerStarted","Data":"a5af17ffc595d4a079d0c71f6159a555370440c7b4dceaa7b9e25af91d99b4c4"} Feb 03 06:51:04 crc kubenswrapper[4998]: I0203 06:51:04.593651 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-n8zmr" event={"ID":"1f9ec9dc-142e-4ca1-a5d9-a83e1ac76ebe","Type":"ContainerStarted","Data":"c45671c5e2f047d3f6c927484b56f351696d6d313627d51460a319fb7aa14f8d"} Feb 03 06:51:05 crc kubenswrapper[4998]: I0203 
06:51:05.601920 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-n8zmr" event={"ID":"1f9ec9dc-142e-4ca1-a5d9-a83e1ac76ebe","Type":"ContainerStarted","Data":"a2fa2eaeed57a04934e5c4e9d6d815fac36554c76f5b108fc2331bf6be4199f5"} Feb 03 06:51:05 crc kubenswrapper[4998]: I0203 06:51:05.602410 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-n8zmr" Feb 03 06:51:05 crc kubenswrapper[4998]: I0203 06:51:05.605001 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-c95b7c9f5-dwn7g" event={"ID":"288b0d80-7e77-4839-9473-61d816ea83e7","Type":"ContainerStarted","Data":"f42108143e871bfe3426ef57cbe36c547ebc80054edccbdf7c169ffa3ef41219"} Feb 03 06:51:05 crc kubenswrapper[4998]: I0203 06:51:05.605305 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-c95b7c9f5-dwn7g" Feb 03 06:51:05 crc kubenswrapper[4998]: I0203 06:51:05.610875 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-c95b7c9f5-dwn7g" Feb 03 06:51:05 crc kubenswrapper[4998]: I0203 06:51:05.610931 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-n8zmr" Feb 03 06:51:05 crc kubenswrapper[4998]: I0203 06:51:05.627696 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-644d86f9bc-n8zmr" podStartSLOduration=3.62767728 podStartE2EDuration="3.62767728s" podCreationTimestamp="2026-02-03 06:51:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:51:05.624935613 +0000 UTC m=+303.911629429" watchObservedRunningTime="2026-02-03 06:51:05.62767728 +0000 UTC m=+303.914371086" Feb 03 06:51:05 crc kubenswrapper[4998]: I0203 06:51:05.641858 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-c95b7c9f5-dwn7g" podStartSLOduration=3.6418303290000003 podStartE2EDuration="3.641830329s" podCreationTimestamp="2026-02-03 06:51:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:51:05.640905749 +0000 UTC m=+303.927599615" watchObservedRunningTime="2026-02-03 06:51:05.641830329 +0000 UTC m=+303.928524165" Feb 03 06:51:23 crc kubenswrapper[4998]: I0203 06:51:23.619295 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-mcpbl"] Feb 03 06:51:23 crc kubenswrapper[4998]: I0203 06:51:23.620834 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-mcpbl" Feb 03 06:51:23 crc kubenswrapper[4998]: I0203 06:51:23.637954 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-mcpbl"] Feb 03 06:51:23 crc kubenswrapper[4998]: I0203 06:51:23.803442 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/f64b6e91-0778-4d5c-85fe-99d029ea99b5-registry-tls\") pod \"image-registry-66df7c8f76-mcpbl\" (UID: \"f64b6e91-0778-4d5c-85fe-99d029ea99b5\") " pod="openshift-image-registry/image-registry-66df7c8f76-mcpbl" Feb 03 06:51:23 crc kubenswrapper[4998]: I0203 06:51:23.803486 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f64b6e91-0778-4d5c-85fe-99d029ea99b5-trusted-ca\") pod \"image-registry-66df7c8f76-mcpbl\" (UID: \"f64b6e91-0778-4d5c-85fe-99d029ea99b5\") " pod="openshift-image-registry/image-registry-66df7c8f76-mcpbl" Feb 03 06:51:23 crc kubenswrapper[4998]: I0203 06:51:23.803548 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/f64b6e91-0778-4d5c-85fe-99d029ea99b5-installation-pull-secrets\") pod \"image-registry-66df7c8f76-mcpbl\" (UID: \"f64b6e91-0778-4d5c-85fe-99d029ea99b5\") " pod="openshift-image-registry/image-registry-66df7c8f76-mcpbl" Feb 03 06:51:23 crc kubenswrapper[4998]: I0203 06:51:23.803632 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-mcpbl\" (UID: \"f64b6e91-0778-4d5c-85fe-99d029ea99b5\") " pod="openshift-image-registry/image-registry-66df7c8f76-mcpbl" Feb 03 06:51:23 crc kubenswrapper[4998]: I0203 06:51:23.803661 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f64b6e91-0778-4d5c-85fe-99d029ea99b5-bound-sa-token\") pod \"image-registry-66df7c8f76-mcpbl\" (UID: \"f64b6e91-0778-4d5c-85fe-99d029ea99b5\") " pod="openshift-image-registry/image-registry-66df7c8f76-mcpbl" Feb 03 06:51:23 crc kubenswrapper[4998]: I0203 06:51:23.803705 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfxzg\" (UniqueName: \"kubernetes.io/projected/f64b6e91-0778-4d5c-85fe-99d029ea99b5-kube-api-access-lfxzg\") pod \"image-registry-66df7c8f76-mcpbl\" (UID: \"f64b6e91-0778-4d5c-85fe-99d029ea99b5\") " pod="openshift-image-registry/image-registry-66df7c8f76-mcpbl" Feb 03 06:51:23 crc kubenswrapper[4998]: I0203 06:51:23.803725 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/f64b6e91-0778-4d5c-85fe-99d029ea99b5-registry-certificates\") pod \"image-registry-66df7c8f76-mcpbl\" (UID: \"f64b6e91-0778-4d5c-85fe-99d029ea99b5\") " pod="openshift-image-registry/image-registry-66df7c8f76-mcpbl" Feb 03 06:51:23 crc kubenswrapper[4998]: I0203 06:51:23.803750 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: 
\"kubernetes.io/empty-dir/f64b6e91-0778-4d5c-85fe-99d029ea99b5-ca-trust-extracted\") pod \"image-registry-66df7c8f76-mcpbl\" (UID: \"f64b6e91-0778-4d5c-85fe-99d029ea99b5\") " pod="openshift-image-registry/image-registry-66df7c8f76-mcpbl" Feb 03 06:51:23 crc kubenswrapper[4998]: I0203 06:51:23.821643 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-mcpbl\" (UID: \"f64b6e91-0778-4d5c-85fe-99d029ea99b5\") " pod="openshift-image-registry/image-registry-66df7c8f76-mcpbl" Feb 03 06:51:23 crc kubenswrapper[4998]: I0203 06:51:23.904668 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f64b6e91-0778-4d5c-85fe-99d029ea99b5-bound-sa-token\") pod \"image-registry-66df7c8f76-mcpbl\" (UID: \"f64b6e91-0778-4d5c-85fe-99d029ea99b5\") " pod="openshift-image-registry/image-registry-66df7c8f76-mcpbl" Feb 03 06:51:23 crc kubenswrapper[4998]: I0203 06:51:23.904756 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/f64b6e91-0778-4d5c-85fe-99d029ea99b5-registry-certificates\") pod \"image-registry-66df7c8f76-mcpbl\" (UID: \"f64b6e91-0778-4d5c-85fe-99d029ea99b5\") " pod="openshift-image-registry/image-registry-66df7c8f76-mcpbl" Feb 03 06:51:23 crc kubenswrapper[4998]: I0203 06:51:23.904813 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfxzg\" (UniqueName: \"kubernetes.io/projected/f64b6e91-0778-4d5c-85fe-99d029ea99b5-kube-api-access-lfxzg\") pod \"image-registry-66df7c8f76-mcpbl\" (UID: \"f64b6e91-0778-4d5c-85fe-99d029ea99b5\") " pod="openshift-image-registry/image-registry-66df7c8f76-mcpbl" Feb 03 06:51:23 crc kubenswrapper[4998]: I0203 06:51:23.904853 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/f64b6e91-0778-4d5c-85fe-99d029ea99b5-ca-trust-extracted\") pod \"image-registry-66df7c8f76-mcpbl\" (UID: \"f64b6e91-0778-4d5c-85fe-99d029ea99b5\") " pod="openshift-image-registry/image-registry-66df7c8f76-mcpbl" Feb 03 06:51:23 crc kubenswrapper[4998]: I0203 06:51:23.904914 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/f64b6e91-0778-4d5c-85fe-99d029ea99b5-registry-tls\") pod \"image-registry-66df7c8f76-mcpbl\" (UID: \"f64b6e91-0778-4d5c-85fe-99d029ea99b5\") " pod="openshift-image-registry/image-registry-66df7c8f76-mcpbl" Feb 03 06:51:23 crc kubenswrapper[4998]: I0203 06:51:23.904947 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f64b6e91-0778-4d5c-85fe-99d029ea99b5-trusted-ca\") pod \"image-registry-66df7c8f76-mcpbl\" (UID: \"f64b6e91-0778-4d5c-85fe-99d029ea99b5\") " pod="openshift-image-registry/image-registry-66df7c8f76-mcpbl" Feb 03 06:51:23 crc kubenswrapper[4998]: I0203 06:51:23.904990 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/f64b6e91-0778-4d5c-85fe-99d029ea99b5-installation-pull-secrets\") pod \"image-registry-66df7c8f76-mcpbl\" (UID: \"f64b6e91-0778-4d5c-85fe-99d029ea99b5\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-mcpbl" Feb 03 06:51:23 crc kubenswrapper[4998]: I0203 06:51:23.905515 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/f64b6e91-0778-4d5c-85fe-99d029ea99b5-ca-trust-extracted\") pod \"image-registry-66df7c8f76-mcpbl\" (UID: \"f64b6e91-0778-4d5c-85fe-99d029ea99b5\") " pod="openshift-image-registry/image-registry-66df7c8f76-mcpbl" Feb 03 06:51:23 crc kubenswrapper[4998]: I0203 06:51:23.906923 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/f64b6e91-0778-4d5c-85fe-99d029ea99b5-registry-certificates\") pod \"image-registry-66df7c8f76-mcpbl\" (UID: \"f64b6e91-0778-4d5c-85fe-99d029ea99b5\") " pod="openshift-image-registry/image-registry-66df7c8f76-mcpbl" Feb 03 06:51:23 crc kubenswrapper[4998]: I0203 06:51:23.908085 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f64b6e91-0778-4d5c-85fe-99d029ea99b5-trusted-ca\") pod \"image-registry-66df7c8f76-mcpbl\" (UID: \"f64b6e91-0778-4d5c-85fe-99d029ea99b5\") " pod="openshift-image-registry/image-registry-66df7c8f76-mcpbl" Feb 03 06:51:23 crc kubenswrapper[4998]: I0203 06:51:23.918974 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/f64b6e91-0778-4d5c-85fe-99d029ea99b5-registry-tls\") pod \"image-registry-66df7c8f76-mcpbl\" (UID: \"f64b6e91-0778-4d5c-85fe-99d029ea99b5\") " pod="openshift-image-registry/image-registry-66df7c8f76-mcpbl" Feb 03 06:51:23 crc kubenswrapper[4998]: I0203 06:51:23.919103 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/f64b6e91-0778-4d5c-85fe-99d029ea99b5-installation-pull-secrets\") pod \"image-registry-66df7c8f76-mcpbl\" (UID: \"f64b6e91-0778-4d5c-85fe-99d029ea99b5\") " pod="openshift-image-registry/image-registry-66df7c8f76-mcpbl" Feb 03 06:51:23 crc kubenswrapper[4998]: I0203 06:51:23.922664 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfxzg\" (UniqueName: \"kubernetes.io/projected/f64b6e91-0778-4d5c-85fe-99d029ea99b5-kube-api-access-lfxzg\") pod \"image-registry-66df7c8f76-mcpbl\" (UID: \"f64b6e91-0778-4d5c-85fe-99d029ea99b5\") " pod="openshift-image-registry/image-registry-66df7c8f76-mcpbl" Feb 03 06:51:23 crc kubenswrapper[4998]: I0203 06:51:23.932657 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f64b6e91-0778-4d5c-85fe-99d029ea99b5-bound-sa-token\") pod \"image-registry-66df7c8f76-mcpbl\" (UID: \"f64b6e91-0778-4d5c-85fe-99d029ea99b5\") " pod="openshift-image-registry/image-registry-66df7c8f76-mcpbl" Feb 03 06:51:23 crc kubenswrapper[4998]: I0203 06:51:23.939186 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-mcpbl" Feb 03 06:51:24 crc kubenswrapper[4998]: I0203 06:51:24.364542 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-mcpbl"] Feb 03 06:51:24 crc kubenswrapper[4998]: I0203 06:51:24.712004 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-mcpbl" event={"ID":"f64b6e91-0778-4d5c-85fe-99d029ea99b5","Type":"ContainerStarted","Data":"9bef2bb584c5e03782c420611e5007013acdcb53056444c1767e88bbfb290c0e"} Feb 03 06:51:24 crc kubenswrapper[4998]: I0203 06:51:24.712056 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-mcpbl" event={"ID":"f64b6e91-0778-4d5c-85fe-99d029ea99b5","Type":"ContainerStarted","Data":"4b8dd05f89fabc92fba0bb9665e0e065b87cb1d2a99fb7d6c168a7437952a05a"} Feb 03 06:51:24 crc kubenswrapper[4998]: I0203 06:51:24.712157 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-mcpbl" Feb 03 06:51:24 crc kubenswrapper[4998]: I0203 06:51:24.733111 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-mcpbl" podStartSLOduration=1.73309435 podStartE2EDuration="1.73309435s" podCreationTimestamp="2026-02-03 06:51:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:51:24.73245081 +0000 UTC m=+323.019144636" watchObservedRunningTime="2026-02-03 06:51:24.73309435 +0000 UTC m=+323.019788156" Feb 03 06:51:31 crc kubenswrapper[4998]: I0203 06:51:31.752935 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-95844"] Feb 03 06:51:31 crc kubenswrapper[4998]: I0203 06:51:31.755476 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-95844" podUID="3c650fe1-e6ec-4079-b7ab-a0fb0683ac03" containerName="registry-server" containerID="cri-o://0f407394414efd64399dc958afb415ab01c98dfc9f82e4de347d2d777547a93f" gracePeriod=30 Feb 03 06:51:31 crc kubenswrapper[4998]: I0203 06:51:31.761019 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zmxrc"] Feb 03 06:51:31 crc kubenswrapper[4998]: I0203 06:51:31.761371 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-zmxrc" podUID="efee4303-e909-4341-b837-326fda8a64e1" containerName="registry-server" containerID="cri-o://fc34d709e8c1399b599bb382561f6fb3dd4004933a99ec29dbaed46c46dd2253" gracePeriod=30 Feb 03 06:51:31 crc kubenswrapper[4998]: I0203 06:51:31.774851 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6d7sj"] Feb 03 06:51:31 crc kubenswrapper[4998]: I0203 06:51:31.775143 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-6d7sj" podUID="8d0e557a-d9eb-4b35-a6a7-e6ab993d5464" containerName="marketplace-operator" containerID="cri-o://8ad4b8327e071d15241f172ee52e86c4403a6249e1c9ca5ce255f1192c9b29fc" gracePeriod=30 Feb 03 06:51:31 crc kubenswrapper[4998]: I0203 06:51:31.780865 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xhhkb"] Feb 03 
06:51:31 crc kubenswrapper[4998]: I0203 06:51:31.781148 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-xhhkb" podUID="c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696" containerName="registry-server" containerID="cri-o://ef84e3dbfc1404c2511bafce7241163047187d2de6bcaef604eeebdd4b6af1dc" gracePeriod=30 Feb 03 06:51:31 crc kubenswrapper[4998]: I0203 06:51:31.794463 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bft6r"] Feb 03 06:51:31 crc kubenswrapper[4998]: I0203 06:51:31.795315 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-bft6r" Feb 03 06:51:31 crc kubenswrapper[4998]: I0203 06:51:31.800189 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kqlxq"] Feb 03 06:51:31 crc kubenswrapper[4998]: I0203 06:51:31.800448 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-kqlxq" podUID="d8395812-3fcd-41cd-910d-f195e7d94fbc" containerName="registry-server" containerID="cri-o://955257e9082107b4ea7866274da12a6c7b207105a28c53405fab2fa17f012503" gracePeriod=30 Feb 03 06:51:31 crc kubenswrapper[4998]: I0203 06:51:31.805260 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bft6r"] Feb 03 06:51:31 crc kubenswrapper[4998]: I0203 06:51:31.914008 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9bd422b5-35b7-48e1-8fc7-b07a448c703a-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-bft6r\" (UID: \"9bd422b5-35b7-48e1-8fc7-b07a448c703a\") " pod="openshift-marketplace/marketplace-operator-79b997595-bft6r" Feb 03 06:51:31 crc kubenswrapper[4998]: I0203 06:51:31.914050 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7zwn\" (UniqueName: \"kubernetes.io/projected/9bd422b5-35b7-48e1-8fc7-b07a448c703a-kube-api-access-q7zwn\") pod \"marketplace-operator-79b997595-bft6r\" (UID: \"9bd422b5-35b7-48e1-8fc7-b07a448c703a\") " pod="openshift-marketplace/marketplace-operator-79b997595-bft6r" Feb 03 06:51:31 crc kubenswrapper[4998]: I0203 06:51:31.914086 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9bd422b5-35b7-48e1-8fc7-b07a448c703a-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-bft6r\" (UID: \"9bd422b5-35b7-48e1-8fc7-b07a448c703a\") " pod="openshift-marketplace/marketplace-operator-79b997595-bft6r" Feb 03 06:51:32 crc kubenswrapper[4998]: I0203 06:51:32.015542 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9bd422b5-35b7-48e1-8fc7-b07a448c703a-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-bft6r\" (UID: \"9bd422b5-35b7-48e1-8fc7-b07a448c703a\") " pod="openshift-marketplace/marketplace-operator-79b997595-bft6r" Feb 03 06:51:32 crc kubenswrapper[4998]: I0203 06:51:32.015606 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7zwn\" (UniqueName: \"kubernetes.io/projected/9bd422b5-35b7-48e1-8fc7-b07a448c703a-kube-api-access-q7zwn\") pod 
\"marketplace-operator-79b997595-bft6r\" (UID: \"9bd422b5-35b7-48e1-8fc7-b07a448c703a\") " pod="openshift-marketplace/marketplace-operator-79b997595-bft6r" Feb 03 06:51:32 crc kubenswrapper[4998]: I0203 06:51:32.015652 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9bd422b5-35b7-48e1-8fc7-b07a448c703a-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-bft6r\" (UID: \"9bd422b5-35b7-48e1-8fc7-b07a448c703a\") " pod="openshift-marketplace/marketplace-operator-79b997595-bft6r" Feb 03 06:51:32 crc kubenswrapper[4998]: I0203 06:51:32.018393 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9bd422b5-35b7-48e1-8fc7-b07a448c703a-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-bft6r\" (UID: \"9bd422b5-35b7-48e1-8fc7-b07a448c703a\") " pod="openshift-marketplace/marketplace-operator-79b997595-bft6r" Feb 03 06:51:32 crc kubenswrapper[4998]: I0203 06:51:32.022046 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9bd422b5-35b7-48e1-8fc7-b07a448c703a-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-bft6r\" (UID: \"9bd422b5-35b7-48e1-8fc7-b07a448c703a\") " pod="openshift-marketplace/marketplace-operator-79b997595-bft6r" Feb 03 06:51:32 crc kubenswrapper[4998]: I0203 06:51:32.035031 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7zwn\" (UniqueName: \"kubernetes.io/projected/9bd422b5-35b7-48e1-8fc7-b07a448c703a-kube-api-access-q7zwn\") pod \"marketplace-operator-79b997595-bft6r\" (UID: \"9bd422b5-35b7-48e1-8fc7-b07a448c703a\") " pod="openshift-marketplace/marketplace-operator-79b997595-bft6r" Feb 03 06:51:32 crc kubenswrapper[4998]: I0203 06:51:32.229728 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-bft6r" Feb 03 06:51:32 crc kubenswrapper[4998]: I0203 06:51:32.623832 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bft6r"] Feb 03 06:51:32 crc kubenswrapper[4998]: I0203 06:51:32.757416 4998 generic.go:334] "Generic (PLEG): container finished" podID="c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696" containerID="ef84e3dbfc1404c2511bafce7241163047187d2de6bcaef604eeebdd4b6af1dc" exitCode=0 Feb 03 06:51:32 crc kubenswrapper[4998]: I0203 06:51:32.757486 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xhhkb" event={"ID":"c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696","Type":"ContainerDied","Data":"ef84e3dbfc1404c2511bafce7241163047187d2de6bcaef604eeebdd4b6af1dc"} Feb 03 06:51:32 crc kubenswrapper[4998]: I0203 06:51:32.760186 4998 generic.go:334] "Generic (PLEG): container finished" podID="8d0e557a-d9eb-4b35-a6a7-e6ab993d5464" containerID="8ad4b8327e071d15241f172ee52e86c4403a6249e1c9ca5ce255f1192c9b29fc" exitCode=0 Feb 03 06:51:32 crc kubenswrapper[4998]: I0203 06:51:32.760373 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-6d7sj" event={"ID":"8d0e557a-d9eb-4b35-a6a7-e6ab993d5464","Type":"ContainerDied","Data":"8ad4b8327e071d15241f172ee52e86c4403a6249e1c9ca5ce255f1192c9b29fc"} Feb 03 06:51:32 crc kubenswrapper[4998]: I0203 06:51:32.760521 4998 scope.go:117] "RemoveContainer" containerID="3a64151fe912b987d3600efafaf1ba6a364f80d47a503b4ddf6cecedf9d90c0b" Feb 03 06:51:32 crc kubenswrapper[4998]: I0203 06:51:32.767625 4998 generic.go:334] "Generic (PLEG): container finished" podID="efee4303-e909-4341-b837-326fda8a64e1" containerID="fc34d709e8c1399b599bb382561f6fb3dd4004933a99ec29dbaed46c46dd2253" exitCode=0 Feb 03 06:51:32 crc kubenswrapper[4998]: I0203 06:51:32.767703 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zmxrc" event={"ID":"efee4303-e909-4341-b837-326fda8a64e1","Type":"ContainerDied","Data":"fc34d709e8c1399b599bb382561f6fb3dd4004933a99ec29dbaed46c46dd2253"} Feb 03 06:51:32 crc kubenswrapper[4998]: I0203 06:51:32.769990 4998 generic.go:334] "Generic (PLEG): container finished" podID="d8395812-3fcd-41cd-910d-f195e7d94fbc" containerID="955257e9082107b4ea7866274da12a6c7b207105a28c53405fab2fa17f012503" exitCode=0 Feb 03 06:51:32 crc kubenswrapper[4998]: I0203 06:51:32.770042 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kqlxq" event={"ID":"d8395812-3fcd-41cd-910d-f195e7d94fbc","Type":"ContainerDied","Data":"955257e9082107b4ea7866274da12a6c7b207105a28c53405fab2fa17f012503"} Feb 03 06:51:32 crc kubenswrapper[4998]: I0203 06:51:32.771740 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-bft6r" event={"ID":"9bd422b5-35b7-48e1-8fc7-b07a448c703a","Type":"ContainerStarted","Data":"144deee77c1582e901b5e5314f8074416f653d589f3fb24d6e98559a70c9ab27"} Feb 03 06:51:32 crc kubenswrapper[4998]: I0203 06:51:32.774760 4998 generic.go:334] "Generic (PLEG): container finished" podID="3c650fe1-e6ec-4079-b7ab-a0fb0683ac03" containerID="0f407394414efd64399dc958afb415ab01c98dfc9f82e4de347d2d777547a93f" exitCode=0 Feb 03 06:51:32 crc kubenswrapper[4998]: I0203 06:51:32.774917 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-95844" 
event={"ID":"3c650fe1-e6ec-4079-b7ab-a0fb0683ac03","Type":"ContainerDied","Data":"0f407394414efd64399dc958afb415ab01c98dfc9f82e4de347d2d777547a93f"} Feb 03 06:51:32 crc kubenswrapper[4998]: I0203 06:51:32.789868 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-95844" Feb 03 06:51:32 crc kubenswrapper[4998]: E0203 06:51:32.840151 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 955257e9082107b4ea7866274da12a6c7b207105a28c53405fab2fa17f012503 is running failed: container process not found" containerID="955257e9082107b4ea7866274da12a6c7b207105a28c53405fab2fa17f012503" cmd=["grpc_health_probe","-addr=:50051"] Feb 03 06:51:32 crc kubenswrapper[4998]: I0203 06:51:32.840523 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kkk89\" (UniqueName: \"kubernetes.io/projected/3c650fe1-e6ec-4079-b7ab-a0fb0683ac03-kube-api-access-kkk89\") pod \"3c650fe1-e6ec-4079-b7ab-a0fb0683ac03\" (UID: \"3c650fe1-e6ec-4079-b7ab-a0fb0683ac03\") " Feb 03 06:51:32 crc kubenswrapper[4998]: I0203 06:51:32.840573 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c650fe1-e6ec-4079-b7ab-a0fb0683ac03-catalog-content\") pod \"3c650fe1-e6ec-4079-b7ab-a0fb0683ac03\" (UID: \"3c650fe1-e6ec-4079-b7ab-a0fb0683ac03\") " Feb 03 06:51:32 crc kubenswrapper[4998]: I0203 06:51:32.840631 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c650fe1-e6ec-4079-b7ab-a0fb0683ac03-utilities\") pod \"3c650fe1-e6ec-4079-b7ab-a0fb0683ac03\" (UID: \"3c650fe1-e6ec-4079-b7ab-a0fb0683ac03\") " Feb 03 06:51:32 crc kubenswrapper[4998]: I0203 06:51:32.841609 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c650fe1-e6ec-4079-b7ab-a0fb0683ac03-utilities" (OuterVolumeSpecName: "utilities") pod "3c650fe1-e6ec-4079-b7ab-a0fb0683ac03" (UID: "3c650fe1-e6ec-4079-b7ab-a0fb0683ac03"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 06:51:32 crc kubenswrapper[4998]: I0203 06:51:32.845763 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c650fe1-e6ec-4079-b7ab-a0fb0683ac03-kube-api-access-kkk89" (OuterVolumeSpecName: "kube-api-access-kkk89") pod "3c650fe1-e6ec-4079-b7ab-a0fb0683ac03" (UID: "3c650fe1-e6ec-4079-b7ab-a0fb0683ac03"). InnerVolumeSpecName "kube-api-access-kkk89". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:51:32 crc kubenswrapper[4998]: E0203 06:51:32.845860 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 955257e9082107b4ea7866274da12a6c7b207105a28c53405fab2fa17f012503 is running failed: container process not found" containerID="955257e9082107b4ea7866274da12a6c7b207105a28c53405fab2fa17f012503" cmd=["grpc_health_probe","-addr=:50051"] Feb 03 06:51:32 crc kubenswrapper[4998]: E0203 06:51:32.846102 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 955257e9082107b4ea7866274da12a6c7b207105a28c53405fab2fa17f012503 is running failed: container process not found" containerID="955257e9082107b4ea7866274da12a6c7b207105a28c53405fab2fa17f012503" cmd=["grpc_health_probe","-addr=:50051"] Feb 03 06:51:32 crc kubenswrapper[4998]: E0203 06:51:32.846147 4998 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 955257e9082107b4ea7866274da12a6c7b207105a28c53405fab2fa17f012503 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-kqlxq" podUID="d8395812-3fcd-41cd-910d-f195e7d94fbc" containerName="registry-server" Feb 03 06:51:32 crc kubenswrapper[4998]: I0203 06:51:32.914108 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c650fe1-e6ec-4079-b7ab-a0fb0683ac03-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3c650fe1-e6ec-4079-b7ab-a0fb0683ac03" (UID: "3c650fe1-e6ec-4079-b7ab-a0fb0683ac03"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 06:51:32 crc kubenswrapper[4998]: I0203 06:51:32.943577 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c650fe1-e6ec-4079-b7ab-a0fb0683ac03-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 06:51:32 crc kubenswrapper[4998]: I0203 06:51:32.943610 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kkk89\" (UniqueName: \"kubernetes.io/projected/3c650fe1-e6ec-4079-b7ab-a0fb0683ac03-kube-api-access-kkk89\") on node \"crc\" DevicePath \"\"" Feb 03 06:51:32 crc kubenswrapper[4998]: I0203 06:51:32.943651 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c650fe1-e6ec-4079-b7ab-a0fb0683ac03-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.025012 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zmxrc" Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.034189 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kqlxq" Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.037921 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-6d7sj" Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.046187 4998 util.go:48] "No ready sandbox for pod can be found. 
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.145473 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9hnkq\" (UniqueName: \"kubernetes.io/projected/efee4303-e909-4341-b837-326fda8a64e1-kube-api-access-9hnkq\") pod \"efee4303-e909-4341-b837-326fda8a64e1\" (UID: \"efee4303-e909-4341-b837-326fda8a64e1\") "
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.145533 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vllrc\" (UniqueName: \"kubernetes.io/projected/d8395812-3fcd-41cd-910d-f195e7d94fbc-kube-api-access-vllrc\") pod \"d8395812-3fcd-41cd-910d-f195e7d94fbc\" (UID: \"d8395812-3fcd-41cd-910d-f195e7d94fbc\") "
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.145567 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mkz2b\" (UniqueName: \"kubernetes.io/projected/c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696-kube-api-access-mkz2b\") pod \"c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696\" (UID: \"c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696\") "
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.145611 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696-catalog-content\") pod \"c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696\" (UID: \"c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696\") "
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.145648 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8395812-3fcd-41cd-910d-f195e7d94fbc-utilities\") pod \"d8395812-3fcd-41cd-910d-f195e7d94fbc\" (UID: \"d8395812-3fcd-41cd-910d-f195e7d94fbc\") "
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.145685 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efee4303-e909-4341-b837-326fda8a64e1-catalog-content\") pod \"efee4303-e909-4341-b837-326fda8a64e1\" (UID: \"efee4303-e909-4341-b837-326fda8a64e1\") "
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.145705 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/efee4303-e909-4341-b837-326fda8a64e1-utilities\") pod \"efee4303-e909-4341-b837-326fda8a64e1\" (UID: \"efee4303-e909-4341-b837-326fda8a64e1\") "
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.145764 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dzrrc\" (UniqueName: \"kubernetes.io/projected/8d0e557a-d9eb-4b35-a6a7-e6ab993d5464-kube-api-access-dzrrc\") pod \"8d0e557a-d9eb-4b35-a6a7-e6ab993d5464\" (UID: \"8d0e557a-d9eb-4b35-a6a7-e6ab993d5464\") "
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.145819 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8d0e557a-d9eb-4b35-a6a7-e6ab993d5464-marketplace-trusted-ca\") pod \"8d0e557a-d9eb-4b35-a6a7-e6ab993d5464\" (UID: \"8d0e557a-d9eb-4b35-a6a7-e6ab993d5464\") "
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.145850 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8d0e557a-d9eb-4b35-a6a7-e6ab993d5464-marketplace-operator-metrics\") pod \"8d0e557a-d9eb-4b35-a6a7-e6ab993d5464\" (UID: \"8d0e557a-d9eb-4b35-a6a7-e6ab993d5464\") "
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.145898 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696-utilities\") pod \"c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696\" (UID: \"c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696\") "
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.145922 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8395812-3fcd-41cd-910d-f195e7d94fbc-catalog-content\") pod \"d8395812-3fcd-41cd-910d-f195e7d94fbc\" (UID: \"d8395812-3fcd-41cd-910d-f195e7d94fbc\") "
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.146767 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8395812-3fcd-41cd-910d-f195e7d94fbc-utilities" (OuterVolumeSpecName: "utilities") pod "d8395812-3fcd-41cd-910d-f195e7d94fbc" (UID: "d8395812-3fcd-41cd-910d-f195e7d94fbc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.151538 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8395812-3fcd-41cd-910d-f195e7d94fbc-kube-api-access-vllrc" (OuterVolumeSpecName: "kube-api-access-vllrc") pod "d8395812-3fcd-41cd-910d-f195e7d94fbc" (UID: "d8395812-3fcd-41cd-910d-f195e7d94fbc"). InnerVolumeSpecName "kube-api-access-vllrc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.152705 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/efee4303-e909-4341-b837-326fda8a64e1-utilities" (OuterVolumeSpecName: "utilities") pod "efee4303-e909-4341-b837-326fda8a64e1" (UID: "efee4303-e909-4341-b837-326fda8a64e1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.153663 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696-kube-api-access-mkz2b" (OuterVolumeSpecName: "kube-api-access-mkz2b") pod "c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696" (UID: "c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696"). InnerVolumeSpecName "kube-api-access-mkz2b". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.154233 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8d0e557a-d9eb-4b35-a6a7-e6ab993d5464-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "8d0e557a-d9eb-4b35-a6a7-e6ab993d5464" (UID: "8d0e557a-d9eb-4b35-a6a7-e6ab993d5464"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.162647 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d0e557a-d9eb-4b35-a6a7-e6ab993d5464-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "8d0e557a-d9eb-4b35-a6a7-e6ab993d5464" (UID: "8d0e557a-d9eb-4b35-a6a7-e6ab993d5464"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.162768 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d0e557a-d9eb-4b35-a6a7-e6ab993d5464-kube-api-access-dzrrc" (OuterVolumeSpecName: "kube-api-access-dzrrc") pod "8d0e557a-d9eb-4b35-a6a7-e6ab993d5464" (UID: "8d0e557a-d9eb-4b35-a6a7-e6ab993d5464"). InnerVolumeSpecName "kube-api-access-dzrrc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.162936 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efee4303-e909-4341-b837-326fda8a64e1-kube-api-access-9hnkq" (OuterVolumeSpecName: "kube-api-access-9hnkq") pod "efee4303-e909-4341-b837-326fda8a64e1" (UID: "efee4303-e909-4341-b837-326fda8a64e1"). InnerVolumeSpecName "kube-api-access-9hnkq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.166368 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696-utilities" (OuterVolumeSpecName: "utilities") pod "c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696" (UID: "c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.181997 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696" (UID: "c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.219638 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/efee4303-e909-4341-b837-326fda8a64e1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "efee4303-e909-4341-b837-326fda8a64e1" (UID: "efee4303-e909-4341-b837-326fda8a64e1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.246939 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dzrrc\" (UniqueName: \"kubernetes.io/projected/8d0e557a-d9eb-4b35-a6a7-e6ab993d5464-kube-api-access-dzrrc\") on node \"crc\" DevicePath \"\""
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.246985 4998 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8d0e557a-d9eb-4b35-a6a7-e6ab993d5464-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\""
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.246994 4998 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8d0e557a-d9eb-4b35-a6a7-e6ab993d5464-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\""
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.247005 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696-utilities\") on node \"crc\" DevicePath \"\""
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.247016 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9hnkq\" (UniqueName: \"kubernetes.io/projected/efee4303-e909-4341-b837-326fda8a64e1-kube-api-access-9hnkq\") on node \"crc\" DevicePath \"\""
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.247024 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vllrc\" (UniqueName: \"kubernetes.io/projected/d8395812-3fcd-41cd-910d-f195e7d94fbc-kube-api-access-vllrc\") on node \"crc\" DevicePath \"\""
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.247035 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mkz2b\" (UniqueName: \"kubernetes.io/projected/c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696-kube-api-access-mkz2b\") on node \"crc\" DevicePath \"\""
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.247054 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.247066 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8395812-3fcd-41cd-910d-f195e7d94fbc-utilities\") on node \"crc\" DevicePath \"\""
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.247076 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efee4303-e909-4341-b837-326fda8a64e1-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.247087 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/efee4303-e909-4341-b837-326fda8a64e1-utilities\") on node \"crc\" DevicePath \"\""
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.287265 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8395812-3fcd-41cd-910d-f195e7d94fbc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d8395812-3fcd-41cd-910d-f195e7d94fbc" (UID: "d8395812-3fcd-41cd-910d-f195e7d94fbc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.347776 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8395812-3fcd-41cd-910d-f195e7d94fbc-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.784078 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kqlxq" event={"ID":"d8395812-3fcd-41cd-910d-f195e7d94fbc","Type":"ContainerDied","Data":"248c47ff93d9c7a9ebe0c8f5cbd8c890c67ffe1ac085bc9efb34b90859da82f2"}
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.784412 4998 scope.go:117] "RemoveContainer" containerID="955257e9082107b4ea7866274da12a6c7b207105a28c53405fab2fa17f012503"
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.784588 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kqlxq"
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.786995 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-bft6r" event={"ID":"9bd422b5-35b7-48e1-8fc7-b07a448c703a","Type":"ContainerStarted","Data":"c3bc4c7401b384b3f0ca8388699b33264a0410de6f19d50bab62add3f55f3e57"}
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.787673 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-bft6r"
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.790427 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-bft6r"
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.791635 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-95844" event={"ID":"3c650fe1-e6ec-4079-b7ab-a0fb0683ac03","Type":"ContainerDied","Data":"299a490c6409a972a4db9069cba5ad9bd42e626f07252de9405ed23cd977561b"}
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.791834 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-95844"
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.795719 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xhhkb" event={"ID":"c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696","Type":"ContainerDied","Data":"058656f79c56160783cba0dc4191aa1fd0e516ed237cb316fcaa2f521046fcab"}
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.795922 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xhhkb"
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.799383 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-6d7sj" event={"ID":"8d0e557a-d9eb-4b35-a6a7-e6ab993d5464","Type":"ContainerDied","Data":"ee64d83406b7c8fbf8e3c67093ff174bca2a983464a1d4791fecb2cbb6b33d8c"}
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.799671 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-6d7sj"
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.805226 4998 scope.go:117] "RemoveContainer" containerID="76eed354837e0d6bbcdef3f959478b2fcd971a0268ae7241b2c5ad77979d41d9"
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.805816 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zmxrc" event={"ID":"efee4303-e909-4341-b837-326fda8a64e1","Type":"ContainerDied","Data":"bf38f8d6c5ea4afb76eaa4deb5b018f452abdd17a130adc7772f521638c01b3b"}
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.806284 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zmxrc"
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.822159 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-bft6r" podStartSLOduration=2.822141353 podStartE2EDuration="2.822141353s" podCreationTimestamp="2026-02-03 06:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:51:33.818766189 +0000 UTC m=+332.105460045" watchObservedRunningTime="2026-02-03 06:51:33.822141353 +0000 UTC m=+332.108835159"
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.852366 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kqlxq"]
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.853235 4998 scope.go:117] "RemoveContainer" containerID="87278e48b9de53d6d1cf78284a5f0c3ff3bff4afc50069d979429220afb88c72"
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.856894 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-kqlxq"]
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.868084 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zmxrc"]
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.880395 4998 scope.go:117] "RemoveContainer" containerID="0f407394414efd64399dc958afb415ab01c98dfc9f82e4de347d2d777547a93f"
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.889525 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-zmxrc"]
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.900141 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-95844"]
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.905400 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-95844"]
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.911017 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xhhkb"]
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.915646 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-xhhkb"]
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.916196 4998 scope.go:117] "RemoveContainer" containerID="286a761b83ed06f6d8d841dd603c0b25772806ff269bb0d368f8fe2875c6c38d"
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.919675 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6d7sj"]
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.923278 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6d7sj"]
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.935508 4998 scope.go:117] "RemoveContainer" containerID="f404176cf0b908df41743d5208a4ea5503dcf40826d9e1af71c5372a6bf42d53"
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.948557 4998 scope.go:117] "RemoveContainer" containerID="ef84e3dbfc1404c2511bafce7241163047187d2de6bcaef604eeebdd4b6af1dc"
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.969996 4998 scope.go:117] "RemoveContainer" containerID="24fad0b768cb95bca766b104ebf7737cffd4c8de4cdb6e8d354ce1f1e059eefc"
Feb 03 06:51:33 crc kubenswrapper[4998]: I0203 06:51:33.980638 4998 scope.go:117] "RemoveContainer" containerID="8089954fd218a5fad602012eac2a25203f15de08d351e3a0c3c388969545ccfe"
Feb 03 06:51:34 crc kubenswrapper[4998]: I0203 06:51:34.001593 4998 scope.go:117] "RemoveContainer" containerID="8ad4b8327e071d15241f172ee52e86c4403a6249e1c9ca5ce255f1192c9b29fc"
Feb 03 06:51:34 crc kubenswrapper[4998]: I0203 06:51:34.025502 4998 scope.go:117] "RemoveContainer" containerID="fc34d709e8c1399b599bb382561f6fb3dd4004933a99ec29dbaed46c46dd2253"
Feb 03 06:51:34 crc kubenswrapper[4998]: I0203 06:51:34.037408 4998 scope.go:117] "RemoveContainer" containerID="8f1c1579cb5978633a91a18b85320642c24c0b9062f57f8d15ad47540cc08f24"
Feb 03 06:51:34 crc kubenswrapper[4998]: I0203 06:51:34.052972 4998 scope.go:117] "RemoveContainer" containerID="0f140efc247f780bc41631c4c8b2db0c668f8dee04d347a02d897b31d7241177"
Feb 03 06:51:34 crc kubenswrapper[4998]: I0203 06:51:34.435505 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c650fe1-e6ec-4079-b7ab-a0fb0683ac03" path="/var/lib/kubelet/pods/3c650fe1-e6ec-4079-b7ab-a0fb0683ac03/volumes"
Feb 03 06:51:34 crc kubenswrapper[4998]: I0203 06:51:34.436469 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d0e557a-d9eb-4b35-a6a7-e6ab993d5464" path="/var/lib/kubelet/pods/8d0e557a-d9eb-4b35-a6a7-e6ab993d5464/volumes"
Feb 03 06:51:34 crc kubenswrapper[4998]: I0203 06:51:34.437128 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696" path="/var/lib/kubelet/pods/c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696/volumes"
Feb 03 06:51:34 crc kubenswrapper[4998]: I0203 06:51:34.438551 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8395812-3fcd-41cd-910d-f195e7d94fbc" path="/var/lib/kubelet/pods/d8395812-3fcd-41cd-910d-f195e7d94fbc/volumes"
Feb 03 06:51:34 crc kubenswrapper[4998]: I0203 06:51:34.439354 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efee4303-e909-4341-b837-326fda8a64e1" path="/var/lib/kubelet/pods/efee4303-e909-4341-b837-326fda8a64e1/volumes"
Feb 03 06:51:35 crc kubenswrapper[4998]: I0203 06:51:35.962450 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2rc5f"]
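
The five "Cleaned up orphaned pod volumes dir" entries a few lines above record the kubelet removing /var/lib/kubelet/pods/<uid>/volumes for pods that no longer exist. A hypothetical Go sketch of that kind of sweep, written for illustration only (not the kubelet's actual cleanup code; the active-UID set is an assumption):

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// cleanupOrphanedVolumes walks podsRoot and removes the volumes dir of any
// pod UID that is no longer active, mirroring the log entries above.
func cleanupOrphanedVolumes(podsRoot string, active map[string]bool) error {
	entries, err := os.ReadDir(podsRoot)
	if err != nil {
		return err
	}
	for _, e := range entries {
		if !e.IsDir() || active[e.Name()] {
			continue
		}
		volumes := filepath.Join(podsRoot, e.Name(), "volumes")
		// os.Remove only succeeds on an empty directory, so a volume whose
		// unmount has not finished tearing down is left alone.
		if err := os.Remove(volumes); err == nil {
			fmt.Printf("Cleaned up orphaned pod volumes dir %s\n", volumes)
		}
	}
	return nil
}

func main() {
	_ = cleanupOrphanedVolumes("/var/lib/kubelet/pods", map[string]bool{
		"9bd422b5-35b7-48e1-8fc7-b07a448c703a": true, // the replacement operator pod stays
	})
}
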
containerName="extract-content" Feb 03 06:51:35 crc kubenswrapper[4998]: I0203 06:51:35.962960 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c650fe1-e6ec-4079-b7ab-a0fb0683ac03" containerName="extract-content" Feb 03 06:51:35 crc kubenswrapper[4998]: E0203 06:51:35.962967 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efee4303-e909-4341-b837-326fda8a64e1" containerName="registry-server" Feb 03 06:51:35 crc kubenswrapper[4998]: I0203 06:51:35.962973 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="efee4303-e909-4341-b837-326fda8a64e1" containerName="registry-server" Feb 03 06:51:35 crc kubenswrapper[4998]: E0203 06:51:35.962982 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efee4303-e909-4341-b837-326fda8a64e1" containerName="extract-content" Feb 03 06:51:35 crc kubenswrapper[4998]: I0203 06:51:35.962988 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="efee4303-e909-4341-b837-326fda8a64e1" containerName="extract-content" Feb 03 06:51:35 crc kubenswrapper[4998]: E0203 06:51:35.962997 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696" containerName="extract-content" Feb 03 06:51:35 crc kubenswrapper[4998]: I0203 06:51:35.963002 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696" containerName="extract-content" Feb 03 06:51:35 crc kubenswrapper[4998]: E0203 06:51:35.963010 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696" containerName="registry-server" Feb 03 06:51:35 crc kubenswrapper[4998]: I0203 06:51:35.963015 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696" containerName="registry-server" Feb 03 06:51:35 crc kubenswrapper[4998]: E0203 06:51:35.963022 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c650fe1-e6ec-4079-b7ab-a0fb0683ac03" containerName="extract-utilities" Feb 03 06:51:35 crc kubenswrapper[4998]: I0203 06:51:35.963027 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c650fe1-e6ec-4079-b7ab-a0fb0683ac03" containerName="extract-utilities" Feb 03 06:51:35 crc kubenswrapper[4998]: E0203 06:51:35.963036 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8395812-3fcd-41cd-910d-f195e7d94fbc" containerName="extract-content" Feb 03 06:51:35 crc kubenswrapper[4998]: I0203 06:51:35.963043 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8395812-3fcd-41cd-910d-f195e7d94fbc" containerName="extract-content" Feb 03 06:51:35 crc kubenswrapper[4998]: E0203 06:51:35.963051 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8395812-3fcd-41cd-910d-f195e7d94fbc" containerName="registry-server" Feb 03 06:51:35 crc kubenswrapper[4998]: I0203 06:51:35.963058 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8395812-3fcd-41cd-910d-f195e7d94fbc" containerName="registry-server" Feb 03 06:51:35 crc kubenswrapper[4998]: E0203 06:51:35.963071 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d0e557a-d9eb-4b35-a6a7-e6ab993d5464" containerName="marketplace-operator" Feb 03 06:51:35 crc kubenswrapper[4998]: I0203 06:51:35.963082 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d0e557a-d9eb-4b35-a6a7-e6ab993d5464" containerName="marketplace-operator" Feb 03 06:51:35 crc kubenswrapper[4998]: E0203 06:51:35.963091 4998 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696" containerName="extract-utilities" Feb 03 06:51:35 crc kubenswrapper[4998]: I0203 06:51:35.963099 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696" containerName="extract-utilities" Feb 03 06:51:35 crc kubenswrapper[4998]: E0203 06:51:35.963110 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efee4303-e909-4341-b837-326fda8a64e1" containerName="extract-utilities" Feb 03 06:51:35 crc kubenswrapper[4998]: I0203 06:51:35.963117 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="efee4303-e909-4341-b837-326fda8a64e1" containerName="extract-utilities" Feb 03 06:51:35 crc kubenswrapper[4998]: E0203 06:51:35.963125 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c650fe1-e6ec-4079-b7ab-a0fb0683ac03" containerName="registry-server" Feb 03 06:51:35 crc kubenswrapper[4998]: I0203 06:51:35.963132 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c650fe1-e6ec-4079-b7ab-a0fb0683ac03" containerName="registry-server" Feb 03 06:51:35 crc kubenswrapper[4998]: I0203 06:51:35.963222 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8395812-3fcd-41cd-910d-f195e7d94fbc" containerName="registry-server" Feb 03 06:51:35 crc kubenswrapper[4998]: I0203 06:51:35.963235 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="efee4303-e909-4341-b837-326fda8a64e1" containerName="registry-server" Feb 03 06:51:35 crc kubenswrapper[4998]: I0203 06:51:35.963241 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d0e557a-d9eb-4b35-a6a7-e6ab993d5464" containerName="marketplace-operator" Feb 03 06:51:35 crc kubenswrapper[4998]: I0203 06:51:35.963249 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2f8e3ac-85c1-42e7-8f9c-7c8f3e616696" containerName="registry-server" Feb 03 06:51:35 crc kubenswrapper[4998]: I0203 06:51:35.963254 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c650fe1-e6ec-4079-b7ab-a0fb0683ac03" containerName="registry-server" Feb 03 06:51:35 crc kubenswrapper[4998]: I0203 06:51:35.963261 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d0e557a-d9eb-4b35-a6a7-e6ab993d5464" containerName="marketplace-operator" Feb 03 06:51:35 crc kubenswrapper[4998]: E0203 06:51:35.963334 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d0e557a-d9eb-4b35-a6a7-e6ab993d5464" containerName="marketplace-operator" Feb 03 06:51:35 crc kubenswrapper[4998]: I0203 06:51:35.963340 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d0e557a-d9eb-4b35-a6a7-e6ab993d5464" containerName="marketplace-operator" Feb 03 06:51:35 crc kubenswrapper[4998]: I0203 06:51:35.963986 4998 util.go:30] "No sandbox for pod can be found. 
Feb 03 06:51:35 crc kubenswrapper[4998]: I0203 06:51:35.969588 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Feb 03 06:51:35 crc kubenswrapper[4998]: I0203 06:51:35.972516 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2rc5f"]
Feb 03 06:51:35 crc kubenswrapper[4998]: I0203 06:51:35.989991 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-26zv7\" (UniqueName: \"kubernetes.io/projected/5f855b43-d82a-4ce2-8471-f3117bf7fd52-kube-api-access-26zv7\") pod \"community-operators-2rc5f\" (UID: \"5f855b43-d82a-4ce2-8471-f3117bf7fd52\") " pod="openshift-marketplace/community-operators-2rc5f"
Feb 03 06:51:35 crc kubenswrapper[4998]: I0203 06:51:35.990047 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f855b43-d82a-4ce2-8471-f3117bf7fd52-catalog-content\") pod \"community-operators-2rc5f\" (UID: \"5f855b43-d82a-4ce2-8471-f3117bf7fd52\") " pod="openshift-marketplace/community-operators-2rc5f"
Feb 03 06:51:35 crc kubenswrapper[4998]: I0203 06:51:35.990078 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f855b43-d82a-4ce2-8471-f3117bf7fd52-utilities\") pod \"community-operators-2rc5f\" (UID: \"5f855b43-d82a-4ce2-8471-f3117bf7fd52\") " pod="openshift-marketplace/community-operators-2rc5f"
Feb 03 06:51:36 crc kubenswrapper[4998]: I0203 06:51:36.090651 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-26zv7\" (UniqueName: \"kubernetes.io/projected/5f855b43-d82a-4ce2-8471-f3117bf7fd52-kube-api-access-26zv7\") pod \"community-operators-2rc5f\" (UID: \"5f855b43-d82a-4ce2-8471-f3117bf7fd52\") " pod="openshift-marketplace/community-operators-2rc5f"
Feb 03 06:51:36 crc kubenswrapper[4998]: I0203 06:51:36.090730 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f855b43-d82a-4ce2-8471-f3117bf7fd52-catalog-content\") pod \"community-operators-2rc5f\" (UID: \"5f855b43-d82a-4ce2-8471-f3117bf7fd52\") " pod="openshift-marketplace/community-operators-2rc5f"
Feb 03 06:51:36 crc kubenswrapper[4998]: I0203 06:51:36.090774 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f855b43-d82a-4ce2-8471-f3117bf7fd52-utilities\") pod \"community-operators-2rc5f\" (UID: \"5f855b43-d82a-4ce2-8471-f3117bf7fd52\") " pod="openshift-marketplace/community-operators-2rc5f"
Feb 03 06:51:36 crc kubenswrapper[4998]: I0203 06:51:36.091241 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f855b43-d82a-4ce2-8471-f3117bf7fd52-catalog-content\") pod \"community-operators-2rc5f\" (UID: \"5f855b43-d82a-4ce2-8471-f3117bf7fd52\") " pod="openshift-marketplace/community-operators-2rc5f"
Feb 03 06:51:36 crc kubenswrapper[4998]: I0203 06:51:36.091294 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f855b43-d82a-4ce2-8471-f3117bf7fd52-utilities\") pod \"community-operators-2rc5f\" (UID: \"5f855b43-d82a-4ce2-8471-f3117bf7fd52\") " pod="openshift-marketplace/community-operators-2rc5f"
Feb 03 06:51:36 crc kubenswrapper[4998]: I0203 06:51:36.111930 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-26zv7\" (UniqueName: \"kubernetes.io/projected/5f855b43-d82a-4ce2-8471-f3117bf7fd52-kube-api-access-26zv7\") pod \"community-operators-2rc5f\" (UID: \"5f855b43-d82a-4ce2-8471-f3117bf7fd52\") " pod="openshift-marketplace/community-operators-2rc5f"
Feb 03 06:51:36 crc kubenswrapper[4998]: I0203 06:51:36.165584 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-jcps7"]
Feb 03 06:51:36 crc kubenswrapper[4998]: I0203 06:51:36.166669 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jcps7"
Feb 03 06:51:36 crc kubenswrapper[4998]: I0203 06:51:36.169702 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jcps7"]
Feb 03 06:51:36 crc kubenswrapper[4998]: I0203 06:51:36.170223 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Feb 03 06:51:36 crc kubenswrapper[4998]: I0203 06:51:36.191860 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99d0d95b-7ead-4827-bdcb-70656ad70707-utilities\") pod \"redhat-operators-jcps7\" (UID: \"99d0d95b-7ead-4827-bdcb-70656ad70707\") " pod="openshift-marketplace/redhat-operators-jcps7"
Feb 03 06:51:36 crc kubenswrapper[4998]: I0203 06:51:36.192216 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99d0d95b-7ead-4827-bdcb-70656ad70707-catalog-content\") pod \"redhat-operators-jcps7\" (UID: \"99d0d95b-7ead-4827-bdcb-70656ad70707\") " pod="openshift-marketplace/redhat-operators-jcps7"
Feb 03 06:51:36 crc kubenswrapper[4998]: I0203 06:51:36.192334 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r69rv\" (UniqueName: \"kubernetes.io/projected/99d0d95b-7ead-4827-bdcb-70656ad70707-kube-api-access-r69rv\") pod \"redhat-operators-jcps7\" (UID: \"99d0d95b-7ead-4827-bdcb-70656ad70707\") " pod="openshift-marketplace/redhat-operators-jcps7"
Feb 03 06:51:36 crc kubenswrapper[4998]: I0203 06:51:36.284999 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2rc5f"
Feb 03 06:51:36 crc kubenswrapper[4998]: I0203 06:51:36.293661 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99d0d95b-7ead-4827-bdcb-70656ad70707-utilities\") pod \"redhat-operators-jcps7\" (UID: \"99d0d95b-7ead-4827-bdcb-70656ad70707\") " pod="openshift-marketplace/redhat-operators-jcps7"
Feb 03 06:51:36 crc kubenswrapper[4998]: I0203 06:51:36.293866 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99d0d95b-7ead-4827-bdcb-70656ad70707-catalog-content\") pod \"redhat-operators-jcps7\" (UID: \"99d0d95b-7ead-4827-bdcb-70656ad70707\") " pod="openshift-marketplace/redhat-operators-jcps7"
Feb 03 06:51:36 crc kubenswrapper[4998]: I0203 06:51:36.294094 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r69rv\" (UniqueName: \"kubernetes.io/projected/99d0d95b-7ead-4827-bdcb-70656ad70707-kube-api-access-r69rv\") pod \"redhat-operators-jcps7\" (UID: \"99d0d95b-7ead-4827-bdcb-70656ad70707\") " pod="openshift-marketplace/redhat-operators-jcps7"
Feb 03 06:51:36 crc kubenswrapper[4998]: I0203 06:51:36.294140 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99d0d95b-7ead-4827-bdcb-70656ad70707-utilities\") pod \"redhat-operators-jcps7\" (UID: \"99d0d95b-7ead-4827-bdcb-70656ad70707\") " pod="openshift-marketplace/redhat-operators-jcps7"
Feb 03 06:51:36 crc kubenswrapper[4998]: I0203 06:51:36.294224 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99d0d95b-7ead-4827-bdcb-70656ad70707-catalog-content\") pod \"redhat-operators-jcps7\" (UID: \"99d0d95b-7ead-4827-bdcb-70656ad70707\") " pod="openshift-marketplace/redhat-operators-jcps7"
Feb 03 06:51:36 crc kubenswrapper[4998]: I0203 06:51:36.313791 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r69rv\" (UniqueName: \"kubernetes.io/projected/99d0d95b-7ead-4827-bdcb-70656ad70707-kube-api-access-r69rv\") pod \"redhat-operators-jcps7\" (UID: \"99d0d95b-7ead-4827-bdcb-70656ad70707\") " pod="openshift-marketplace/redhat-operators-jcps7"
Feb 03 06:51:36 crc kubenswrapper[4998]: I0203 06:51:36.481859 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jcps7"
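
The VerifyControllerAttachedVolume / MountVolume started / MountVolume.SetUp succeeded progression above is the volume reconciler converging the actually-mounted set toward the desired set for each new pod. A toy Go model of that loop, for illustration only (the types and maps are stand-ins, not the kubelet's reconciler):

package main

import "fmt"

type volume struct{ podUID, name string }

// reconcile mounts volumes that are desired but not yet actual, and unmounts
// volumes that are actual but no longer desired, matching the paired
// MountVolume/UnmountVolume entries in the log.
func reconcile(desired, actual map[volume]bool) {
	for v := range desired {
		if !actual[v] {
			fmt.Printf("operationExecutor.MountVolume started for volume %q pod %q\n", v.name, v.podUID)
			actual[v] = true // stands in for MountVolume.SetUp succeeding
		}
	}
	for v := range actual {
		if !desired[v] {
			fmt.Printf("operationExecutor.UnmountVolume started for volume %q pod %q\n", v.name, v.podUID)
			delete(actual, v)
		}
	}
}

func main() {
	desired := map[volume]bool{
		{"99d0d95b-7ead-4827-bdcb-70656ad70707", "utilities"}:       true,
		{"99d0d95b-7ead-4827-bdcb-70656ad70707", "catalog-content"}: true,
	}
	reconcile(desired, map[volume]bool{})
}
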
Feb 03 06:51:36 crc kubenswrapper[4998]: I0203 06:51:36.673639 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2rc5f"]
Feb 03 06:51:36 crc kubenswrapper[4998]: W0203 06:51:36.675687 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5f855b43_d82a_4ce2_8471_f3117bf7fd52.slice/crio-68bb625744288f4708835208bf695ad127944cdc9a738247f978dbd87e761b08 WatchSource:0}: Error finding container 68bb625744288f4708835208bf695ad127944cdc9a738247f978dbd87e761b08: Status 404 returned error can't find the container with id 68bb625744288f4708835208bf695ad127944cdc9a738247f978dbd87e761b08
Feb 03 06:51:36 crc kubenswrapper[4998]: I0203 06:51:36.836385 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2rc5f" event={"ID":"5f855b43-d82a-4ce2-8471-f3117bf7fd52","Type":"ContainerStarted","Data":"68bb625744288f4708835208bf695ad127944cdc9a738247f978dbd87e761b08"}
Feb 03 06:51:36 crc kubenswrapper[4998]: I0203 06:51:36.846275 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jcps7"]
Feb 03 06:51:36 crc kubenswrapper[4998]: W0203 06:51:36.852378 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod99d0d95b_7ead_4827_bdcb_70656ad70707.slice/crio-fc54690b0ea5b6788bada296b35cb5867acbdb405b52cc09b1d2b4fdc304dd4a WatchSource:0}: Error finding container fc54690b0ea5b6788bada296b35cb5867acbdb405b52cc09b1d2b4fdc304dd4a: Status 404 returned error can't find the container with id fc54690b0ea5b6788bada296b35cb5867acbdb405b52cc09b1d2b4fdc304dd4a
Feb 03 06:51:37 crc kubenswrapper[4998]: I0203 06:51:37.843977 4998 generic.go:334] "Generic (PLEG): container finished" podID="5f855b43-d82a-4ce2-8471-f3117bf7fd52" containerID="bca87ad367f80113f0bbad6e3f6aec7ff29ea8c88084ad5c4ed3e4855914fee2" exitCode=0
Feb 03 06:51:37 crc kubenswrapper[4998]: I0203 06:51:37.844303 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2rc5f" event={"ID":"5f855b43-d82a-4ce2-8471-f3117bf7fd52","Type":"ContainerDied","Data":"bca87ad367f80113f0bbad6e3f6aec7ff29ea8c88084ad5c4ed3e4855914fee2"}
Feb 03 06:51:37 crc kubenswrapper[4998]: I0203 06:51:37.846182 4998 generic.go:334] "Generic (PLEG): container finished" podID="99d0d95b-7ead-4827-bdcb-70656ad70707" containerID="5ec8fc6e7d1b5b84a41fff8a705b1399ad9502c5710bf039cce33f76b4f1a1ad" exitCode=0
Feb 03 06:51:37 crc kubenswrapper[4998]: I0203 06:51:37.846232 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jcps7" event={"ID":"99d0d95b-7ead-4827-bdcb-70656ad70707","Type":"ContainerDied","Data":"5ec8fc6e7d1b5b84a41fff8a705b1399ad9502c5710bf039cce33f76b4f1a1ad"}
Feb 03 06:51:37 crc kubenswrapper[4998]: I0203 06:51:37.846261 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jcps7" event={"ID":"99d0d95b-7ead-4827-bdcb-70656ad70707","Type":"ContainerStarted","Data":"fc54690b0ea5b6788bada296b35cb5867acbdb405b52cc09b1d2b4fdc304dd4a"}
Feb 03 06:51:38 crc kubenswrapper[4998]: I0203 06:51:38.366988 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ln6cb"]
Feb 03 06:51:38 crc kubenswrapper[4998]: I0203 06:51:38.368377 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ln6cb"
Feb 03 06:51:38 crc kubenswrapper[4998]: I0203 06:51:38.371225 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Feb 03 06:51:38 crc kubenswrapper[4998]: I0203 06:51:38.372810 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ln6cb"]
Feb 03 06:51:38 crc kubenswrapper[4998]: I0203 06:51:38.418560 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rnxm\" (UniqueName: \"kubernetes.io/projected/a6732673-b9ab-4ac4-95d8-fec7cf3c7a44-kube-api-access-4rnxm\") pod \"redhat-marketplace-ln6cb\" (UID: \"a6732673-b9ab-4ac4-95d8-fec7cf3c7a44\") " pod="openshift-marketplace/redhat-marketplace-ln6cb"
Feb 03 06:51:38 crc kubenswrapper[4998]: I0203 06:51:38.418618 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6732673-b9ab-4ac4-95d8-fec7cf3c7a44-utilities\") pod \"redhat-marketplace-ln6cb\" (UID: \"a6732673-b9ab-4ac4-95d8-fec7cf3c7a44\") " pod="openshift-marketplace/redhat-marketplace-ln6cb"
Feb 03 06:51:38 crc kubenswrapper[4998]: I0203 06:51:38.418645 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6732673-b9ab-4ac4-95d8-fec7cf3c7a44-catalog-content\") pod \"redhat-marketplace-ln6cb\" (UID: \"a6732673-b9ab-4ac4-95d8-fec7cf3c7a44\") " pod="openshift-marketplace/redhat-marketplace-ln6cb"
Feb 03 06:51:38 crc kubenswrapper[4998]: I0203 06:51:38.520123 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rnxm\" (UniqueName: \"kubernetes.io/projected/a6732673-b9ab-4ac4-95d8-fec7cf3c7a44-kube-api-access-4rnxm\") pod \"redhat-marketplace-ln6cb\" (UID: \"a6732673-b9ab-4ac4-95d8-fec7cf3c7a44\") " pod="openshift-marketplace/redhat-marketplace-ln6cb"
Feb 03 06:51:38 crc kubenswrapper[4998]: I0203 06:51:38.520206 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6732673-b9ab-4ac4-95d8-fec7cf3c7a44-utilities\") pod \"redhat-marketplace-ln6cb\" (UID: \"a6732673-b9ab-4ac4-95d8-fec7cf3c7a44\") " pod="openshift-marketplace/redhat-marketplace-ln6cb"
Feb 03 06:51:38 crc kubenswrapper[4998]: I0203 06:51:38.520239 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6732673-b9ab-4ac4-95d8-fec7cf3c7a44-catalog-content\") pod \"redhat-marketplace-ln6cb\" (UID: \"a6732673-b9ab-4ac4-95d8-fec7cf3c7a44\") " pod="openshift-marketplace/redhat-marketplace-ln6cb"
Feb 03 06:51:38 crc kubenswrapper[4998]: I0203 06:51:38.521932 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6732673-b9ab-4ac4-95d8-fec7cf3c7a44-utilities\") pod \"redhat-marketplace-ln6cb\" (UID: \"a6732673-b9ab-4ac4-95d8-fec7cf3c7a44\") " pod="openshift-marketplace/redhat-marketplace-ln6cb"
Feb 03 06:51:38 crc kubenswrapper[4998]: I0203 06:51:38.523083 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6732673-b9ab-4ac4-95d8-fec7cf3c7a44-catalog-content\") pod \"redhat-marketplace-ln6cb\" (UID: \"a6732673-b9ab-4ac4-95d8-fec7cf3c7a44\") " pod="openshift-marketplace/redhat-marketplace-ln6cb"
Feb 03 06:51:38 crc kubenswrapper[4998]: I0203 06:51:38.547421 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rnxm\" (UniqueName: \"kubernetes.io/projected/a6732673-b9ab-4ac4-95d8-fec7cf3c7a44-kube-api-access-4rnxm\") pod \"redhat-marketplace-ln6cb\" (UID: \"a6732673-b9ab-4ac4-95d8-fec7cf3c7a44\") " pod="openshift-marketplace/redhat-marketplace-ln6cb"
Feb 03 06:51:38 crc kubenswrapper[4998]: I0203 06:51:38.563856 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-x6mnf"]
Feb 03 06:51:38 crc kubenswrapper[4998]: I0203 06:51:38.565378 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x6mnf"
Feb 03 06:51:38 crc kubenswrapper[4998]: I0203 06:51:38.569688 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Feb 03 06:51:38 crc kubenswrapper[4998]: I0203 06:51:38.574249 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-x6mnf"]
Feb 03 06:51:38 crc kubenswrapper[4998]: I0203 06:51:38.621494 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c196c5c8-06df-4911-a73d-7ba33f57af97-catalog-content\") pod \"certified-operators-x6mnf\" (UID: \"c196c5c8-06df-4911-a73d-7ba33f57af97\") " pod="openshift-marketplace/certified-operators-x6mnf"
Feb 03 06:51:38 crc kubenswrapper[4998]: I0203 06:51:38.621558 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lhdmx\" (UniqueName: \"kubernetes.io/projected/c196c5c8-06df-4911-a73d-7ba33f57af97-kube-api-access-lhdmx\") pod \"certified-operators-x6mnf\" (UID: \"c196c5c8-06df-4911-a73d-7ba33f57af97\") " pod="openshift-marketplace/certified-operators-x6mnf"
Feb 03 06:51:38 crc kubenswrapper[4998]: I0203 06:51:38.621586 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c196c5c8-06df-4911-a73d-7ba33f57af97-utilities\") pod \"certified-operators-x6mnf\" (UID: \"c196c5c8-06df-4911-a73d-7ba33f57af97\") " pod="openshift-marketplace/certified-operators-x6mnf"
Feb 03 06:51:38 crc kubenswrapper[4998]: I0203 06:51:38.689209 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ln6cb"
Feb 03 06:51:38 crc kubenswrapper[4998]: I0203 06:51:38.730496 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lhdmx\" (UniqueName: \"kubernetes.io/projected/c196c5c8-06df-4911-a73d-7ba33f57af97-kube-api-access-lhdmx\") pod \"certified-operators-x6mnf\" (UID: \"c196c5c8-06df-4911-a73d-7ba33f57af97\") " pod="openshift-marketplace/certified-operators-x6mnf"
Feb 03 06:51:38 crc kubenswrapper[4998]: I0203 06:51:38.730879 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c196c5c8-06df-4911-a73d-7ba33f57af97-utilities\") pod \"certified-operators-x6mnf\" (UID: \"c196c5c8-06df-4911-a73d-7ba33f57af97\") " pod="openshift-marketplace/certified-operators-x6mnf"
Feb 03 06:51:38 crc kubenswrapper[4998]: I0203 06:51:38.731052 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c196c5c8-06df-4911-a73d-7ba33f57af97-catalog-content\") pod \"certified-operators-x6mnf\" (UID: \"c196c5c8-06df-4911-a73d-7ba33f57af97\") " pod="openshift-marketplace/certified-operators-x6mnf"
Feb 03 06:51:38 crc kubenswrapper[4998]: I0203 06:51:38.731309 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c196c5c8-06df-4911-a73d-7ba33f57af97-utilities\") pod \"certified-operators-x6mnf\" (UID: \"c196c5c8-06df-4911-a73d-7ba33f57af97\") " pod="openshift-marketplace/certified-operators-x6mnf"
Feb 03 06:51:38 crc kubenswrapper[4998]: I0203 06:51:38.731395 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c196c5c8-06df-4911-a73d-7ba33f57af97-catalog-content\") pod \"certified-operators-x6mnf\" (UID: \"c196c5c8-06df-4911-a73d-7ba33f57af97\") " pod="openshift-marketplace/certified-operators-x6mnf"
Feb 03 06:51:38 crc kubenswrapper[4998]: I0203 06:51:38.748367 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lhdmx\" (UniqueName: \"kubernetes.io/projected/c196c5c8-06df-4911-a73d-7ba33f57af97-kube-api-access-lhdmx\") pod \"certified-operators-x6mnf\" (UID: \"c196c5c8-06df-4911-a73d-7ba33f57af97\") " pod="openshift-marketplace/certified-operators-x6mnf"
Feb 03 06:51:38 crc kubenswrapper[4998]: I0203 06:51:38.862534 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jcps7" event={"ID":"99d0d95b-7ead-4827-bdcb-70656ad70707","Type":"ContainerStarted","Data":"6e5a31f733637e7892c467af40a0a19a86cb22a7a12ae9bc5704804a481d0d83"}
Feb 03 06:51:38 crc kubenswrapper[4998]: I0203 06:51:38.871191 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2rc5f" event={"ID":"5f855b43-d82a-4ce2-8471-f3117bf7fd52","Type":"ContainerStarted","Data":"54034c3a23543b29928ccb8e658c633a86eb29ed67d6a4f431f2995d78e535f6"}
Feb 03 06:51:38 crc kubenswrapper[4998]: I0203 06:51:38.946433 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x6mnf"
Feb 03 06:51:39 crc kubenswrapper[4998]: I0203 06:51:39.116096 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ln6cb"]
Feb 03 06:51:39 crc kubenswrapper[4998]: W0203 06:51:39.123373 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda6732673_b9ab_4ac4_95d8_fec7cf3c7a44.slice/crio-1f941867ae5b0e54ee9ad04db7c4eda143eb17a726c3cc3b3b08994748ab2e18 WatchSource:0}: Error finding container 1f941867ae5b0e54ee9ad04db7c4eda143eb17a726c3cc3b3b08994748ab2e18: Status 404 returned error can't find the container with id 1f941867ae5b0e54ee9ad04db7c4eda143eb17a726c3cc3b3b08994748ab2e18
Feb 03 06:51:39 crc kubenswrapper[4998]: I0203 06:51:39.373663 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-x6mnf"]
Feb 03 06:51:39 crc kubenswrapper[4998]: W0203 06:51:39.396640 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc196c5c8_06df_4911_a73d_7ba33f57af97.slice/crio-f5e86394bfa9cf27b3aa42be4fcae61049668e61fb0800e491c62ce2398ef429 WatchSource:0}: Error finding container f5e86394bfa9cf27b3aa42be4fcae61049668e61fb0800e491c62ce2398ef429: Status 404 returned error can't find the container with id f5e86394bfa9cf27b3aa42be4fcae61049668e61fb0800e491c62ce2398ef429
Feb 03 06:51:39 crc kubenswrapper[4998]: I0203 06:51:39.877664 4998 generic.go:334] "Generic (PLEG): container finished" podID="5f855b43-d82a-4ce2-8471-f3117bf7fd52" containerID="54034c3a23543b29928ccb8e658c633a86eb29ed67d6a4f431f2995d78e535f6" exitCode=0
Feb 03 06:51:39 crc kubenswrapper[4998]: I0203 06:51:39.877798 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2rc5f" event={"ID":"5f855b43-d82a-4ce2-8471-f3117bf7fd52","Type":"ContainerDied","Data":"54034c3a23543b29928ccb8e658c633a86eb29ed67d6a4f431f2995d78e535f6"}
Feb 03 06:51:39 crc kubenswrapper[4998]: I0203 06:51:39.888429 4998 generic.go:334] "Generic (PLEG): container finished" podID="99d0d95b-7ead-4827-bdcb-70656ad70707" containerID="6e5a31f733637e7892c467af40a0a19a86cb22a7a12ae9bc5704804a481d0d83" exitCode=0
Feb 03 06:51:39 crc kubenswrapper[4998]: I0203 06:51:39.888538 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jcps7" event={"ID":"99d0d95b-7ead-4827-bdcb-70656ad70707","Type":"ContainerDied","Data":"6e5a31f733637e7892c467af40a0a19a86cb22a7a12ae9bc5704804a481d0d83"}
Feb 03 06:51:39 crc kubenswrapper[4998]: I0203 06:51:39.896021 4998 generic.go:334] "Generic (PLEG): container finished" podID="a6732673-b9ab-4ac4-95d8-fec7cf3c7a44" containerID="3165249e4321c5f30a8953ce22a9a8dcf7ff9dda28541a7d335d06444c34587c" exitCode=0
Feb 03 06:51:39 crc kubenswrapper[4998]: I0203 06:51:39.896200 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ln6cb" event={"ID":"a6732673-b9ab-4ac4-95d8-fec7cf3c7a44","Type":"ContainerDied","Data":"3165249e4321c5f30a8953ce22a9a8dcf7ff9dda28541a7d335d06444c34587c"}
Feb 03 06:51:39 crc kubenswrapper[4998]: I0203 06:51:39.896278 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ln6cb" event={"ID":"a6732673-b9ab-4ac4-95d8-fec7cf3c7a44","Type":"ContainerStarted","Data":"1f941867ae5b0e54ee9ad04db7c4eda143eb17a726c3cc3b3b08994748ab2e18"}
Feb 03 06:51:39 crc kubenswrapper[4998]: I0203 06:51:39.900993 4998 generic.go:334] "Generic (PLEG): container finished" podID="c196c5c8-06df-4911-a73d-7ba33f57af97" containerID="a24cb0bc329dcde85b4ba03675e8bebcab28f815987b17bfb7914d1e40f0c6a3" exitCode=0
Feb 03 06:51:39 crc kubenswrapper[4998]: I0203 06:51:39.901038 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x6mnf" event={"ID":"c196c5c8-06df-4911-a73d-7ba33f57af97","Type":"ContainerDied","Data":"a24cb0bc329dcde85b4ba03675e8bebcab28f815987b17bfb7914d1e40f0c6a3"}
Feb 03 06:51:39 crc kubenswrapper[4998]: I0203 06:51:39.901068 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x6mnf" event={"ID":"c196c5c8-06df-4911-a73d-7ba33f57af97","Type":"ContainerStarted","Data":"f5e86394bfa9cf27b3aa42be4fcae61049668e61fb0800e491c62ce2398ef429"}
Feb 03 06:51:40 crc kubenswrapper[4998]: I0203 06:51:40.912543 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2rc5f" event={"ID":"5f855b43-d82a-4ce2-8471-f3117bf7fd52","Type":"ContainerStarted","Data":"8ccc154fb3dc102db62d85ba00101dcd83b805c4d87df9f0b9a540d70767683a"}
Feb 03 06:51:40 crc kubenswrapper[4998]: I0203 06:51:40.918053 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jcps7" event={"ID":"99d0d95b-7ead-4827-bdcb-70656ad70707","Type":"ContainerStarted","Data":"ff39885b9b255e358ba7542261ce909af80e0e6cef1edd1b4bea6b1bd5bea8e2"}
Feb 03 06:51:40 crc kubenswrapper[4998]: I0203 06:51:40.922686 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ln6cb" event={"ID":"a6732673-b9ab-4ac4-95d8-fec7cf3c7a44","Type":"ContainerStarted","Data":"c3f9442fc973786953f2a8fe7e8318ae9026c4092a66cdf9d6b77172daa3c6d2"}
Feb 03 06:51:40 crc kubenswrapper[4998]: I0203 06:51:40.926598 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x6mnf" event={"ID":"c196c5c8-06df-4911-a73d-7ba33f57af97","Type":"ContainerStarted","Data":"54e5eed017ed1827111f1e52c3b5fdf77dada26480ba1750344092b6e9f0fcda"}
Feb 03 06:51:40 crc kubenswrapper[4998]: I0203 06:51:40.944462 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2rc5f" podStartSLOduration=3.303481671 podStartE2EDuration="5.944443625s" podCreationTimestamp="2026-02-03 06:51:35 +0000 UTC" firstStartedPulling="2026-02-03 06:51:37.846458152 +0000 UTC m=+336.133151948" lastFinishedPulling="2026-02-03 06:51:40.487420096 +0000 UTC m=+338.774113902" observedRunningTime="2026-02-03 06:51:40.938403068 +0000 UTC m=+339.225096884" watchObservedRunningTime="2026-02-03 06:51:40.944443625 +0000 UTC m=+339.231137431"
Feb 03 06:51:40 crc kubenswrapper[4998]: I0203 06:51:40.956819 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-jcps7" podStartSLOduration=2.419170365 podStartE2EDuration="4.956796245s" podCreationTimestamp="2026-02-03 06:51:36 +0000 UTC" firstStartedPulling="2026-02-03 06:51:37.854275493 +0000 UTC m=+336.140969309" lastFinishedPulling="2026-02-03 06:51:40.391901383 +0000 UTC m=+338.678595189" observedRunningTime="2026-02-03 06:51:40.9530499 +0000 UTC m=+339.239743726" watchObservedRunningTime="2026-02-03 06:51:40.956796245 +0000 UTC m=+339.243490051"
Feb 03 06:51:41 crc kubenswrapper[4998]: I0203 06:51:41.933994 4998 generic.go:334] "Generic (PLEG): container finished" podID="a6732673-b9ab-4ac4-95d8-fec7cf3c7a44" containerID="c3f9442fc973786953f2a8fe7e8318ae9026c4092a66cdf9d6b77172daa3c6d2" exitCode=0
Feb 03 06:51:41 crc kubenswrapper[4998]: I0203 06:51:41.934329 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ln6cb" event={"ID":"a6732673-b9ab-4ac4-95d8-fec7cf3c7a44","Type":"ContainerDied","Data":"c3f9442fc973786953f2a8fe7e8318ae9026c4092a66cdf9d6b77172daa3c6d2"}
Feb 03 06:51:41 crc kubenswrapper[4998]: I0203 06:51:41.934361 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ln6cb" event={"ID":"a6732673-b9ab-4ac4-95d8-fec7cf3c7a44","Type":"ContainerStarted","Data":"e7167a5fcd5678bcafefde4ace7b8a8bb53068f403f7e8ee1d0e6a9b19ac7ba1"}
Feb 03 06:51:41 crc kubenswrapper[4998]: I0203 06:51:41.936771 4998 generic.go:334] "Generic (PLEG): container finished" podID="c196c5c8-06df-4911-a73d-7ba33f57af97" containerID="54e5eed017ed1827111f1e52c3b5fdf77dada26480ba1750344092b6e9f0fcda" exitCode=0
Feb 03 06:51:41 crc kubenswrapper[4998]: I0203 06:51:41.937674 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x6mnf" event={"ID":"c196c5c8-06df-4911-a73d-7ba33f57af97","Type":"ContainerDied","Data":"54e5eed017ed1827111f1e52c3b5fdf77dada26480ba1750344092b6e9f0fcda"}
Feb 03 06:51:41 crc kubenswrapper[4998]: I0203 06:51:41.955554 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ln6cb" podStartSLOduration=2.499034642 podStartE2EDuration="3.95553653s" podCreationTimestamp="2026-02-03 06:51:38 +0000 UTC" firstStartedPulling="2026-02-03 06:51:39.898181384 +0000 UTC m=+338.184875190" lastFinishedPulling="2026-02-03 06:51:41.354683262 +0000 UTC m=+339.641377078" observedRunningTime="2026-02-03 06:51:41.953094295 +0000 UTC m=+340.239788111" watchObservedRunningTime="2026-02-03 06:51:41.95553653 +0000 UTC m=+340.242230336"
Feb 03 06:51:42 crc kubenswrapper[4998]: I0203 06:51:42.945302 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x6mnf" event={"ID":"c196c5c8-06df-4911-a73d-7ba33f57af97","Type":"ContainerStarted","Data":"322b19f8965428be2fac4f136fe61479e9dcfbdf8176f3b6b96bed6afe4416c9"}
Feb 03 06:51:42 crc kubenswrapper[4998]: I0203 06:51:42.965847 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-x6mnf" podStartSLOduration=2.325667373 podStartE2EDuration="4.965830453s" podCreationTimestamp="2026-02-03 06:51:38 +0000 UTC" firstStartedPulling="2026-02-03 06:51:39.902391454 +0000 UTC m=+338.189085250" lastFinishedPulling="2026-02-03 06:51:42.542554524 +0000 UTC m=+340.829248330" observedRunningTime="2026-02-03 06:51:42.963870842 +0000 UTC m=+341.250564668" watchObservedRunningTime="2026-02-03 06:51:42.965830453 +0000 UTC m=+341.252524259"
Feb 03 06:51:43 crc kubenswrapper[4998]: I0203 06:51:43.944499 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-mcpbl"
Feb 03 06:51:43 crc kubenswrapper[4998]: I0203 06:51:43.999490 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2jbwn"]
Feb 03 06:51:46 crc kubenswrapper[4998]: I0203 06:51:46.285238 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2rc5f"
Feb 03 06:51:46 crc kubenswrapper[4998]: I0203 06:51:46.286343 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2rc5f"
Feb 03 06:51:46 crc kubenswrapper[4998]: I0203 06:51:46.329096 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2rc5f"
Feb 03 06:51:46 crc kubenswrapper[4998]: I0203 06:51:46.482968 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-jcps7"
Feb 03 06:51:46 crc kubenswrapper[4998]: I0203 06:51:46.483018 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-jcps7"
Feb 03 06:51:46 crc kubenswrapper[4998]: I0203 06:51:46.522081 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-jcps7"
Feb 03 06:51:47 crc kubenswrapper[4998]: I0203 06:51:47.005512 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2rc5f"
Feb 03 06:51:47 crc kubenswrapper[4998]: I0203 06:51:47.015127 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-jcps7"
Feb 03 06:51:48 crc kubenswrapper[4998]: I0203 06:51:48.689838 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ln6cb"
Feb 03 06:51:48 crc kubenswrapper[4998]: I0203 06:51:48.690233 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ln6cb"
Feb 03 06:51:48 crc kubenswrapper[4998]: I0203 06:51:48.739159 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ln6cb"
Feb 03 06:51:48 crc kubenswrapper[4998]: I0203 06:51:48.947128 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-x6mnf"
Feb 03 06:51:48 crc kubenswrapper[4998]: I0203 06:51:48.947571 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-x6mnf"
Feb 03 06:51:49 crc kubenswrapper[4998]: I0203 06:51:49.003679 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-x6mnf"
Feb 03 06:51:49 crc kubenswrapper[4998]: I0203 06:51:49.036373 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ln6cb"
Feb 03 06:51:50 crc kubenswrapper[4998]: I0203 06:51:50.041825 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-x6mnf"
Feb 03 06:52:09 crc kubenswrapper[4998]: I0203 06:52:09.029843 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" podUID="b088ff98-5a5a-465d-ad48-9017fdebc2bf" containerName="registry" containerID="cri-o://bbf9d0b85feb7127bbd1ad812165ea29523d96a155103d781e6153abed7c8415" gracePeriod=30
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:52:09 crc kubenswrapper[4998]: I0203 06:52:09.539675 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b088ff98-5a5a-465d-ad48-9017fdebc2bf-ca-trust-extracted\") pod \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " Feb 03 06:52:09 crc kubenswrapper[4998]: I0203 06:52:09.539799 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b088ff98-5a5a-465d-ad48-9017fdebc2bf-registry-certificates\") pod \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " Feb 03 06:52:09 crc kubenswrapper[4998]: I0203 06:52:09.539857 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b088ff98-5a5a-465d-ad48-9017fdebc2bf-trusted-ca\") pod \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " Feb 03 06:52:09 crc kubenswrapper[4998]: I0203 06:52:09.540168 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " Feb 03 06:52:09 crc kubenswrapper[4998]: I0203 06:52:09.540231 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b088ff98-5a5a-465d-ad48-9017fdebc2bf-installation-pull-secrets\") pod \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " Feb 03 06:52:09 crc kubenswrapper[4998]: I0203 06:52:09.540252 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b088ff98-5a5a-465d-ad48-9017fdebc2bf-bound-sa-token\") pod \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " Feb 03 06:52:09 crc kubenswrapper[4998]: I0203 06:52:09.540278 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bg268\" (UniqueName: \"kubernetes.io/projected/b088ff98-5a5a-465d-ad48-9017fdebc2bf-kube-api-access-bg268\") pod \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " Feb 03 06:52:09 crc kubenswrapper[4998]: I0203 06:52:09.540302 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b088ff98-5a5a-465d-ad48-9017fdebc2bf-registry-tls\") pod \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\" (UID: \"b088ff98-5a5a-465d-ad48-9017fdebc2bf\") " Feb 03 06:52:09 crc kubenswrapper[4998]: I0203 06:52:09.540868 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b088ff98-5a5a-465d-ad48-9017fdebc2bf-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "b088ff98-5a5a-465d-ad48-9017fdebc2bf" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:52:09 crc kubenswrapper[4998]: I0203 06:52:09.541242 4998 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b088ff98-5a5a-465d-ad48-9017fdebc2bf-registry-certificates\") on node \"crc\" DevicePath \"\"" Feb 03 06:52:09 crc kubenswrapper[4998]: I0203 06:52:09.541384 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b088ff98-5a5a-465d-ad48-9017fdebc2bf-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "b088ff98-5a5a-465d-ad48-9017fdebc2bf" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:52:09 crc kubenswrapper[4998]: I0203 06:52:09.545420 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b088ff98-5a5a-465d-ad48-9017fdebc2bf-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "b088ff98-5a5a-465d-ad48-9017fdebc2bf" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:52:09 crc kubenswrapper[4998]: I0203 06:52:09.548741 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b088ff98-5a5a-465d-ad48-9017fdebc2bf-kube-api-access-bg268" (OuterVolumeSpecName: "kube-api-access-bg268") pod "b088ff98-5a5a-465d-ad48-9017fdebc2bf" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf"). InnerVolumeSpecName "kube-api-access-bg268". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:52:09 crc kubenswrapper[4998]: I0203 06:52:09.552580 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b088ff98-5a5a-465d-ad48-9017fdebc2bf-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "b088ff98-5a5a-465d-ad48-9017fdebc2bf" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:52:09 crc kubenswrapper[4998]: I0203 06:52:09.553475 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b088ff98-5a5a-465d-ad48-9017fdebc2bf-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "b088ff98-5a5a-465d-ad48-9017fdebc2bf" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:52:09 crc kubenswrapper[4998]: I0203 06:52:09.553926 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "b088ff98-5a5a-465d-ad48-9017fdebc2bf" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Feb 03 06:52:09 crc kubenswrapper[4998]: I0203 06:52:09.557850 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b088ff98-5a5a-465d-ad48-9017fdebc2bf-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "b088ff98-5a5a-465d-ad48-9017fdebc2bf" (UID: "b088ff98-5a5a-465d-ad48-9017fdebc2bf"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 06:52:09 crc kubenswrapper[4998]: I0203 06:52:09.642172 4998 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b088ff98-5a5a-465d-ad48-9017fdebc2bf-registry-tls\") on node \"crc\" DevicePath \"\"" Feb 03 06:52:09 crc kubenswrapper[4998]: I0203 06:52:09.642216 4998 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b088ff98-5a5a-465d-ad48-9017fdebc2bf-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Feb 03 06:52:09 crc kubenswrapper[4998]: I0203 06:52:09.642231 4998 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b088ff98-5a5a-465d-ad48-9017fdebc2bf-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 03 06:52:09 crc kubenswrapper[4998]: I0203 06:52:09.642239 4998 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b088ff98-5a5a-465d-ad48-9017fdebc2bf-bound-sa-token\") on node \"crc\" DevicePath \"\"" Feb 03 06:52:09 crc kubenswrapper[4998]: I0203 06:52:09.642247 4998 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b088ff98-5a5a-465d-ad48-9017fdebc2bf-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Feb 03 06:52:09 crc kubenswrapper[4998]: I0203 06:52:09.642258 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bg268\" (UniqueName: \"kubernetes.io/projected/b088ff98-5a5a-465d-ad48-9017fdebc2bf-kube-api-access-bg268\") on node \"crc\" DevicePath \"\"" Feb 03 06:52:10 crc kubenswrapper[4998]: I0203 06:52:10.098817 4998 generic.go:334] "Generic (PLEG): container finished" podID="b088ff98-5a5a-465d-ad48-9017fdebc2bf" containerID="bbf9d0b85feb7127bbd1ad812165ea29523d96a155103d781e6153abed7c8415" exitCode=0 Feb 03 06:52:10 crc kubenswrapper[4998]: I0203 06:52:10.098858 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" event={"ID":"b088ff98-5a5a-465d-ad48-9017fdebc2bf","Type":"ContainerDied","Data":"bbf9d0b85feb7127bbd1ad812165ea29523d96a155103d781e6153abed7c8415"} Feb 03 06:52:10 crc kubenswrapper[4998]: I0203 06:52:10.098887 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" event={"ID":"b088ff98-5a5a-465d-ad48-9017fdebc2bf","Type":"ContainerDied","Data":"dba47fbc8c7fff5cf599d5913324ffa3987e798cd36f2dea2b1c6df7c4b967a1"} Feb 03 06:52:10 crc kubenswrapper[4998]: I0203 06:52:10.098859 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-2jbwn" Feb 03 06:52:10 crc kubenswrapper[4998]: I0203 06:52:10.098906 4998 scope.go:117] "RemoveContainer" containerID="bbf9d0b85feb7127bbd1ad812165ea29523d96a155103d781e6153abed7c8415" Feb 03 06:52:10 crc kubenswrapper[4998]: I0203 06:52:10.118131 4998 scope.go:117] "RemoveContainer" containerID="bbf9d0b85feb7127bbd1ad812165ea29523d96a155103d781e6153abed7c8415" Feb 03 06:52:10 crc kubenswrapper[4998]: E0203 06:52:10.118683 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bbf9d0b85feb7127bbd1ad812165ea29523d96a155103d781e6153abed7c8415\": container with ID starting with bbf9d0b85feb7127bbd1ad812165ea29523d96a155103d781e6153abed7c8415 not found: ID does not exist" containerID="bbf9d0b85feb7127bbd1ad812165ea29523d96a155103d781e6153abed7c8415" Feb 03 06:52:10 crc kubenswrapper[4998]: I0203 06:52:10.118748 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bbf9d0b85feb7127bbd1ad812165ea29523d96a155103d781e6153abed7c8415"} err="failed to get container status \"bbf9d0b85feb7127bbd1ad812165ea29523d96a155103d781e6153abed7c8415\": rpc error: code = NotFound desc = could not find container \"bbf9d0b85feb7127bbd1ad812165ea29523d96a155103d781e6153abed7c8415\": container with ID starting with bbf9d0b85feb7127bbd1ad812165ea29523d96a155103d781e6153abed7c8415 not found: ID does not exist" Feb 03 06:52:10 crc kubenswrapper[4998]: I0203 06:52:10.126077 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2jbwn"] Feb 03 06:52:10 crc kubenswrapper[4998]: I0203 06:52:10.129877 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2jbwn"] Feb 03 06:52:10 crc kubenswrapper[4998]: I0203 06:52:10.433710 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b088ff98-5a5a-465d-ad48-9017fdebc2bf" path="/var/lib/kubelet/pods/b088ff98-5a5a-465d-ad48-9017fdebc2bf/volumes" Feb 03 06:52:12 crc kubenswrapper[4998]: I0203 06:52:12.753905 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 06:52:12 crc kubenswrapper[4998]: I0203 06:52:12.754358 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 06:52:42 crc kubenswrapper[4998]: I0203 06:52:42.754219 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 06:52:42 crc kubenswrapper[4998]: I0203 06:52:42.754848 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 06:53:12 crc kubenswrapper[4998]: I0203 06:53:12.754592 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 06:53:12 crc kubenswrapper[4998]: I0203 06:53:12.755125 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 06:53:12 crc kubenswrapper[4998]: I0203 06:53:12.755171 4998 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" Feb 03 06:53:12 crc kubenswrapper[4998]: I0203 06:53:12.755798 4998 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e7b04cf1ae37e7960e02eb60e4973e5642ac44c2988a51dc4f455c758b1cbe4b"} pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 03 06:53:12 crc kubenswrapper[4998]: I0203 06:53:12.755863 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" containerID="cri-o://e7b04cf1ae37e7960e02eb60e4973e5642ac44c2988a51dc4f455c758b1cbe4b" gracePeriod=600 Feb 03 06:53:13 crc kubenswrapper[4998]: I0203 06:53:13.431844 4998 generic.go:334] "Generic (PLEG): container finished" podID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerID="e7b04cf1ae37e7960e02eb60e4973e5642ac44c2988a51dc4f455c758b1cbe4b" exitCode=0 Feb 03 06:53:13 crc kubenswrapper[4998]: I0203 06:53:13.431990 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerDied","Data":"e7b04cf1ae37e7960e02eb60e4973e5642ac44c2988a51dc4f455c758b1cbe4b"} Feb 03 06:53:13 crc kubenswrapper[4998]: I0203 06:53:13.432192 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerStarted","Data":"c2e1ee8ca9dcaa42720784bd3014dda6a6aa2a520029e1b726ecb558f7bcee0e"} Feb 03 06:53:13 crc kubenswrapper[4998]: I0203 06:53:13.432215 4998 scope.go:117] "RemoveContainer" containerID="8ab581feac753b27611d84de9a135dada6c09b508339e915247f806c6d69db1e" Feb 03 06:55:12 crc kubenswrapper[4998]: I0203 06:55:12.753974 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 06:55:12 crc kubenswrapper[4998]: I0203 06:55:12.754737 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" 
podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 06:55:42 crc kubenswrapper[4998]: I0203 06:55:42.754029 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 06:55:42 crc kubenswrapper[4998]: I0203 06:55:42.754576 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 06:56:12 crc kubenswrapper[4998]: I0203 06:56:12.753983 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 06:56:12 crc kubenswrapper[4998]: I0203 06:56:12.754813 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 06:56:12 crc kubenswrapper[4998]: I0203 06:56:12.754873 4998 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" Feb 03 06:56:12 crc kubenswrapper[4998]: I0203 06:56:12.755484 4998 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c2e1ee8ca9dcaa42720784bd3014dda6a6aa2a520029e1b726ecb558f7bcee0e"} pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 03 06:56:12 crc kubenswrapper[4998]: I0203 06:56:12.755554 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" containerID="cri-o://c2e1ee8ca9dcaa42720784bd3014dda6a6aa2a520029e1b726ecb558f7bcee0e" gracePeriod=600 Feb 03 06:56:13 crc kubenswrapper[4998]: I0203 06:56:13.495240 4998 generic.go:334] "Generic (PLEG): container finished" podID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerID="c2e1ee8ca9dcaa42720784bd3014dda6a6aa2a520029e1b726ecb558f7bcee0e" exitCode=0 Feb 03 06:56:13 crc kubenswrapper[4998]: I0203 06:56:13.495567 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerDied","Data":"c2e1ee8ca9dcaa42720784bd3014dda6a6aa2a520029e1b726ecb558f7bcee0e"} Feb 03 06:56:13 crc kubenswrapper[4998]: I0203 06:56:13.495599 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" 
event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerStarted","Data":"a715304a8e506b324a18b7e44d744d3e43f59a2529eede37e78f5f26ef938332"} Feb 03 06:56:13 crc kubenswrapper[4998]: I0203 06:56:13.495618 4998 scope.go:117] "RemoveContainer" containerID="e7b04cf1ae37e7960e02eb60e4973e5642ac44c2988a51dc4f455c758b1cbe4b" Feb 03 06:57:41 crc kubenswrapper[4998]: I0203 06:57:41.705710 4998 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Feb 03 06:58:12 crc kubenswrapper[4998]: I0203 06:58:12.754353 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 06:58:12 crc kubenswrapper[4998]: I0203 06:58:12.755000 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 06:58:29 crc kubenswrapper[4998]: I0203 06:58:29.830181 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-p7b8d"] Feb 03 06:58:29 crc kubenswrapper[4998]: I0203 06:58:29.831492 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="ovn-controller" containerID="cri-o://3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254" gracePeriod=30 Feb 03 06:58:29 crc kubenswrapper[4998]: I0203 06:58:29.831570 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="nbdb" containerID="cri-o://0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef" gracePeriod=30 Feb 03 06:58:29 crc kubenswrapper[4998]: I0203 06:58:29.831663 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="northd" containerID="cri-o://200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc" gracePeriod=30 Feb 03 06:58:29 crc kubenswrapper[4998]: I0203 06:58:29.831732 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb" gracePeriod=30 Feb 03 06:58:29 crc kubenswrapper[4998]: I0203 06:58:29.831819 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="kube-rbac-proxy-node" containerID="cri-o://0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f" gracePeriod=30 Feb 03 06:58:29 crc kubenswrapper[4998]: I0203 06:58:29.831878 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="ovn-acl-logging" 
containerID="cri-o://9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c" gracePeriod=30 Feb 03 06:58:29 crc kubenswrapper[4998]: I0203 06:58:29.831976 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="sbdb" containerID="cri-o://943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e" gracePeriod=30 Feb 03 06:58:29 crc kubenswrapper[4998]: I0203 06:58:29.872878 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="ovnkube-controller" containerID="cri-o://0405b7789d47d71f97fe3697fd2132fe189ba35e6a1c87912ed75e8814a98284" gracePeriod=30 Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.122265 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7b8d_f7418b1d-9f7d-48cd-aac4-6a1b85967841/ovnkube-controller/2.log" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.124422 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7b8d_f7418b1d-9f7d-48cd-aac4-6a1b85967841/ovn-acl-logging/0.log" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.124972 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7b8d_f7418b1d-9f7d-48cd-aac4-6a1b85967841/ovn-controller/0.log" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.125346 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.172306 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-run-netns\") pod \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.172388 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-log-socket\") pod \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.172412 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-node-log\") pod \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.172412 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "f7418b1d-9f7d-48cd-aac4-6a1b85967841" (UID: "f7418b1d-9f7d-48cd-aac4-6a1b85967841"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.172523 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-log-socket" (OuterVolumeSpecName: "log-socket") pod "f7418b1d-9f7d-48cd-aac4-6a1b85967841" (UID: "f7418b1d-9f7d-48cd-aac4-6a1b85967841"). 
InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.172585 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-node-log" (OuterVolumeSpecName: "node-log") pod "f7418b1d-9f7d-48cd-aac4-6a1b85967841" (UID: "f7418b1d-9f7d-48cd-aac4-6a1b85967841"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.172591 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-run-systemd\") pod \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.172654 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-run-ovn-kubernetes\") pod \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.172685 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9zmz2\" (UniqueName: \"kubernetes.io/projected/f7418b1d-9f7d-48cd-aac4-6a1b85967841-kube-api-access-9zmz2\") pod \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.172729 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-var-lib-openvswitch\") pod \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.172738 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "f7418b1d-9f7d-48cd-aac4-6a1b85967841" (UID: "f7418b1d-9f7d-48cd-aac4-6a1b85967841"). InnerVolumeSpecName "host-run-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.172751 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-systemd-units\") pod \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.172795 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f7418b1d-9f7d-48cd-aac4-6a1b85967841-ovnkube-config\") pod \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.172815 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-cni-bin\") pod \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.172860 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-var-lib-cni-networks-ovn-kubernetes\") pod \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.172916 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f7418b1d-9f7d-48cd-aac4-6a1b85967841-env-overrides\") pod \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.172942 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f7418b1d-9f7d-48cd-aac4-6a1b85967841-ovnkube-script-lib\") pod \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.172967 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-run-openvswitch\") pod \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.173032 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-kubelet\") pod \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.173124 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f7418b1d-9f7d-48cd-aac4-6a1b85967841-ovn-node-metrics-cert\") pod \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.173157 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-slash\") 
pod \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.173181 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-etc-openvswitch\") pod \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.173208 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-cni-netd\") pod \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.173230 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-run-ovn\") pod \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\" (UID: \"f7418b1d-9f7d-48cd-aac4-6a1b85967841\") " Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.173579 4998 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-run-netns\") on node \"crc\" DevicePath \"\"" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.173604 4998 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-log-socket\") on node \"crc\" DevicePath \"\"" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.173615 4998 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-node-log\") on node \"crc\" DevicePath \"\"" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.173627 4998 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.173662 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "f7418b1d-9f7d-48cd-aac4-6a1b85967841" (UID: "f7418b1d-9f7d-48cd-aac4-6a1b85967841"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.173688 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-slash" (OuterVolumeSpecName: "host-slash") pod "f7418b1d-9f7d-48cd-aac4-6a1b85967841" (UID: "f7418b1d-9f7d-48cd-aac4-6a1b85967841"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.173711 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "f7418b1d-9f7d-48cd-aac4-6a1b85967841" (UID: "f7418b1d-9f7d-48cd-aac4-6a1b85967841"). InnerVolumeSpecName "etc-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.173733 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "f7418b1d-9f7d-48cd-aac4-6a1b85967841" (UID: "f7418b1d-9f7d-48cd-aac4-6a1b85967841"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.173758 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "f7418b1d-9f7d-48cd-aac4-6a1b85967841" (UID: "f7418b1d-9f7d-48cd-aac4-6a1b85967841"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.173896 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "f7418b1d-9f7d-48cd-aac4-6a1b85967841" (UID: "f7418b1d-9f7d-48cd-aac4-6a1b85967841"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.173935 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "f7418b1d-9f7d-48cd-aac4-6a1b85967841" (UID: "f7418b1d-9f7d-48cd-aac4-6a1b85967841"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.173928 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "f7418b1d-9f7d-48cd-aac4-6a1b85967841" (UID: "f7418b1d-9f7d-48cd-aac4-6a1b85967841"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.174079 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "f7418b1d-9f7d-48cd-aac4-6a1b85967841" (UID: "f7418b1d-9f7d-48cd-aac4-6a1b85967841"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.174123 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "f7418b1d-9f7d-48cd-aac4-6a1b85967841" (UID: "f7418b1d-9f7d-48cd-aac4-6a1b85967841"). InnerVolumeSpecName "host-kubelet". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.174333 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7418b1d-9f7d-48cd-aac4-6a1b85967841-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "f7418b1d-9f7d-48cd-aac4-6a1b85967841" (UID: "f7418b1d-9f7d-48cd-aac4-6a1b85967841"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.174349 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7418b1d-9f7d-48cd-aac4-6a1b85967841-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "f7418b1d-9f7d-48cd-aac4-6a1b85967841" (UID: "f7418b1d-9f7d-48cd-aac4-6a1b85967841"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.174708 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7418b1d-9f7d-48cd-aac4-6a1b85967841-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "f7418b1d-9f7d-48cd-aac4-6a1b85967841" (UID: "f7418b1d-9f7d-48cd-aac4-6a1b85967841"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.178435 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-q8m29"] Feb 03 06:58:30 crc kubenswrapper[4998]: E0203 06:58:30.178755 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="kube-rbac-proxy-node" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.178797 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="kube-rbac-proxy-node" Feb 03 06:58:30 crc kubenswrapper[4998]: E0203 06:58:30.178811 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="ovn-acl-logging" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.178819 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="ovn-acl-logging" Feb 03 06:58:30 crc kubenswrapper[4998]: E0203 06:58:30.178829 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="northd" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.178838 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="northd" Feb 03 06:58:30 crc kubenswrapper[4998]: E0203 06:58:30.178851 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="ovn-controller" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.178879 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="ovn-controller" Feb 03 06:58:30 crc kubenswrapper[4998]: E0203 06:58:30.178893 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="ovnkube-controller" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.178901 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="ovnkube-controller" Feb 03 06:58:30 
crc kubenswrapper[4998]: E0203 06:58:30.178916 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="kube-rbac-proxy-ovn-metrics" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.178924 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="kube-rbac-proxy-ovn-metrics" Feb 03 06:58:30 crc kubenswrapper[4998]: E0203 06:58:30.178933 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="sbdb" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.178959 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="sbdb" Feb 03 06:58:30 crc kubenswrapper[4998]: E0203 06:58:30.178969 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="ovnkube-controller" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.178977 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="ovnkube-controller" Feb 03 06:58:30 crc kubenswrapper[4998]: E0203 06:58:30.178987 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="ovnkube-controller" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.178994 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="ovnkube-controller" Feb 03 06:58:30 crc kubenswrapper[4998]: E0203 06:58:30.179006 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="nbdb" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.179015 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="nbdb" Feb 03 06:58:30 crc kubenswrapper[4998]: E0203 06:58:30.179046 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="ovnkube-controller" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.179054 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="ovnkube-controller" Feb 03 06:58:30 crc kubenswrapper[4998]: E0203 06:58:30.179062 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b088ff98-5a5a-465d-ad48-9017fdebc2bf" containerName="registry" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.179069 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="b088ff98-5a5a-465d-ad48-9017fdebc2bf" containerName="registry" Feb 03 06:58:30 crc kubenswrapper[4998]: E0203 06:58:30.179081 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="kubecfg-setup" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.179087 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="kubecfg-setup" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.179235 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="ovnkube-controller" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.179250 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="ovn-controller" Feb 03 
06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.179279 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="ovn-acl-logging" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.179288 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="ovnkube-controller" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.179296 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="nbdb" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.179306 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="northd" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.179319 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="sbdb" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.179329 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="kube-rbac-proxy-node" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.179364 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="kube-rbac-proxy-ovn-metrics" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.179379 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="b088ff98-5a5a-465d-ad48-9017fdebc2bf" containerName="registry" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.179476 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7418b1d-9f7d-48cd-aac4-6a1b85967841-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "f7418b1d-9f7d-48cd-aac4-6a1b85967841" (UID: "f7418b1d-9f7d-48cd-aac4-6a1b85967841"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.179661 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="ovnkube-controller" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.179697 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerName="ovnkube-controller" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.180549 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7418b1d-9f7d-48cd-aac4-6a1b85967841-kube-api-access-9zmz2" (OuterVolumeSpecName: "kube-api-access-9zmz2") pod "f7418b1d-9f7d-48cd-aac4-6a1b85967841" (UID: "f7418b1d-9f7d-48cd-aac4-6a1b85967841"). InnerVolumeSpecName "kube-api-access-9zmz2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.182358 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.203579 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "f7418b1d-9f7d-48cd-aac4-6a1b85967841" (UID: "f7418b1d-9f7d-48cd-aac4-6a1b85967841"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.275050 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.275096 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nqpjj\" (UniqueName: \"kubernetes.io/projected/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-kube-api-access-nqpjj\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.275127 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-log-socket\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.275151 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-host-run-netns\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.275177 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-host-kubelet\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.275287 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-ovnkube-config\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.275375 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-env-overrides\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.275447 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-run-ovn\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.275486 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-run-openvswitch\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.275514 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-node-log\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.275535 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-host-slash\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.275549 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-host-cni-bin\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.275565 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-var-lib-openvswitch\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.275658 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-host-cni-netd\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.275688 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-host-run-ovn-kubernetes\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.275717 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-ovn-node-metrics-cert\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.275747 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-systemd-units\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.275820 4998 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-run-systemd\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.275848 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-ovnkube-script-lib\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.275870 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-etc-openvswitch\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.275968 4998 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-run-systemd\") on node \"crc\" DevicePath \"\"" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.275985 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9zmz2\" (UniqueName: \"kubernetes.io/projected/f7418b1d-9f7d-48cd-aac4-6a1b85967841-kube-api-access-9zmz2\") on node \"crc\" DevicePath \"\"" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.276000 4998 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.276012 4998 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-systemd-units\") on node \"crc\" DevicePath \"\"" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.276023 4998 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-cni-bin\") on node \"crc\" DevicePath \"\"" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.276035 4998 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f7418b1d-9f7d-48cd-aac4-6a1b85967841-ovnkube-config\") on node \"crc\" DevicePath \"\"" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.276048 4998 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.276061 4998 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f7418b1d-9f7d-48cd-aac4-6a1b85967841-env-overrides\") on node \"crc\" DevicePath \"\"" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.276073 4998 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-run-openvswitch\") on node \"crc\" DevicePath \"\"" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.276084 4998 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f7418b1d-9f7d-48cd-aac4-6a1b85967841-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.276097 4998 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-kubelet\") on node \"crc\" DevicePath \"\"" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.276108 4998 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f7418b1d-9f7d-48cd-aac4-6a1b85967841-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.276122 4998 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-slash\") on node \"crc\" DevicePath \"\"" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.276133 4998 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.276144 4998 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-host-cni-netd\") on node \"crc\" DevicePath \"\"" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.276156 4998 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f7418b1d-9f7d-48cd-aac4-6a1b85967841-run-ovn\") on node \"crc\" DevicePath \"\"" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.292058 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-dm4vz_2cba0dd3-b238-4ad4-9517-e2bf7d30b635/kube-multus/1.log" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.292547 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-dm4vz_2cba0dd3-b238-4ad4-9517-e2bf7d30b635/kube-multus/0.log" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.292598 4998 generic.go:334] "Generic (PLEG): container finished" podID="2cba0dd3-b238-4ad4-9517-e2bf7d30b635" containerID="d6afdba061c0dd83d8135146f145d9e6a1a20d03991ade7d3f9e9a925924de19" exitCode=2 Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.292674 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-dm4vz" event={"ID":"2cba0dd3-b238-4ad4-9517-e2bf7d30b635","Type":"ContainerDied","Data":"d6afdba061c0dd83d8135146f145d9e6a1a20d03991ade7d3f9e9a925924de19"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.292778 4998 scope.go:117] "RemoveContainer" containerID="91ddb23bc0f787c0874522cb2f92e61f895a03595a116248d27fbc2030a1cf92" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.293225 4998 scope.go:117] "RemoveContainer" containerID="d6afdba061c0dd83d8135146f145d9e6a1a20d03991ade7d3f9e9a925924de19" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.295568 4998 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7b8d_f7418b1d-9f7d-48cd-aac4-6a1b85967841/ovnkube-controller/2.log" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.298482 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7b8d_f7418b1d-9f7d-48cd-aac4-6a1b85967841/ovn-acl-logging/0.log" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.299457 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7b8d_f7418b1d-9f7d-48cd-aac4-6a1b85967841/ovn-controller/0.log" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.299845 4998 generic.go:334] "Generic (PLEG): container finished" podID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerID="0405b7789d47d71f97fe3697fd2132fe189ba35e6a1c87912ed75e8814a98284" exitCode=0 Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.299873 4998 generic.go:334] "Generic (PLEG): container finished" podID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerID="943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e" exitCode=0 Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.299883 4998 generic.go:334] "Generic (PLEG): container finished" podID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerID="0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef" exitCode=0 Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.299892 4998 generic.go:334] "Generic (PLEG): container finished" podID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerID="200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc" exitCode=0 Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.299900 4998 generic.go:334] "Generic (PLEG): container finished" podID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerID="2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb" exitCode=0 Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.299907 4998 generic.go:334] "Generic (PLEG): container finished" podID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerID="0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f" exitCode=0 Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.299915 4998 generic.go:334] "Generic (PLEG): container finished" podID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerID="9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c" exitCode=143 Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.299922 4998 generic.go:334] "Generic (PLEG): container finished" podID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" containerID="3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254" exitCode=143 Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.299930 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.299947 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" event={"ID":"f7418b1d-9f7d-48cd-aac4-6a1b85967841","Type":"ContainerDied","Data":"0405b7789d47d71f97fe3697fd2132fe189ba35e6a1c87912ed75e8814a98284"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.299996 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" event={"ID":"f7418b1d-9f7d-48cd-aac4-6a1b85967841","Type":"ContainerDied","Data":"943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300013 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" event={"ID":"f7418b1d-9f7d-48cd-aac4-6a1b85967841","Type":"ContainerDied","Data":"0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300025 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" event={"ID":"f7418b1d-9f7d-48cd-aac4-6a1b85967841","Type":"ContainerDied","Data":"200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300039 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" event={"ID":"f7418b1d-9f7d-48cd-aac4-6a1b85967841","Type":"ContainerDied","Data":"2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300050 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" event={"ID":"f7418b1d-9f7d-48cd-aac4-6a1b85967841","Type":"ContainerDied","Data":"0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300065 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0405b7789d47d71f97fe3697fd2132fe189ba35e6a1c87912ed75e8814a98284"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300078 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300085 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300092 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300098 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300105 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300112 4998 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300118 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300126 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300132 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300143 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" event={"ID":"f7418b1d-9f7d-48cd-aac4-6a1b85967841","Type":"ContainerDied","Data":"9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300154 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0405b7789d47d71f97fe3697fd2132fe189ba35e6a1c87912ed75e8814a98284"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300162 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300168 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300175 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300181 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300187 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300193 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300201 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300207 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300214 4998 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300222 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" event={"ID":"f7418b1d-9f7d-48cd-aac4-6a1b85967841","Type":"ContainerDied","Data":"3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300232 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0405b7789d47d71f97fe3697fd2132fe189ba35e6a1c87912ed75e8814a98284"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300240 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300247 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300253 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300260 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300266 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300273 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300280 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300286 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300292 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300301 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7b8d" event={"ID":"f7418b1d-9f7d-48cd-aac4-6a1b85967841","Type":"ContainerDied","Data":"bbd0fe9ccb5d93b1e7493e04e18e8934312198958fb7d96c5c2e225dd0a7acab"} Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300311 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0405b7789d47d71f97fe3697fd2132fe189ba35e6a1c87912ed75e8814a98284"} 
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300319 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d"}
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300326 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e"}
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300332 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef"}
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300339 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc"}
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300346 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb"}
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300353 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f"}
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300359 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c"}
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300365 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254"}
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.300372 4998 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c"}
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.331063 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-p7b8d"]
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.337178 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-p7b8d"]
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.340217 4998 scope.go:117] "RemoveContainer" containerID="0405b7789d47d71f97fe3697fd2132fe189ba35e6a1c87912ed75e8814a98284"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.358685 4998 scope.go:117] "RemoveContainer" containerID="ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.377457 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-run-ovn\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.377486 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-run-openvswitch\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.377539 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-node-log\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.377555 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-host-slash\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.377569 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-host-cni-bin\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.377583 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-var-lib-openvswitch\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.377600 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-host-cni-netd\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.377606 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-node-log\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.377651 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-host-run-ovn-kubernetes\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.377619 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-host-run-ovn-kubernetes\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.377576 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-run-ovn\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.377704 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-host-cni-netd\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.377711 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-ovn-node-metrics-cert\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.377731 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-host-cni-bin\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.377736 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-host-slash\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.377744 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-run-openvswitch\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.377704 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-var-lib-openvswitch\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.377818 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-systemd-units\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.377865 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-run-systemd\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.377895 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-ovnkube-script-lib\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.377921 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-etc-openvswitch\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.377954 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-run-systemd\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.377868 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-systemd-units\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.377983 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-etc-openvswitch\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.378096 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.378158 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nqpjj\" (UniqueName: \"kubernetes.io/projected/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-kube-api-access-nqpjj\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.378220 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-log-socket\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.378249 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-host-run-netns\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.378325 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-host-kubelet\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.378406 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-host-run-netns\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.378432 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-ovnkube-config\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.378505 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-env-overrides\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.378629 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-host-kubelet\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.378439 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-log-socket\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.378869 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.379304 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-env-overrides\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.379855 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-ovnkube-config\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.381355 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-ovn-node-metrics-cert\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.382360 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-ovnkube-script-lib\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.390530 4998 scope.go:117] "RemoveContainer" containerID="943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.396180 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nqpjj\" (UniqueName: \"kubernetes.io/projected/55c647c6-5bd7-4556-b2c7-0b520df1a3d9-kube-api-access-nqpjj\") pod \"ovnkube-node-q8m29\" (UID: \"55c647c6-5bd7-4556-b2c7-0b520df1a3d9\") " pod="openshift-ovn-kubernetes/ovnkube-node-q8m29"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.402090 4998 scope.go:117] "RemoveContainer" containerID="0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.414300 4998 scope.go:117] "RemoveContainer" containerID="200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.429744 4998 scope.go:117] "RemoveContainer" containerID="2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.433611 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7418b1d-9f7d-48cd-aac4-6a1b85967841" path="/var/lib/kubelet/pods/f7418b1d-9f7d-48cd-aac4-6a1b85967841/volumes"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.443268 4998 scope.go:117] "RemoveContainer" containerID="0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.470098 4998 scope.go:117] "RemoveContainer" containerID="9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.484576 4998 scope.go:117] "RemoveContainer" containerID="3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.499211 4998 scope.go:117] "RemoveContainer" containerID="fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.515508 4998 scope.go:117] "RemoveContainer" containerID="0405b7789d47d71f97fe3697fd2132fe189ba35e6a1c87912ed75e8814a98284"
Feb 03 06:58:30 crc kubenswrapper[4998]: E0203 06:58:30.515977 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0405b7789d47d71f97fe3697fd2132fe189ba35e6a1c87912ed75e8814a98284\": container with ID starting with 0405b7789d47d71f97fe3697fd2132fe189ba35e6a1c87912ed75e8814a98284 not found: ID does not exist" containerID="0405b7789d47d71f97fe3697fd2132fe189ba35e6a1c87912ed75e8814a98284"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.516029 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0405b7789d47d71f97fe3697fd2132fe189ba35e6a1c87912ed75e8814a98284"} err="failed to get container status \"0405b7789d47d71f97fe3697fd2132fe189ba35e6a1c87912ed75e8814a98284\": rpc error: code = NotFound desc = could not find container \"0405b7789d47d71f97fe3697fd2132fe189ba35e6a1c87912ed75e8814a98284\": container with ID starting with 0405b7789d47d71f97fe3697fd2132fe189ba35e6a1c87912ed75e8814a98284 not found: ID does not exist"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.516067 4998 scope.go:117] "RemoveContainer" containerID="ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d"
Feb 03 06:58:30 crc kubenswrapper[4998]: E0203 06:58:30.516392 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d\": container with ID starting with ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d not found: ID does not exist" containerID="ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.516428 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d"} err="failed to get container status \"ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d\": rpc error: code = NotFound desc = could not find container \"ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d\": container with ID starting with ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d not found: ID does not exist"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.516467 4998 scope.go:117] "RemoveContainer" containerID="943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e"
Feb 03 06:58:30 crc kubenswrapper[4998]: E0203 06:58:30.516895 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e\": container with ID starting with 943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e not found: ID does not exist" containerID="943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.516948 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e"} err="failed to get container status \"943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e\": rpc error: code = NotFound desc = could not find container \"943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e\": container with ID starting with 943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e not found: ID does not exist"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.516987 4998 scope.go:117] "RemoveContainer" containerID="0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef"
Feb 03 06:58:30 crc kubenswrapper[4998]: E0203 06:58:30.517306 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef\": container with ID starting with 0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef not found: ID does not exist" containerID="0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.517344 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef"} err="failed to get container status \"0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef\": rpc error: code = NotFound desc = could not find container \"0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef\": container with ID starting with 0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef not found: ID does not exist"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.517369 4998 scope.go:117] "RemoveContainer" containerID="200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc"
Feb 03 06:58:30 crc kubenswrapper[4998]: E0203 06:58:30.517650 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc\": container with ID starting with 200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc not found: ID does not exist" containerID="200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.517683 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc"} err="failed to get container status \"200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc\": rpc error: code = NotFound desc = could not find container \"200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc\": container with ID starting with 200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc not found: ID does not exist"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.517705 4998 scope.go:117] "RemoveContainer" containerID="2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb"
Feb 03 06:58:30 crc kubenswrapper[4998]: E0203 06:58:30.518223 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb\": container with ID starting with 2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb not found: ID does not exist" containerID="2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.518258 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb"} err="failed to get container status \"2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb\": rpc error: code = NotFound desc = could not find container \"2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb\": container with ID starting with 2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb not found: ID does not exist"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.518273 4998 scope.go:117] "RemoveContainer" containerID="0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f"
Feb 03 06:58:30 crc kubenswrapper[4998]: E0203 06:58:30.518545 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f\": container with ID starting with 0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f not found: ID does not exist" containerID="0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.518568 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f"} err="failed to get container status \"0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f\": rpc error: code = NotFound desc = could not find container \"0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f\": container with ID starting with 0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f not found: ID does not exist"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.518583 4998 scope.go:117] "RemoveContainer" containerID="9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c"
Feb 03 06:58:30 crc kubenswrapper[4998]: E0203 06:58:30.518908 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c\": container with ID starting with 9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c not found: ID does not exist" containerID="9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.518927 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c"} err="failed to get container status \"9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c\": rpc error: code = NotFound desc = could not find container \"9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c\": container with ID starting with 9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c not found: ID does not exist"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.518939 4998 scope.go:117] "RemoveContainer" containerID="3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254"
Feb 03 06:58:30 crc kubenswrapper[4998]: E0203 06:58:30.519167 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254\": container with ID starting with 3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254 not found: ID does not exist" containerID="3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.519198 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254"} err="failed to get container status \"3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254\": rpc error: code = NotFound desc = could not find container \"3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254\": container with ID starting with 3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254 not found: ID does not exist"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.519218 4998 scope.go:117] "RemoveContainer" containerID="fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c"
Feb 03 06:58:30 crc kubenswrapper[4998]: E0203 06:58:30.520773 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\": container with ID starting with fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c not found: ID does not exist" containerID="fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c"
Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.520829 4998
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c"} err="failed to get container status \"fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\": rpc error: code = NotFound desc = could not find container \"fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\": container with ID starting with fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c not found: ID does not exist" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.520852 4998 scope.go:117] "RemoveContainer" containerID="0405b7789d47d71f97fe3697fd2132fe189ba35e6a1c87912ed75e8814a98284" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.521181 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0405b7789d47d71f97fe3697fd2132fe189ba35e6a1c87912ed75e8814a98284"} err="failed to get container status \"0405b7789d47d71f97fe3697fd2132fe189ba35e6a1c87912ed75e8814a98284\": rpc error: code = NotFound desc = could not find container \"0405b7789d47d71f97fe3697fd2132fe189ba35e6a1c87912ed75e8814a98284\": container with ID starting with 0405b7789d47d71f97fe3697fd2132fe189ba35e6a1c87912ed75e8814a98284 not found: ID does not exist" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.521207 4998 scope.go:117] "RemoveContainer" containerID="ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.521452 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d"} err="failed to get container status \"ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d\": rpc error: code = NotFound desc = could not find container \"ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d\": container with ID starting with ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d not found: ID does not exist" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.521482 4998 scope.go:117] "RemoveContainer" containerID="943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.521659 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e"} err="failed to get container status \"943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e\": rpc error: code = NotFound desc = could not find container \"943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e\": container with ID starting with 943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e not found: ID does not exist" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.521684 4998 scope.go:117] "RemoveContainer" containerID="0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.521941 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef"} err="failed to get container status \"0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef\": rpc error: code = NotFound desc = could not find container \"0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef\": container with ID starting with 
0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef not found: ID does not exist" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.521963 4998 scope.go:117] "RemoveContainer" containerID="200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.522544 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc"} err="failed to get container status \"200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc\": rpc error: code = NotFound desc = could not find container \"200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc\": container with ID starting with 200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc not found: ID does not exist" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.522573 4998 scope.go:117] "RemoveContainer" containerID="2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.522841 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb"} err="failed to get container status \"2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb\": rpc error: code = NotFound desc = could not find container \"2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb\": container with ID starting with 2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb not found: ID does not exist" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.522864 4998 scope.go:117] "RemoveContainer" containerID="0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.523170 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f"} err="failed to get container status \"0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f\": rpc error: code = NotFound desc = could not find container \"0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f\": container with ID starting with 0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f not found: ID does not exist" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.523196 4998 scope.go:117] "RemoveContainer" containerID="9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.523474 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c"} err="failed to get container status \"9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c\": rpc error: code = NotFound desc = could not find container \"9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c\": container with ID starting with 9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c not found: ID does not exist" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.523541 4998 scope.go:117] "RemoveContainer" containerID="3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.523895 4998 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254"} err="failed to get container status \"3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254\": rpc error: code = NotFound desc = could not find container \"3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254\": container with ID starting with 3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254 not found: ID does not exist" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.523916 4998 scope.go:117] "RemoveContainer" containerID="fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.524431 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c"} err="failed to get container status \"fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\": rpc error: code = NotFound desc = could not find container \"fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\": container with ID starting with fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c not found: ID does not exist" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.524491 4998 scope.go:117] "RemoveContainer" containerID="0405b7789d47d71f97fe3697fd2132fe189ba35e6a1c87912ed75e8814a98284" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.524817 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0405b7789d47d71f97fe3697fd2132fe189ba35e6a1c87912ed75e8814a98284"} err="failed to get container status \"0405b7789d47d71f97fe3697fd2132fe189ba35e6a1c87912ed75e8814a98284\": rpc error: code = NotFound desc = could not find container \"0405b7789d47d71f97fe3697fd2132fe189ba35e6a1c87912ed75e8814a98284\": container with ID starting with 0405b7789d47d71f97fe3697fd2132fe189ba35e6a1c87912ed75e8814a98284 not found: ID does not exist" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.524842 4998 scope.go:117] "RemoveContainer" containerID="ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.525171 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d"} err="failed to get container status \"ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d\": rpc error: code = NotFound desc = could not find container \"ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d\": container with ID starting with ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d not found: ID does not exist" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.525197 4998 scope.go:117] "RemoveContainer" containerID="943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.525422 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e"} err="failed to get container status \"943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e\": rpc error: code = NotFound desc = could not find container \"943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e\": container with ID starting with 943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e not found: ID does not exist" Feb 
03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.525449 4998 scope.go:117] "RemoveContainer" containerID="0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.525735 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef"} err="failed to get container status \"0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef\": rpc error: code = NotFound desc = could not find container \"0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef\": container with ID starting with 0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef not found: ID does not exist" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.525760 4998 scope.go:117] "RemoveContainer" containerID="200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.526008 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc"} err="failed to get container status \"200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc\": rpc error: code = NotFound desc = could not find container \"200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc\": container with ID starting with 200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc not found: ID does not exist" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.526054 4998 scope.go:117] "RemoveContainer" containerID="2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.526336 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb"} err="failed to get container status \"2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb\": rpc error: code = NotFound desc = could not find container \"2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb\": container with ID starting with 2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb not found: ID does not exist" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.526359 4998 scope.go:117] "RemoveContainer" containerID="0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.526934 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f"} err="failed to get container status \"0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f\": rpc error: code = NotFound desc = could not find container \"0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f\": container with ID starting with 0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f not found: ID does not exist" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.526988 4998 scope.go:117] "RemoveContainer" containerID="9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.527358 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c"} err="failed to get container status 
\"9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c\": rpc error: code = NotFound desc = could not find container \"9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c\": container with ID starting with 9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c not found: ID does not exist" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.527386 4998 scope.go:117] "RemoveContainer" containerID="3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.527628 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254"} err="failed to get container status \"3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254\": rpc error: code = NotFound desc = could not find container \"3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254\": container with ID starting with 3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254 not found: ID does not exist" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.527654 4998 scope.go:117] "RemoveContainer" containerID="fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.527977 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c"} err="failed to get container status \"fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\": rpc error: code = NotFound desc = could not find container \"fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\": container with ID starting with fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c not found: ID does not exist" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.528001 4998 scope.go:117] "RemoveContainer" containerID="0405b7789d47d71f97fe3697fd2132fe189ba35e6a1c87912ed75e8814a98284" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.528265 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0405b7789d47d71f97fe3697fd2132fe189ba35e6a1c87912ed75e8814a98284"} err="failed to get container status \"0405b7789d47d71f97fe3697fd2132fe189ba35e6a1c87912ed75e8814a98284\": rpc error: code = NotFound desc = could not find container \"0405b7789d47d71f97fe3697fd2132fe189ba35e6a1c87912ed75e8814a98284\": container with ID starting with 0405b7789d47d71f97fe3697fd2132fe189ba35e6a1c87912ed75e8814a98284 not found: ID does not exist" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.528287 4998 scope.go:117] "RemoveContainer" containerID="ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.528538 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d"} err="failed to get container status \"ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d\": rpc error: code = NotFound desc = could not find container \"ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d\": container with ID starting with ab345d6c4fe2e5b5bbfc286634ac14e24468d235603c35cc3c573ad6b15d1b5d not found: ID does not exist" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.528566 4998 scope.go:117] "RemoveContainer" 
containerID="943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.529396 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e"} err="failed to get container status \"943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e\": rpc error: code = NotFound desc = could not find container \"943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e\": container with ID starting with 943132f31c3d82b559d912aa937fefb43ae27b9b3aea07c4b2e3dbed7bc42d4e not found: ID does not exist" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.529443 4998 scope.go:117] "RemoveContainer" containerID="0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.529742 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef"} err="failed to get container status \"0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef\": rpc error: code = NotFound desc = could not find container \"0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef\": container with ID starting with 0d7b5ea5682d1c45a8073f9464191e4b0abd837eda8dcfd1202ec508b138f2ef not found: ID does not exist" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.529762 4998 scope.go:117] "RemoveContainer" containerID="200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.529990 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc"} err="failed to get container status \"200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc\": rpc error: code = NotFound desc = could not find container \"200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc\": container with ID starting with 200690a7883137127896498ee219a9451b5d4bf192c517b37dbd2c57ed2533bc not found: ID does not exist" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.530025 4998 scope.go:117] "RemoveContainer" containerID="2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.530284 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb"} err="failed to get container status \"2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb\": rpc error: code = NotFound desc = could not find container \"2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb\": container with ID starting with 2a5edff0f2ee3d0cfd2b0a6aad845bf0fcbafd83ddaa88b15797f1f4eaff36bb not found: ID does not exist" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.530321 4998 scope.go:117] "RemoveContainer" containerID="0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.530504 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.531445 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f"} err="failed to get container status \"0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f\": rpc error: code = NotFound desc = could not find container \"0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f\": container with ID starting with 0ee2282c24836649b14b4007863cb94fe093b9409a83a4db5037715c9b56101f not found: ID does not exist" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.531467 4998 scope.go:117] "RemoveContainer" containerID="9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.531687 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c"} err="failed to get container status \"9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c\": rpc error: code = NotFound desc = could not find container \"9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c\": container with ID starting with 9a39dea3b3bce800da2e1a52f714cf96f6cfb9813147d0bc7bdaa9e563d8619c not found: ID does not exist" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.531722 4998 scope.go:117] "RemoveContainer" containerID="3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.532047 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254"} err="failed to get container status \"3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254\": rpc error: code = NotFound desc = could not find container \"3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254\": container with ID starting with 3eeb1e5b445237c3e9013dcaa36f00c490ffaaf52029a75f39ce3061a99cc254 not found: ID does not exist" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.532068 4998 scope.go:117] "RemoveContainer" containerID="fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c" Feb 03 06:58:30 crc kubenswrapper[4998]: I0203 06:58:30.532409 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c"} err="failed to get container status \"fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\": rpc error: code = NotFound desc = could not find container \"fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c\": container with ID starting with fec43f82e4c300fb77c96a89428dec11dbe093a3bd06e0a68979edbf6fcb001c not found: ID does not exist" Feb 03 06:58:30 crc kubenswrapper[4998]: W0203 06:58:30.547475 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod55c647c6_5bd7_4556_b2c7_0b520df1a3d9.slice/crio-ce0873d8967c716fcae862cf1079ee2e5e7b8ca274284067c218536957c46508 WatchSource:0}: Error finding container ce0873d8967c716fcae862cf1079ee2e5e7b8ca274284067c218536957c46508: Status 404 returned error can't find the container with id ce0873d8967c716fcae862cf1079ee2e5e7b8ca274284067c218536957c46508 Feb 03 06:58:31 crc 
kubenswrapper[4998]: I0203 06:58:31.306339 4998 generic.go:334] "Generic (PLEG): container finished" podID="55c647c6-5bd7-4556-b2c7-0b520df1a3d9" containerID="204a9d836a1e9d83362396613190ec8d94322d16974272a4c58374c1b8d7d74a" exitCode=0 Feb 03 06:58:31 crc kubenswrapper[4998]: I0203 06:58:31.306479 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" event={"ID":"55c647c6-5bd7-4556-b2c7-0b520df1a3d9","Type":"ContainerDied","Data":"204a9d836a1e9d83362396613190ec8d94322d16974272a4c58374c1b8d7d74a"} Feb 03 06:58:31 crc kubenswrapper[4998]: I0203 06:58:31.306707 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" event={"ID":"55c647c6-5bd7-4556-b2c7-0b520df1a3d9","Type":"ContainerStarted","Data":"ce0873d8967c716fcae862cf1079ee2e5e7b8ca274284067c218536957c46508"} Feb 03 06:58:31 crc kubenswrapper[4998]: I0203 06:58:31.309182 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-dm4vz_2cba0dd3-b238-4ad4-9517-e2bf7d30b635/kube-multus/1.log" Feb 03 06:58:31 crc kubenswrapper[4998]: I0203 06:58:31.309236 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-dm4vz" event={"ID":"2cba0dd3-b238-4ad4-9517-e2bf7d30b635","Type":"ContainerStarted","Data":"d2057ab21203bbfbd5f0f23088d189de81d5dcffd520be2902a9101af5c8e344"} Feb 03 06:58:32 crc kubenswrapper[4998]: I0203 06:58:32.318774 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" event={"ID":"55c647c6-5bd7-4556-b2c7-0b520df1a3d9","Type":"ContainerStarted","Data":"175a52ebcbdca00d5362fbcb03f87ee3ae31500e63b8b59f27953520e4bf7690"} Feb 03 06:58:32 crc kubenswrapper[4998]: I0203 06:58:32.319544 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" event={"ID":"55c647c6-5bd7-4556-b2c7-0b520df1a3d9","Type":"ContainerStarted","Data":"3fc777bf6ec8d5375d910c703ef130154bf548ea4b93766a8dcd5a686fcb2ec3"} Feb 03 06:58:32 crc kubenswrapper[4998]: I0203 06:58:32.319569 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" event={"ID":"55c647c6-5bd7-4556-b2c7-0b520df1a3d9","Type":"ContainerStarted","Data":"d243244922ac9da6be65821582d5c04f08b3ef8f53b11d7e2138c2309abf945c"} Feb 03 06:58:32 crc kubenswrapper[4998]: I0203 06:58:32.319586 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" event={"ID":"55c647c6-5bd7-4556-b2c7-0b520df1a3d9","Type":"ContainerStarted","Data":"0f4aa93ce8f8ca3ef7649be51061398efa9d17bc56691a87c8297d881a5372fa"} Feb 03 06:58:32 crc kubenswrapper[4998]: I0203 06:58:32.319604 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" event={"ID":"55c647c6-5bd7-4556-b2c7-0b520df1a3d9","Type":"ContainerStarted","Data":"14b9259890a9a9a97f05c5170f9f843507711cd836a5be150477656291bbe16c"} Feb 03 06:58:32 crc kubenswrapper[4998]: I0203 06:58:32.319620 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" event={"ID":"55c647c6-5bd7-4556-b2c7-0b520df1a3d9","Type":"ContainerStarted","Data":"5b8785d45ecdfdb29c9c19ae3733ebc0d1bf91daeedc13c2d1f87c166402e385"} Feb 03 06:58:34 crc kubenswrapper[4998]: I0203 06:58:34.337110 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" 
event={"ID":"55c647c6-5bd7-4556-b2c7-0b520df1a3d9","Type":"ContainerStarted","Data":"fdfde4f94ef6e765c2ca138c6144abc03cab4c86f03179300aec0f09e8fa8fcc"} Feb 03 06:58:37 crc kubenswrapper[4998]: I0203 06:58:37.363160 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" event={"ID":"55c647c6-5bd7-4556-b2c7-0b520df1a3d9","Type":"ContainerStarted","Data":"c126365470434e8bf79219730013e95f1e16171762e8128847399038de7d68b3"} Feb 03 06:58:37 crc kubenswrapper[4998]: I0203 06:58:37.363770 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" Feb 03 06:58:37 crc kubenswrapper[4998]: I0203 06:58:37.363810 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" Feb 03 06:58:37 crc kubenswrapper[4998]: I0203 06:58:37.363823 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" Feb 03 06:58:37 crc kubenswrapper[4998]: I0203 06:58:37.393896 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" Feb 03 06:58:37 crc kubenswrapper[4998]: I0203 06:58:37.403131 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" Feb 03 06:58:37 crc kubenswrapper[4998]: I0203 06:58:37.413982 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" podStartSLOduration=7.413954867 podStartE2EDuration="7.413954867s" podCreationTimestamp="2026-02-03 06:58:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:58:37.407889484 +0000 UTC m=+755.694583290" watchObservedRunningTime="2026-02-03 06:58:37.413954867 +0000 UTC m=+755.700648713" Feb 03 06:58:37 crc kubenswrapper[4998]: I0203 06:58:37.753801 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-p7mlq"] Feb 03 06:58:37 crc kubenswrapper[4998]: I0203 06:58:37.754527 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-p7mlq" Feb 03 06:58:37 crc kubenswrapper[4998]: I0203 06:58:37.756201 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Feb 03 06:58:37 crc kubenswrapper[4998]: I0203 06:58:37.756757 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Feb 03 06:58:37 crc kubenswrapper[4998]: I0203 06:58:37.756926 4998 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-tk9nc" Feb 03 06:58:37 crc kubenswrapper[4998]: I0203 06:58:37.757000 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Feb 03 06:58:37 crc kubenswrapper[4998]: I0203 06:58:37.777356 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ngml7\" (UniqueName: \"kubernetes.io/projected/53366df6-d5e6-42e3-883e-8050cb593284-kube-api-access-ngml7\") pod \"crc-storage-crc-p7mlq\" (UID: \"53366df6-d5e6-42e3-883e-8050cb593284\") " pod="crc-storage/crc-storage-crc-p7mlq" Feb 03 06:58:37 crc kubenswrapper[4998]: I0203 06:58:37.777465 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/53366df6-d5e6-42e3-883e-8050cb593284-crc-storage\") pod \"crc-storage-crc-p7mlq\" (UID: \"53366df6-d5e6-42e3-883e-8050cb593284\") " pod="crc-storage/crc-storage-crc-p7mlq" Feb 03 06:58:37 crc kubenswrapper[4998]: I0203 06:58:37.777500 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/53366df6-d5e6-42e3-883e-8050cb593284-node-mnt\") pod \"crc-storage-crc-p7mlq\" (UID: \"53366df6-d5e6-42e3-883e-8050cb593284\") " pod="crc-storage/crc-storage-crc-p7mlq" Feb 03 06:58:37 crc kubenswrapper[4998]: I0203 06:58:37.879488 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/53366df6-d5e6-42e3-883e-8050cb593284-crc-storage\") pod \"crc-storage-crc-p7mlq\" (UID: \"53366df6-d5e6-42e3-883e-8050cb593284\") " pod="crc-storage/crc-storage-crc-p7mlq" Feb 03 06:58:37 crc kubenswrapper[4998]: I0203 06:58:37.879540 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/53366df6-d5e6-42e3-883e-8050cb593284-node-mnt\") pod \"crc-storage-crc-p7mlq\" (UID: \"53366df6-d5e6-42e3-883e-8050cb593284\") " pod="crc-storage/crc-storage-crc-p7mlq" Feb 03 06:58:37 crc kubenswrapper[4998]: I0203 06:58:37.879591 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ngml7\" (UniqueName: \"kubernetes.io/projected/53366df6-d5e6-42e3-883e-8050cb593284-kube-api-access-ngml7\") pod \"crc-storage-crc-p7mlq\" (UID: \"53366df6-d5e6-42e3-883e-8050cb593284\") " pod="crc-storage/crc-storage-crc-p7mlq" Feb 03 06:58:37 crc kubenswrapper[4998]: I0203 06:58:37.879997 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/53366df6-d5e6-42e3-883e-8050cb593284-node-mnt\") pod \"crc-storage-crc-p7mlq\" (UID: \"53366df6-d5e6-42e3-883e-8050cb593284\") " pod="crc-storage/crc-storage-crc-p7mlq" Feb 03 06:58:37 crc kubenswrapper[4998]: I0203 06:58:37.880397 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"crc-storage\" (UniqueName: \"kubernetes.io/configmap/53366df6-d5e6-42e3-883e-8050cb593284-crc-storage\") pod \"crc-storage-crc-p7mlq\" (UID: \"53366df6-d5e6-42e3-883e-8050cb593284\") " pod="crc-storage/crc-storage-crc-p7mlq" Feb 03 06:58:37 crc kubenswrapper[4998]: I0203 06:58:37.898375 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ngml7\" (UniqueName: \"kubernetes.io/projected/53366df6-d5e6-42e3-883e-8050cb593284-kube-api-access-ngml7\") pod \"crc-storage-crc-p7mlq\" (UID: \"53366df6-d5e6-42e3-883e-8050cb593284\") " pod="crc-storage/crc-storage-crc-p7mlq" Feb 03 06:58:38 crc kubenswrapper[4998]: I0203 06:58:38.070923 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-p7mlq" Feb 03 06:58:38 crc kubenswrapper[4998]: E0203 06:58:38.107700 4998 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-p7mlq_crc-storage_53366df6-d5e6-42e3-883e-8050cb593284_0(e64694d00edf9b205f7741908c345f6925b02ad5e42e5fc23cf1042ada90fabd): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 03 06:58:38 crc kubenswrapper[4998]: E0203 06:58:38.107779 4998 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-p7mlq_crc-storage_53366df6-d5e6-42e3-883e-8050cb593284_0(e64694d00edf9b205f7741908c345f6925b02ad5e42e5fc23cf1042ada90fabd): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-p7mlq" Feb 03 06:58:38 crc kubenswrapper[4998]: E0203 06:58:38.107813 4998 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-p7mlq_crc-storage_53366df6-d5e6-42e3-883e-8050cb593284_0(e64694d00edf9b205f7741908c345f6925b02ad5e42e5fc23cf1042ada90fabd): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-p7mlq" Feb 03 06:58:38 crc kubenswrapper[4998]: E0203 06:58:38.107861 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-p7mlq_crc-storage(53366df6-d5e6-42e3-883e-8050cb593284)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-p7mlq_crc-storage(53366df6-d5e6-42e3-883e-8050cb593284)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-p7mlq_crc-storage_53366df6-d5e6-42e3-883e-8050cb593284_0(e64694d00edf9b205f7741908c345f6925b02ad5e42e5fc23cf1042ada90fabd): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-p7mlq" podUID="53366df6-d5e6-42e3-883e-8050cb593284" Feb 03 06:58:38 crc kubenswrapper[4998]: I0203 06:58:38.131669 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-p7mlq"] Feb 03 06:58:38 crc kubenswrapper[4998]: I0203 06:58:38.367266 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-p7mlq" Feb 03 06:58:38 crc kubenswrapper[4998]: I0203 06:58:38.367706 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-p7mlq" Feb 03 06:58:38 crc kubenswrapper[4998]: E0203 06:58:38.390611 4998 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-p7mlq_crc-storage_53366df6-d5e6-42e3-883e-8050cb593284_0(00aa1f2280941e495f048f959ac11a0068a3e797b5bc5c8e200f7f36ea81c508): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 03 06:58:38 crc kubenswrapper[4998]: E0203 06:58:38.390660 4998 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-p7mlq_crc-storage_53366df6-d5e6-42e3-883e-8050cb593284_0(00aa1f2280941e495f048f959ac11a0068a3e797b5bc5c8e200f7f36ea81c508): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-p7mlq" Feb 03 06:58:38 crc kubenswrapper[4998]: E0203 06:58:38.390679 4998 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-p7mlq_crc-storage_53366df6-d5e6-42e3-883e-8050cb593284_0(00aa1f2280941e495f048f959ac11a0068a3e797b5bc5c8e200f7f36ea81c508): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-p7mlq" Feb 03 06:58:38 crc kubenswrapper[4998]: E0203 06:58:38.390726 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-p7mlq_crc-storage(53366df6-d5e6-42e3-883e-8050cb593284)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-p7mlq_crc-storage(53366df6-d5e6-42e3-883e-8050cb593284)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-p7mlq_crc-storage_53366df6-d5e6-42e3-883e-8050cb593284_0(00aa1f2280941e495f048f959ac11a0068a3e797b5bc5c8e200f7f36ea81c508): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-p7mlq" podUID="53366df6-d5e6-42e3-883e-8050cb593284" Feb 03 06:58:42 crc kubenswrapper[4998]: I0203 06:58:42.754588 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 06:58:42 crc kubenswrapper[4998]: I0203 06:58:42.755259 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 06:58:49 crc kubenswrapper[4998]: I0203 06:58:49.427270 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-p7mlq" Feb 03 06:58:49 crc kubenswrapper[4998]: I0203 06:58:49.428108 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-p7mlq" Feb 03 06:58:49 crc kubenswrapper[4998]: I0203 06:58:49.810679 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-p7mlq"] Feb 03 06:58:49 crc kubenswrapper[4998]: W0203 06:58:49.821562 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod53366df6_d5e6_42e3_883e_8050cb593284.slice/crio-ebd4f52d0ab34c90c95d008f8e764b5cccc4fda83678ff2bbb927f10204fc946 WatchSource:0}: Error finding container ebd4f52d0ab34c90c95d008f8e764b5cccc4fda83678ff2bbb927f10204fc946: Status 404 returned error can't find the container with id ebd4f52d0ab34c90c95d008f8e764b5cccc4fda83678ff2bbb927f10204fc946 Feb 03 06:58:49 crc kubenswrapper[4998]: I0203 06:58:49.824331 4998 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 03 06:58:50 crc kubenswrapper[4998]: I0203 06:58:50.437312 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-p7mlq" event={"ID":"53366df6-d5e6-42e3-883e-8050cb593284","Type":"ContainerStarted","Data":"ebd4f52d0ab34c90c95d008f8e764b5cccc4fda83678ff2bbb927f10204fc946"} Feb 03 06:58:52 crc kubenswrapper[4998]: I0203 06:58:52.453941 4998 generic.go:334] "Generic (PLEG): container finished" podID="53366df6-d5e6-42e3-883e-8050cb593284" containerID="a8e4648698b19bf57f7161c2e808ea88424ddabb968ad3769b7f072296c0b0d0" exitCode=0 Feb 03 06:58:52 crc kubenswrapper[4998]: I0203 06:58:52.454058 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-p7mlq" event={"ID":"53366df6-d5e6-42e3-883e-8050cb593284","Type":"ContainerDied","Data":"a8e4648698b19bf57f7161c2e808ea88424ddabb968ad3769b7f072296c0b0d0"} Feb 03 06:58:53 crc kubenswrapper[4998]: I0203 06:58:53.746535 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-p7mlq" Feb 03 06:58:53 crc kubenswrapper[4998]: I0203 06:58:53.790360 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngml7\" (UniqueName: \"kubernetes.io/projected/53366df6-d5e6-42e3-883e-8050cb593284-kube-api-access-ngml7\") pod \"53366df6-d5e6-42e3-883e-8050cb593284\" (UID: \"53366df6-d5e6-42e3-883e-8050cb593284\") " Feb 03 06:58:53 crc kubenswrapper[4998]: I0203 06:58:53.790424 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/53366df6-d5e6-42e3-883e-8050cb593284-crc-storage\") pod \"53366df6-d5e6-42e3-883e-8050cb593284\" (UID: \"53366df6-d5e6-42e3-883e-8050cb593284\") " Feb 03 06:58:53 crc kubenswrapper[4998]: I0203 06:58:53.790524 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/53366df6-d5e6-42e3-883e-8050cb593284-node-mnt\") pod \"53366df6-d5e6-42e3-883e-8050cb593284\" (UID: \"53366df6-d5e6-42e3-883e-8050cb593284\") " Feb 03 06:58:53 crc kubenswrapper[4998]: I0203 06:58:53.790852 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/53366df6-d5e6-42e3-883e-8050cb593284-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "53366df6-d5e6-42e3-883e-8050cb593284" (UID: "53366df6-d5e6-42e3-883e-8050cb593284"). InnerVolumeSpecName "node-mnt". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 06:58:53 crc kubenswrapper[4998]: I0203 06:58:53.791063 4998 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/53366df6-d5e6-42e3-883e-8050cb593284-node-mnt\") on node \"crc\" DevicePath \"\"" Feb 03 06:58:53 crc kubenswrapper[4998]: I0203 06:58:53.796207 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53366df6-d5e6-42e3-883e-8050cb593284-kube-api-access-ngml7" (OuterVolumeSpecName: "kube-api-access-ngml7") pod "53366df6-d5e6-42e3-883e-8050cb593284" (UID: "53366df6-d5e6-42e3-883e-8050cb593284"). InnerVolumeSpecName "kube-api-access-ngml7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:58:53 crc kubenswrapper[4998]: I0203 06:58:53.812317 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/53366df6-d5e6-42e3-883e-8050cb593284-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "53366df6-d5e6-42e3-883e-8050cb593284" (UID: "53366df6-d5e6-42e3-883e-8050cb593284"). InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:58:53 crc kubenswrapper[4998]: I0203 06:58:53.891530 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngml7\" (UniqueName: \"kubernetes.io/projected/53366df6-d5e6-42e3-883e-8050cb593284-kube-api-access-ngml7\") on node \"crc\" DevicePath \"\"" Feb 03 06:58:53 crc kubenswrapper[4998]: I0203 06:58:53.891567 4998 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/53366df6-d5e6-42e3-883e-8050cb593284-crc-storage\") on node \"crc\" DevicePath \"\"" Feb 03 06:58:54 crc kubenswrapper[4998]: I0203 06:58:54.473119 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-p7mlq" event={"ID":"53366df6-d5e6-42e3-883e-8050cb593284","Type":"ContainerDied","Data":"ebd4f52d0ab34c90c95d008f8e764b5cccc4fda83678ff2bbb927f10204fc946"} Feb 03 06:58:54 crc kubenswrapper[4998]: I0203 06:58:54.473248 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ebd4f52d0ab34c90c95d008f8e764b5cccc4fda83678ff2bbb927f10204fc946" Feb 03 06:58:54 crc kubenswrapper[4998]: I0203 06:58:54.473186 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-p7mlq" Feb 03 06:59:00 crc kubenswrapper[4998]: I0203 06:59:00.553197 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-q8m29" Feb 03 06:59:01 crc kubenswrapper[4998]: I0203 06:59:01.544799 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24"] Feb 03 06:59:01 crc kubenswrapper[4998]: E0203 06:59:01.545375 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53366df6-d5e6-42e3-883e-8050cb593284" containerName="storage" Feb 03 06:59:01 crc kubenswrapper[4998]: I0203 06:59:01.545393 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="53366df6-d5e6-42e3-883e-8050cb593284" containerName="storage" Feb 03 06:59:01 crc kubenswrapper[4998]: I0203 06:59:01.545541 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="53366df6-d5e6-42e3-883e-8050cb593284" containerName="storage" Feb 03 06:59:01 crc kubenswrapper[4998]: I0203 06:59:01.546364 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24" Feb 03 06:59:01 crc kubenswrapper[4998]: I0203 06:59:01.556402 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Feb 03 06:59:01 crc kubenswrapper[4998]: I0203 06:59:01.558071 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24"] Feb 03 06:59:01 crc kubenswrapper[4998]: I0203 06:59:01.588200 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7567f7dc-dd95-4c33-bf94-6a5314c9ec8d-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24\" (UID: \"7567f7dc-dd95-4c33-bf94-6a5314c9ec8d\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24" Feb 03 06:59:01 crc kubenswrapper[4998]: I0203 06:59:01.588318 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lhzsz\" (UniqueName: \"kubernetes.io/projected/7567f7dc-dd95-4c33-bf94-6a5314c9ec8d-kube-api-access-lhzsz\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24\" (UID: \"7567f7dc-dd95-4c33-bf94-6a5314c9ec8d\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24" Feb 03 06:59:01 crc kubenswrapper[4998]: I0203 06:59:01.588357 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7567f7dc-dd95-4c33-bf94-6a5314c9ec8d-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24\" (UID: \"7567f7dc-dd95-4c33-bf94-6a5314c9ec8d\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24" Feb 03 06:59:01 crc kubenswrapper[4998]: I0203 06:59:01.689971 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7567f7dc-dd95-4c33-bf94-6a5314c9ec8d-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24\" (UID: \"7567f7dc-dd95-4c33-bf94-6a5314c9ec8d\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24" Feb 03 06:59:01 crc kubenswrapper[4998]: I0203 06:59:01.690032 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7567f7dc-dd95-4c33-bf94-6a5314c9ec8d-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24\" (UID: \"7567f7dc-dd95-4c33-bf94-6a5314c9ec8d\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24" Feb 03 06:59:01 crc kubenswrapper[4998]: I0203 06:59:01.690094 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lhzsz\" (UniqueName: \"kubernetes.io/projected/7567f7dc-dd95-4c33-bf94-6a5314c9ec8d-kube-api-access-lhzsz\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24\" (UID: \"7567f7dc-dd95-4c33-bf94-6a5314c9ec8d\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24" Feb 03 06:59:01 crc kubenswrapper[4998]: I0203 06:59:01.690719 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/7567f7dc-dd95-4c33-bf94-6a5314c9ec8d-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24\" (UID: \"7567f7dc-dd95-4c33-bf94-6a5314c9ec8d\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24" Feb 03 06:59:01 crc kubenswrapper[4998]: I0203 06:59:01.690770 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7567f7dc-dd95-4c33-bf94-6a5314c9ec8d-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24\" (UID: \"7567f7dc-dd95-4c33-bf94-6a5314c9ec8d\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24" Feb 03 06:59:01 crc kubenswrapper[4998]: I0203 06:59:01.708156 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lhzsz\" (UniqueName: \"kubernetes.io/projected/7567f7dc-dd95-4c33-bf94-6a5314c9ec8d-kube-api-access-lhzsz\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24\" (UID: \"7567f7dc-dd95-4c33-bf94-6a5314c9ec8d\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24" Feb 03 06:59:01 crc kubenswrapper[4998]: I0203 06:59:01.861255 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24" Feb 03 06:59:02 crc kubenswrapper[4998]: I0203 06:59:02.260603 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24"] Feb 03 06:59:02 crc kubenswrapper[4998]: W0203 06:59:02.267936 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7567f7dc_dd95_4c33_bf94_6a5314c9ec8d.slice/crio-605eaf9e7396563eca3ddfb0f1c523016b29e4cba0bac49ae9a3c2641f7e8cb9 WatchSource:0}: Error finding container 605eaf9e7396563eca3ddfb0f1c523016b29e4cba0bac49ae9a3c2641f7e8cb9: Status 404 returned error can't find the container with id 605eaf9e7396563eca3ddfb0f1c523016b29e4cba0bac49ae9a3c2641f7e8cb9 Feb 03 06:59:02 crc kubenswrapper[4998]: I0203 06:59:02.518358 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24" event={"ID":"7567f7dc-dd95-4c33-bf94-6a5314c9ec8d","Type":"ContainerStarted","Data":"e42641af3635589fcec9af81d59dfad100070591ad4ab902fb8fd1b04b6b9022"} Feb 03 06:59:02 crc kubenswrapper[4998]: I0203 06:59:02.518431 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24" event={"ID":"7567f7dc-dd95-4c33-bf94-6a5314c9ec8d","Type":"ContainerStarted","Data":"605eaf9e7396563eca3ddfb0f1c523016b29e4cba0bac49ae9a3c2641f7e8cb9"} Feb 03 06:59:03 crc kubenswrapper[4998]: I0203 06:59:03.528097 4998 generic.go:334] "Generic (PLEG): container finished" podID="7567f7dc-dd95-4c33-bf94-6a5314c9ec8d" containerID="e42641af3635589fcec9af81d59dfad100070591ad4ab902fb8fd1b04b6b9022" exitCode=0 Feb 03 06:59:03 crc kubenswrapper[4998]: I0203 06:59:03.528188 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24" event={"ID":"7567f7dc-dd95-4c33-bf94-6a5314c9ec8d","Type":"ContainerDied","Data":"e42641af3635589fcec9af81d59dfad100070591ad4ab902fb8fd1b04b6b9022"} Feb 03 06:59:03 crc 
kubenswrapper[4998]: I0203 06:59:03.732558 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-8pmkt"] Feb 03 06:59:03 crc kubenswrapper[4998]: I0203 06:59:03.737151 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8pmkt" Feb 03 06:59:03 crc kubenswrapper[4998]: I0203 06:59:03.745416 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8pmkt"] Feb 03 06:59:03 crc kubenswrapper[4998]: I0203 06:59:03.818568 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5kw59\" (UniqueName: \"kubernetes.io/projected/4004bfad-0831-46a9-8ac9-73f533c5e9a7-kube-api-access-5kw59\") pod \"redhat-operators-8pmkt\" (UID: \"4004bfad-0831-46a9-8ac9-73f533c5e9a7\") " pod="openshift-marketplace/redhat-operators-8pmkt" Feb 03 06:59:03 crc kubenswrapper[4998]: I0203 06:59:03.818701 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4004bfad-0831-46a9-8ac9-73f533c5e9a7-utilities\") pod \"redhat-operators-8pmkt\" (UID: \"4004bfad-0831-46a9-8ac9-73f533c5e9a7\") " pod="openshift-marketplace/redhat-operators-8pmkt" Feb 03 06:59:03 crc kubenswrapper[4998]: I0203 06:59:03.818765 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4004bfad-0831-46a9-8ac9-73f533c5e9a7-catalog-content\") pod \"redhat-operators-8pmkt\" (UID: \"4004bfad-0831-46a9-8ac9-73f533c5e9a7\") " pod="openshift-marketplace/redhat-operators-8pmkt" Feb 03 06:59:03 crc kubenswrapper[4998]: I0203 06:59:03.919458 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5kw59\" (UniqueName: \"kubernetes.io/projected/4004bfad-0831-46a9-8ac9-73f533c5e9a7-kube-api-access-5kw59\") pod \"redhat-operators-8pmkt\" (UID: \"4004bfad-0831-46a9-8ac9-73f533c5e9a7\") " pod="openshift-marketplace/redhat-operators-8pmkt" Feb 03 06:59:03 crc kubenswrapper[4998]: I0203 06:59:03.919518 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4004bfad-0831-46a9-8ac9-73f533c5e9a7-utilities\") pod \"redhat-operators-8pmkt\" (UID: \"4004bfad-0831-46a9-8ac9-73f533c5e9a7\") " pod="openshift-marketplace/redhat-operators-8pmkt" Feb 03 06:59:03 crc kubenswrapper[4998]: I0203 06:59:03.919550 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4004bfad-0831-46a9-8ac9-73f533c5e9a7-catalog-content\") pod \"redhat-operators-8pmkt\" (UID: \"4004bfad-0831-46a9-8ac9-73f533c5e9a7\") " pod="openshift-marketplace/redhat-operators-8pmkt" Feb 03 06:59:03 crc kubenswrapper[4998]: I0203 06:59:03.920025 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4004bfad-0831-46a9-8ac9-73f533c5e9a7-catalog-content\") pod \"redhat-operators-8pmkt\" (UID: \"4004bfad-0831-46a9-8ac9-73f533c5e9a7\") " pod="openshift-marketplace/redhat-operators-8pmkt" Feb 03 06:59:03 crc kubenswrapper[4998]: I0203 06:59:03.920471 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4004bfad-0831-46a9-8ac9-73f533c5e9a7-utilities\") pod 
\"redhat-operators-8pmkt\" (UID: \"4004bfad-0831-46a9-8ac9-73f533c5e9a7\") " pod="openshift-marketplace/redhat-operators-8pmkt" Feb 03 06:59:03 crc kubenswrapper[4998]: I0203 06:59:03.938141 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5kw59\" (UniqueName: \"kubernetes.io/projected/4004bfad-0831-46a9-8ac9-73f533c5e9a7-kube-api-access-5kw59\") pod \"redhat-operators-8pmkt\" (UID: \"4004bfad-0831-46a9-8ac9-73f533c5e9a7\") " pod="openshift-marketplace/redhat-operators-8pmkt" Feb 03 06:59:04 crc kubenswrapper[4998]: I0203 06:59:04.064423 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8pmkt" Feb 03 06:59:04 crc kubenswrapper[4998]: I0203 06:59:04.278150 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8pmkt"] Feb 03 06:59:04 crc kubenswrapper[4998]: I0203 06:59:04.539136 4998 generic.go:334] "Generic (PLEG): container finished" podID="4004bfad-0831-46a9-8ac9-73f533c5e9a7" containerID="457323dfc7861a6d269243eadc8812bc9e0545d0ba940695db1e71a3516c0253" exitCode=0 Feb 03 06:59:04 crc kubenswrapper[4998]: I0203 06:59:04.539183 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8pmkt" event={"ID":"4004bfad-0831-46a9-8ac9-73f533c5e9a7","Type":"ContainerDied","Data":"457323dfc7861a6d269243eadc8812bc9e0545d0ba940695db1e71a3516c0253"} Feb 03 06:59:04 crc kubenswrapper[4998]: I0203 06:59:04.539211 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8pmkt" event={"ID":"4004bfad-0831-46a9-8ac9-73f533c5e9a7","Type":"ContainerStarted","Data":"faf0ffedd74f71c3485ca87b1c26a02b591961ebcaf4584ae874ef3838dceeb5"} Feb 03 06:59:05 crc kubenswrapper[4998]: I0203 06:59:05.553528 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8pmkt" event={"ID":"4004bfad-0831-46a9-8ac9-73f533c5e9a7","Type":"ContainerStarted","Data":"d42a35306b5f96d0459fbbce3beb73f0c8bc7b93a9c8206e0eb59eacca408a2e"} Feb 03 06:59:05 crc kubenswrapper[4998]: I0203 06:59:05.560497 4998 generic.go:334] "Generic (PLEG): container finished" podID="7567f7dc-dd95-4c33-bf94-6a5314c9ec8d" containerID="085839b4f7a36acee63bba730f426884cf1d0c54605ff3774a0a0b27e22e35fa" exitCode=0 Feb 03 06:59:05 crc kubenswrapper[4998]: I0203 06:59:05.560550 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24" event={"ID":"7567f7dc-dd95-4c33-bf94-6a5314c9ec8d","Type":"ContainerDied","Data":"085839b4f7a36acee63bba730f426884cf1d0c54605ff3774a0a0b27e22e35fa"} Feb 03 06:59:06 crc kubenswrapper[4998]: I0203 06:59:06.567845 4998 generic.go:334] "Generic (PLEG): container finished" podID="4004bfad-0831-46a9-8ac9-73f533c5e9a7" containerID="d42a35306b5f96d0459fbbce3beb73f0c8bc7b93a9c8206e0eb59eacca408a2e" exitCode=0 Feb 03 06:59:06 crc kubenswrapper[4998]: I0203 06:59:06.567921 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8pmkt" event={"ID":"4004bfad-0831-46a9-8ac9-73f533c5e9a7","Type":"ContainerDied","Data":"d42a35306b5f96d0459fbbce3beb73f0c8bc7b93a9c8206e0eb59eacca408a2e"} Feb 03 06:59:06 crc kubenswrapper[4998]: I0203 06:59:06.572135 4998 generic.go:334] "Generic (PLEG): container finished" podID="7567f7dc-dd95-4c33-bf94-6a5314c9ec8d" containerID="d641afc565949915aada9d07c9c9b2ee823e927ef2e0254749ecf0cc87216fd9" exitCode=0 Feb 03 
06:59:06 crc kubenswrapper[4998]: I0203 06:59:06.572175 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24" event={"ID":"7567f7dc-dd95-4c33-bf94-6a5314c9ec8d","Type":"ContainerDied","Data":"d641afc565949915aada9d07c9c9b2ee823e927ef2e0254749ecf0cc87216fd9"} Feb 03 06:59:07 crc kubenswrapper[4998]: I0203 06:59:07.581242 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8pmkt" event={"ID":"4004bfad-0831-46a9-8ac9-73f533c5e9a7","Type":"ContainerStarted","Data":"d05b562b43117ac66cd4791d5d9fa4cd381fbdf7e3997684c7aecc1f61acd679"} Feb 03 06:59:07 crc kubenswrapper[4998]: I0203 06:59:07.603190 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-8pmkt" podStartSLOduration=2.109577533 podStartE2EDuration="4.60316887s" podCreationTimestamp="2026-02-03 06:59:03 +0000 UTC" firstStartedPulling="2026-02-03 06:59:04.576562257 +0000 UTC m=+782.863256063" lastFinishedPulling="2026-02-03 06:59:07.070153584 +0000 UTC m=+785.356847400" observedRunningTime="2026-02-03 06:59:07.601588835 +0000 UTC m=+785.888282701" watchObservedRunningTime="2026-02-03 06:59:07.60316887 +0000 UTC m=+785.889862686" Feb 03 06:59:07 crc kubenswrapper[4998]: I0203 06:59:07.852227 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24" Feb 03 06:59:07 crc kubenswrapper[4998]: I0203 06:59:07.868881 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7567f7dc-dd95-4c33-bf94-6a5314c9ec8d-bundle\") pod \"7567f7dc-dd95-4c33-bf94-6a5314c9ec8d\" (UID: \"7567f7dc-dd95-4c33-bf94-6a5314c9ec8d\") " Feb 03 06:59:07 crc kubenswrapper[4998]: I0203 06:59:07.868965 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7567f7dc-dd95-4c33-bf94-6a5314c9ec8d-util\") pod \"7567f7dc-dd95-4c33-bf94-6a5314c9ec8d\" (UID: \"7567f7dc-dd95-4c33-bf94-6a5314c9ec8d\") " Feb 03 06:59:07 crc kubenswrapper[4998]: I0203 06:59:07.869014 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lhzsz\" (UniqueName: \"kubernetes.io/projected/7567f7dc-dd95-4c33-bf94-6a5314c9ec8d-kube-api-access-lhzsz\") pod \"7567f7dc-dd95-4c33-bf94-6a5314c9ec8d\" (UID: \"7567f7dc-dd95-4c33-bf94-6a5314c9ec8d\") " Feb 03 06:59:07 crc kubenswrapper[4998]: I0203 06:59:07.872241 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7567f7dc-dd95-4c33-bf94-6a5314c9ec8d-bundle" (OuterVolumeSpecName: "bundle") pod "7567f7dc-dd95-4c33-bf94-6a5314c9ec8d" (UID: "7567f7dc-dd95-4c33-bf94-6a5314c9ec8d"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 06:59:07 crc kubenswrapper[4998]: I0203 06:59:07.874484 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7567f7dc-dd95-4c33-bf94-6a5314c9ec8d-kube-api-access-lhzsz" (OuterVolumeSpecName: "kube-api-access-lhzsz") pod "7567f7dc-dd95-4c33-bf94-6a5314c9ec8d" (UID: "7567f7dc-dd95-4c33-bf94-6a5314c9ec8d"). InnerVolumeSpecName "kube-api-access-lhzsz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:59:07 crc kubenswrapper[4998]: I0203 06:59:07.919107 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7567f7dc-dd95-4c33-bf94-6a5314c9ec8d-util" (OuterVolumeSpecName: "util") pod "7567f7dc-dd95-4c33-bf94-6a5314c9ec8d" (UID: "7567f7dc-dd95-4c33-bf94-6a5314c9ec8d"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 06:59:07 crc kubenswrapper[4998]: I0203 06:59:07.970526 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lhzsz\" (UniqueName: \"kubernetes.io/projected/7567f7dc-dd95-4c33-bf94-6a5314c9ec8d-kube-api-access-lhzsz\") on node \"crc\" DevicePath \"\"" Feb 03 06:59:07 crc kubenswrapper[4998]: I0203 06:59:07.970554 4998 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7567f7dc-dd95-4c33-bf94-6a5314c9ec8d-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 06:59:07 crc kubenswrapper[4998]: I0203 06:59:07.970578 4998 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7567f7dc-dd95-4c33-bf94-6a5314c9ec8d-util\") on node \"crc\" DevicePath \"\"" Feb 03 06:59:08 crc kubenswrapper[4998]: I0203 06:59:08.590020 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24" event={"ID":"7567f7dc-dd95-4c33-bf94-6a5314c9ec8d","Type":"ContainerDied","Data":"605eaf9e7396563eca3ddfb0f1c523016b29e4cba0bac49ae9a3c2641f7e8cb9"} Feb 03 06:59:08 crc kubenswrapper[4998]: I0203 06:59:08.590049 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24" Feb 03 06:59:08 crc kubenswrapper[4998]: I0203 06:59:08.590073 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="605eaf9e7396563eca3ddfb0f1c523016b29e4cba0bac49ae9a3c2641f7e8cb9" Feb 03 06:59:11 crc kubenswrapper[4998]: I0203 06:59:11.427449 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-wswsx"] Feb 03 06:59:11 crc kubenswrapper[4998]: E0203 06:59:11.427686 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7567f7dc-dd95-4c33-bf94-6a5314c9ec8d" containerName="pull" Feb 03 06:59:11 crc kubenswrapper[4998]: I0203 06:59:11.427701 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="7567f7dc-dd95-4c33-bf94-6a5314c9ec8d" containerName="pull" Feb 03 06:59:11 crc kubenswrapper[4998]: E0203 06:59:11.427720 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7567f7dc-dd95-4c33-bf94-6a5314c9ec8d" containerName="util" Feb 03 06:59:11 crc kubenswrapper[4998]: I0203 06:59:11.427727 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="7567f7dc-dd95-4c33-bf94-6a5314c9ec8d" containerName="util" Feb 03 06:59:11 crc kubenswrapper[4998]: E0203 06:59:11.427743 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7567f7dc-dd95-4c33-bf94-6a5314c9ec8d" containerName="extract" Feb 03 06:59:11 crc kubenswrapper[4998]: I0203 06:59:11.427751 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="7567f7dc-dd95-4c33-bf94-6a5314c9ec8d" containerName="extract" Feb 03 06:59:11 crc kubenswrapper[4998]: I0203 06:59:11.427886 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="7567f7dc-dd95-4c33-bf94-6a5314c9ec8d" 
containerName="extract" Feb 03 06:59:11 crc kubenswrapper[4998]: I0203 06:59:11.428345 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-wswsx" Feb 03 06:59:11 crc kubenswrapper[4998]: I0203 06:59:11.429751 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-dbvvb" Feb 03 06:59:11 crc kubenswrapper[4998]: I0203 06:59:11.433579 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Feb 03 06:59:11 crc kubenswrapper[4998]: I0203 06:59:11.436127 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Feb 03 06:59:11 crc kubenswrapper[4998]: I0203 06:59:11.437551 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-wswsx"] Feb 03 06:59:11 crc kubenswrapper[4998]: I0203 06:59:11.512300 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xj8t7\" (UniqueName: \"kubernetes.io/projected/591273c0-600c-4fd3-baee-a1aa3e943ed1-kube-api-access-xj8t7\") pod \"nmstate-operator-646758c888-wswsx\" (UID: \"591273c0-600c-4fd3-baee-a1aa3e943ed1\") " pod="openshift-nmstate/nmstate-operator-646758c888-wswsx" Feb 03 06:59:11 crc kubenswrapper[4998]: I0203 06:59:11.613170 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xj8t7\" (UniqueName: \"kubernetes.io/projected/591273c0-600c-4fd3-baee-a1aa3e943ed1-kube-api-access-xj8t7\") pod \"nmstate-operator-646758c888-wswsx\" (UID: \"591273c0-600c-4fd3-baee-a1aa3e943ed1\") " pod="openshift-nmstate/nmstate-operator-646758c888-wswsx" Feb 03 06:59:11 crc kubenswrapper[4998]: I0203 06:59:11.651675 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xj8t7\" (UniqueName: \"kubernetes.io/projected/591273c0-600c-4fd3-baee-a1aa3e943ed1-kube-api-access-xj8t7\") pod \"nmstate-operator-646758c888-wswsx\" (UID: \"591273c0-600c-4fd3-baee-a1aa3e943ed1\") " pod="openshift-nmstate/nmstate-operator-646758c888-wswsx" Feb 03 06:59:11 crc kubenswrapper[4998]: I0203 06:59:11.741763 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-wswsx" Feb 03 06:59:11 crc kubenswrapper[4998]: I0203 06:59:11.912368 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-wswsx"] Feb 03 06:59:12 crc kubenswrapper[4998]: I0203 06:59:12.608045 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-wswsx" event={"ID":"591273c0-600c-4fd3-baee-a1aa3e943ed1","Type":"ContainerStarted","Data":"771cf1874cc43fbf8c1e2660dfda26ed81657da7255c0bf823ad6186582da5a6"} Feb 03 06:59:12 crc kubenswrapper[4998]: I0203 06:59:12.754396 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 06:59:12 crc kubenswrapper[4998]: I0203 06:59:12.754454 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 06:59:12 crc kubenswrapper[4998]: I0203 06:59:12.754495 4998 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" Feb 03 06:59:12 crc kubenswrapper[4998]: I0203 06:59:12.755087 4998 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a715304a8e506b324a18b7e44d744d3e43f59a2529eede37e78f5f26ef938332"} pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 03 06:59:12 crc kubenswrapper[4998]: I0203 06:59:12.755154 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" containerID="cri-o://a715304a8e506b324a18b7e44d744d3e43f59a2529eede37e78f5f26ef938332" gracePeriod=600 Feb 03 06:59:13 crc kubenswrapper[4998]: I0203 06:59:13.618055 4998 generic.go:334] "Generic (PLEG): container finished" podID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerID="a715304a8e506b324a18b7e44d744d3e43f59a2529eede37e78f5f26ef938332" exitCode=0 Feb 03 06:59:13 crc kubenswrapper[4998]: I0203 06:59:13.618109 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerDied","Data":"a715304a8e506b324a18b7e44d744d3e43f59a2529eede37e78f5f26ef938332"} Feb 03 06:59:13 crc kubenswrapper[4998]: I0203 06:59:13.618150 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerStarted","Data":"bb96b94f015f59d0a44787640ad68f2b0d9538506248aed8ff45ab4238aa5576"} Feb 03 06:59:13 crc kubenswrapper[4998]: I0203 06:59:13.618168 4998 scope.go:117] "RemoveContainer" containerID="c2e1ee8ca9dcaa42720784bd3014dda6a6aa2a520029e1b726ecb558f7bcee0e" Feb 03 06:59:14 crc kubenswrapper[4998]: I0203 06:59:14.065108 4998 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-8pmkt" Feb 03 06:59:14 crc kubenswrapper[4998]: I0203 06:59:14.065386 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-8pmkt" Feb 03 06:59:14 crc kubenswrapper[4998]: I0203 06:59:14.108776 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-8pmkt" Feb 03 06:59:14 crc kubenswrapper[4998]: I0203 06:59:14.628268 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-wswsx" event={"ID":"591273c0-600c-4fd3-baee-a1aa3e943ed1","Type":"ContainerStarted","Data":"afb8a74886904ea747db5f5312af4f4e5e2999eb8f43046ec38b82a2348deff7"} Feb 03 06:59:14 crc kubenswrapper[4998]: I0203 06:59:14.645521 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-646758c888-wswsx" podStartSLOduration=1.397005455 podStartE2EDuration="3.645500024s" podCreationTimestamp="2026-02-03 06:59:11 +0000 UTC" firstStartedPulling="2026-02-03 06:59:11.935694875 +0000 UTC m=+790.222388681" lastFinishedPulling="2026-02-03 06:59:14.184189444 +0000 UTC m=+792.470883250" observedRunningTime="2026-02-03 06:59:14.643547578 +0000 UTC m=+792.930241404" watchObservedRunningTime="2026-02-03 06:59:14.645500024 +0000 UTC m=+792.932193840" Feb 03 06:59:14 crc kubenswrapper[4998]: I0203 06:59:14.677259 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-8pmkt" Feb 03 06:59:16 crc kubenswrapper[4998]: I0203 06:59:16.113484 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8pmkt"] Feb 03 06:59:16 crc kubenswrapper[4998]: I0203 06:59:16.641279 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-8pmkt" podUID="4004bfad-0831-46a9-8ac9-73f533c5e9a7" containerName="registry-server" containerID="cri-o://d05b562b43117ac66cd4791d5d9fa4cd381fbdf7e3997684c7aecc1f61acd679" gracePeriod=2 Feb 03 06:59:18 crc kubenswrapper[4998]: I0203 06:59:18.657078 4998 generic.go:334] "Generic (PLEG): container finished" podID="4004bfad-0831-46a9-8ac9-73f533c5e9a7" containerID="d05b562b43117ac66cd4791d5d9fa4cd381fbdf7e3997684c7aecc1f61acd679" exitCode=0 Feb 03 06:59:18 crc kubenswrapper[4998]: I0203 06:59:18.657191 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8pmkt" event={"ID":"4004bfad-0831-46a9-8ac9-73f533c5e9a7","Type":"ContainerDied","Data":"d05b562b43117ac66cd4791d5d9fa4cd381fbdf7e3997684c7aecc1f61acd679"} Feb 03 06:59:18 crc kubenswrapper[4998]: I0203 06:59:18.851830 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-8pmkt" Feb 03 06:59:19 crc kubenswrapper[4998]: I0203 06:59:19.003432 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5kw59\" (UniqueName: \"kubernetes.io/projected/4004bfad-0831-46a9-8ac9-73f533c5e9a7-kube-api-access-5kw59\") pod \"4004bfad-0831-46a9-8ac9-73f533c5e9a7\" (UID: \"4004bfad-0831-46a9-8ac9-73f533c5e9a7\") " Feb 03 06:59:19 crc kubenswrapper[4998]: I0203 06:59:19.003559 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4004bfad-0831-46a9-8ac9-73f533c5e9a7-utilities\") pod \"4004bfad-0831-46a9-8ac9-73f533c5e9a7\" (UID: \"4004bfad-0831-46a9-8ac9-73f533c5e9a7\") " Feb 03 06:59:19 crc kubenswrapper[4998]: I0203 06:59:19.003595 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4004bfad-0831-46a9-8ac9-73f533c5e9a7-catalog-content\") pod \"4004bfad-0831-46a9-8ac9-73f533c5e9a7\" (UID: \"4004bfad-0831-46a9-8ac9-73f533c5e9a7\") " Feb 03 06:59:19 crc kubenswrapper[4998]: I0203 06:59:19.004537 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4004bfad-0831-46a9-8ac9-73f533c5e9a7-utilities" (OuterVolumeSpecName: "utilities") pod "4004bfad-0831-46a9-8ac9-73f533c5e9a7" (UID: "4004bfad-0831-46a9-8ac9-73f533c5e9a7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 06:59:19 crc kubenswrapper[4998]: I0203 06:59:19.009005 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4004bfad-0831-46a9-8ac9-73f533c5e9a7-kube-api-access-5kw59" (OuterVolumeSpecName: "kube-api-access-5kw59") pod "4004bfad-0831-46a9-8ac9-73f533c5e9a7" (UID: "4004bfad-0831-46a9-8ac9-73f533c5e9a7"). InnerVolumeSpecName "kube-api-access-5kw59". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:59:19 crc kubenswrapper[4998]: I0203 06:59:19.106116 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5kw59\" (UniqueName: \"kubernetes.io/projected/4004bfad-0831-46a9-8ac9-73f533c5e9a7-kube-api-access-5kw59\") on node \"crc\" DevicePath \"\"" Feb 03 06:59:19 crc kubenswrapper[4998]: I0203 06:59:19.106153 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4004bfad-0831-46a9-8ac9-73f533c5e9a7-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 06:59:19 crc kubenswrapper[4998]: I0203 06:59:19.119648 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4004bfad-0831-46a9-8ac9-73f533c5e9a7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4004bfad-0831-46a9-8ac9-73f533c5e9a7" (UID: "4004bfad-0831-46a9-8ac9-73f533c5e9a7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 06:59:19 crc kubenswrapper[4998]: I0203 06:59:19.207130 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4004bfad-0831-46a9-8ac9-73f533c5e9a7-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 06:59:19 crc kubenswrapper[4998]: I0203 06:59:19.664916 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8pmkt" event={"ID":"4004bfad-0831-46a9-8ac9-73f533c5e9a7","Type":"ContainerDied","Data":"faf0ffedd74f71c3485ca87b1c26a02b591961ebcaf4584ae874ef3838dceeb5"} Feb 03 06:59:19 crc kubenswrapper[4998]: I0203 06:59:19.664956 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8pmkt" Feb 03 06:59:19 crc kubenswrapper[4998]: I0203 06:59:19.665459 4998 scope.go:117] "RemoveContainer" containerID="d05b562b43117ac66cd4791d5d9fa4cd381fbdf7e3997684c7aecc1f61acd679" Feb 03 06:59:19 crc kubenswrapper[4998]: I0203 06:59:19.686059 4998 scope.go:117] "RemoveContainer" containerID="d42a35306b5f96d0459fbbce3beb73f0c8bc7b93a9c8206e0eb59eacca408a2e" Feb 03 06:59:19 crc kubenswrapper[4998]: I0203 06:59:19.705211 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8pmkt"] Feb 03 06:59:19 crc kubenswrapper[4998]: I0203 06:59:19.719019 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-8pmkt"] Feb 03 06:59:19 crc kubenswrapper[4998]: I0203 06:59:19.724562 4998 scope.go:117] "RemoveContainer" containerID="457323dfc7861a6d269243eadc8812bc9e0545d0ba940695db1e71a3516c0253" Feb 03 06:59:20 crc kubenswrapper[4998]: I0203 06:59:20.437292 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4004bfad-0831-46a9-8ac9-73f533c5e9a7" path="/var/lib/kubelet/pods/4004bfad-0831-46a9-8ac9-73f533c5e9a7/volumes" Feb 03 06:59:21 crc kubenswrapper[4998]: I0203 06:59:21.897355 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-t7wvp"] Feb 03 06:59:21 crc kubenswrapper[4998]: E0203 06:59:21.897596 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4004bfad-0831-46a9-8ac9-73f533c5e9a7" containerName="extract-content" Feb 03 06:59:21 crc kubenswrapper[4998]: I0203 06:59:21.897609 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="4004bfad-0831-46a9-8ac9-73f533c5e9a7" containerName="extract-content" Feb 03 06:59:21 crc kubenswrapper[4998]: E0203 06:59:21.897631 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4004bfad-0831-46a9-8ac9-73f533c5e9a7" containerName="extract-utilities" Feb 03 06:59:21 crc kubenswrapper[4998]: I0203 06:59:21.897639 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="4004bfad-0831-46a9-8ac9-73f533c5e9a7" containerName="extract-utilities" Feb 03 06:59:21 crc kubenswrapper[4998]: E0203 06:59:21.897647 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4004bfad-0831-46a9-8ac9-73f533c5e9a7" containerName="registry-server" Feb 03 06:59:21 crc kubenswrapper[4998]: I0203 06:59:21.897656 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="4004bfad-0831-46a9-8ac9-73f533c5e9a7" containerName="registry-server" Feb 03 06:59:21 crc kubenswrapper[4998]: I0203 06:59:21.897797 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="4004bfad-0831-46a9-8ac9-73f533c5e9a7" containerName="registry-server" Feb 03 06:59:21 
crc kubenswrapper[4998]: I0203 06:59:21.898436 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-t7wvp" Feb 03 06:59:21 crc kubenswrapper[4998]: I0203 06:59:21.901083 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-jqkfn" Feb 03 06:59:21 crc kubenswrapper[4998]: I0203 06:59:21.906135 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-7t78k"] Feb 03 06:59:21 crc kubenswrapper[4998]: I0203 06:59:21.907509 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7t78k" Feb 03 06:59:21 crc kubenswrapper[4998]: I0203 06:59:21.913487 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Feb 03 06:59:21 crc kubenswrapper[4998]: I0203 06:59:21.916202 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-wsbwf"] Feb 03 06:59:21 crc kubenswrapper[4998]: I0203 06:59:21.917094 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-wsbwf" Feb 03 06:59:21 crc kubenswrapper[4998]: I0203 06:59:21.923502 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-7t78k"] Feb 03 06:59:21 crc kubenswrapper[4998]: I0203 06:59:21.931138 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-t7wvp"] Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.015195 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-d7qtx"] Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.015984 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-d7qtx" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.027225 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-s84nt" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.027450 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.031628 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-d7qtx"] Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.032225 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.046187 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/eff39f1c-011a-4719-8821-8901bd649ec4-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-7t78k\" (UID: \"eff39f1c-011a-4719-8821-8901bd649ec4\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7t78k" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.046249 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/0ed5d979-36b4-45e2-8070-88873509fff7-dbus-socket\") pod \"nmstate-handler-wsbwf\" (UID: \"0ed5d979-36b4-45e2-8070-88873509fff7\") " pod="openshift-nmstate/nmstate-handler-wsbwf" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.046282 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzmjj\" (UniqueName: \"kubernetes.io/projected/62aaf85e-b285-46ee-94b9-d29f6125d823-kube-api-access-tzmjj\") pod \"nmstate-metrics-54757c584b-t7wvp\" (UID: \"62aaf85e-b285-46ee-94b9-d29f6125d823\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-t7wvp" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.046328 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/0ed5d979-36b4-45e2-8070-88873509fff7-nmstate-lock\") pod \"nmstate-handler-wsbwf\" (UID: \"0ed5d979-36b4-45e2-8070-88873509fff7\") " pod="openshift-nmstate/nmstate-handler-wsbwf" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.046361 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/0ed5d979-36b4-45e2-8070-88873509fff7-ovs-socket\") pod \"nmstate-handler-wsbwf\" (UID: \"0ed5d979-36b4-45e2-8070-88873509fff7\") " pod="openshift-nmstate/nmstate-handler-wsbwf" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.046399 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fst5l\" (UniqueName: \"kubernetes.io/projected/0ed5d979-36b4-45e2-8070-88873509fff7-kube-api-access-fst5l\") pod \"nmstate-handler-wsbwf\" (UID: \"0ed5d979-36b4-45e2-8070-88873509fff7\") " pod="openshift-nmstate/nmstate-handler-wsbwf" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.046480 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kcbtl\" (UniqueName: \"kubernetes.io/projected/eff39f1c-011a-4719-8821-8901bd649ec4-kube-api-access-kcbtl\") pod 
\"nmstate-webhook-8474b5b9d8-7t78k\" (UID: \"eff39f1c-011a-4719-8821-8901bd649ec4\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7t78k" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.147887 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tzmjj\" (UniqueName: \"kubernetes.io/projected/62aaf85e-b285-46ee-94b9-d29f6125d823-kube-api-access-tzmjj\") pod \"nmstate-metrics-54757c584b-t7wvp\" (UID: \"62aaf85e-b285-46ee-94b9-d29f6125d823\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-t7wvp" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.147948 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/0ed5d979-36b4-45e2-8070-88873509fff7-nmstate-lock\") pod \"nmstate-handler-wsbwf\" (UID: \"0ed5d979-36b4-45e2-8070-88873509fff7\") " pod="openshift-nmstate/nmstate-handler-wsbwf" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.147974 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/0ed5d979-36b4-45e2-8070-88873509fff7-ovs-socket\") pod \"nmstate-handler-wsbwf\" (UID: \"0ed5d979-36b4-45e2-8070-88873509fff7\") " pod="openshift-nmstate/nmstate-handler-wsbwf" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.147999 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fst5l\" (UniqueName: \"kubernetes.io/projected/0ed5d979-36b4-45e2-8070-88873509fff7-kube-api-access-fst5l\") pod \"nmstate-handler-wsbwf\" (UID: \"0ed5d979-36b4-45e2-8070-88873509fff7\") " pod="openshift-nmstate/nmstate-handler-wsbwf" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.148024 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6c2v\" (UniqueName: \"kubernetes.io/projected/7f68614c-50cc-4234-916c-73a291c112b9-kube-api-access-m6c2v\") pod \"nmstate-console-plugin-7754f76f8b-d7qtx\" (UID: \"7f68614c-50cc-4234-916c-73a291c112b9\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-d7qtx" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.148047 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kcbtl\" (UniqueName: \"kubernetes.io/projected/eff39f1c-011a-4719-8821-8901bd649ec4-kube-api-access-kcbtl\") pod \"nmstate-webhook-8474b5b9d8-7t78k\" (UID: \"eff39f1c-011a-4719-8821-8901bd649ec4\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7t78k" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.148084 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/7f68614c-50cc-4234-916c-73a291c112b9-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-d7qtx\" (UID: \"7f68614c-50cc-4234-916c-73a291c112b9\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-d7qtx" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.148086 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/0ed5d979-36b4-45e2-8070-88873509fff7-nmstate-lock\") pod \"nmstate-handler-wsbwf\" (UID: \"0ed5d979-36b4-45e2-8070-88873509fff7\") " pod="openshift-nmstate/nmstate-handler-wsbwf" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.148108 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/7f68614c-50cc-4234-916c-73a291c112b9-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-d7qtx\" (UID: \"7f68614c-50cc-4234-916c-73a291c112b9\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-d7qtx" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.148228 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/eff39f1c-011a-4719-8821-8901bd649ec4-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-7t78k\" (UID: \"eff39f1c-011a-4719-8821-8901bd649ec4\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7t78k" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.148107 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/0ed5d979-36b4-45e2-8070-88873509fff7-ovs-socket\") pod \"nmstate-handler-wsbwf\" (UID: \"0ed5d979-36b4-45e2-8070-88873509fff7\") " pod="openshift-nmstate/nmstate-handler-wsbwf" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.148316 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/0ed5d979-36b4-45e2-8070-88873509fff7-dbus-socket\") pod \"nmstate-handler-wsbwf\" (UID: \"0ed5d979-36b4-45e2-8070-88873509fff7\") " pod="openshift-nmstate/nmstate-handler-wsbwf" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.148585 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/0ed5d979-36b4-45e2-8070-88873509fff7-dbus-socket\") pod \"nmstate-handler-wsbwf\" (UID: \"0ed5d979-36b4-45e2-8070-88873509fff7\") " pod="openshift-nmstate/nmstate-handler-wsbwf" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.164498 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/eff39f1c-011a-4719-8821-8901bd649ec4-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-7t78k\" (UID: \"eff39f1c-011a-4719-8821-8901bd649ec4\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7t78k" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.169474 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tzmjj\" (UniqueName: \"kubernetes.io/projected/62aaf85e-b285-46ee-94b9-d29f6125d823-kube-api-access-tzmjj\") pod \"nmstate-metrics-54757c584b-t7wvp\" (UID: \"62aaf85e-b285-46ee-94b9-d29f6125d823\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-t7wvp" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.172197 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kcbtl\" (UniqueName: \"kubernetes.io/projected/eff39f1c-011a-4719-8821-8901bd649ec4-kube-api-access-kcbtl\") pod \"nmstate-webhook-8474b5b9d8-7t78k\" (UID: \"eff39f1c-011a-4719-8821-8901bd649ec4\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7t78k" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.178207 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fst5l\" (UniqueName: \"kubernetes.io/projected/0ed5d979-36b4-45e2-8070-88873509fff7-kube-api-access-fst5l\") pod \"nmstate-handler-wsbwf\" (UID: \"0ed5d979-36b4-45e2-8070-88873509fff7\") " pod="openshift-nmstate/nmstate-handler-wsbwf" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.215406 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-t7wvp" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.223330 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-5b5f5df768-488nq"] Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.224155 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-5b5f5df768-488nq" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.224549 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7t78k" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.233633 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-wsbwf" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.237192 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-5b5f5df768-488nq"] Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.249395 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/7f68614c-50cc-4234-916c-73a291c112b9-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-d7qtx\" (UID: \"7f68614c-50cc-4234-916c-73a291c112b9\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-d7qtx" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.249441 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/7f68614c-50cc-4234-916c-73a291c112b9-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-d7qtx\" (UID: \"7f68614c-50cc-4234-916c-73a291c112b9\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-d7qtx" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.249530 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6c2v\" (UniqueName: \"kubernetes.io/projected/7f68614c-50cc-4234-916c-73a291c112b9-kube-api-access-m6c2v\") pod \"nmstate-console-plugin-7754f76f8b-d7qtx\" (UID: \"7f68614c-50cc-4234-916c-73a291c112b9\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-d7qtx" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.251102 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/7f68614c-50cc-4234-916c-73a291c112b9-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-d7qtx\" (UID: \"7f68614c-50cc-4234-916c-73a291c112b9\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-d7qtx" Feb 03 06:59:22 crc kubenswrapper[4998]: E0203 06:59:22.251171 4998 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Feb 03 06:59:22 crc kubenswrapper[4998]: E0203 06:59:22.251208 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7f68614c-50cc-4234-916c-73a291c112b9-plugin-serving-cert podName:7f68614c-50cc-4234-916c-73a291c112b9 nodeName:}" failed. No retries permitted until 2026-02-03 06:59:22.751195768 +0000 UTC m=+801.037889574 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/7f68614c-50cc-4234-916c-73a291c112b9-plugin-serving-cert") pod "nmstate-console-plugin-7754f76f8b-d7qtx" (UID: "7f68614c-50cc-4234-916c-73a291c112b9") : secret "plugin-serving-cert" not found Feb 03 06:59:22 crc kubenswrapper[4998]: W0203 06:59:22.273141 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0ed5d979_36b4_45e2_8070_88873509fff7.slice/crio-bed99b9b01fd8cbdf21fe1c0e6b29e57992befb3bd8b719231a6b758133b1b19 WatchSource:0}: Error finding container bed99b9b01fd8cbdf21fe1c0e6b29e57992befb3bd8b719231a6b758133b1b19: Status 404 returned error can't find the container with id bed99b9b01fd8cbdf21fe1c0e6b29e57992befb3bd8b719231a6b758133b1b19 Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.276911 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6c2v\" (UniqueName: \"kubernetes.io/projected/7f68614c-50cc-4234-916c-73a291c112b9-kube-api-access-m6c2v\") pod \"nmstate-console-plugin-7754f76f8b-d7qtx\" (UID: \"7f68614c-50cc-4234-916c-73a291c112b9\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-d7qtx" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.350952 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rd4f2\" (UniqueName: \"kubernetes.io/projected/17631c74-a0d1-475d-b782-6e387d1f3a30-kube-api-access-rd4f2\") pod \"console-5b5f5df768-488nq\" (UID: \"17631c74-a0d1-475d-b782-6e387d1f3a30\") " pod="openshift-console/console-5b5f5df768-488nq" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.351296 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/17631c74-a0d1-475d-b782-6e387d1f3a30-oauth-serving-cert\") pod \"console-5b5f5df768-488nq\" (UID: \"17631c74-a0d1-475d-b782-6e387d1f3a30\") " pod="openshift-console/console-5b5f5df768-488nq" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.351326 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/17631c74-a0d1-475d-b782-6e387d1f3a30-console-serving-cert\") pod \"console-5b5f5df768-488nq\" (UID: \"17631c74-a0d1-475d-b782-6e387d1f3a30\") " pod="openshift-console/console-5b5f5df768-488nq" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.351342 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/17631c74-a0d1-475d-b782-6e387d1f3a30-service-ca\") pod \"console-5b5f5df768-488nq\" (UID: \"17631c74-a0d1-475d-b782-6e387d1f3a30\") " pod="openshift-console/console-5b5f5df768-488nq" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.351365 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/17631c74-a0d1-475d-b782-6e387d1f3a30-console-oauth-config\") pod \"console-5b5f5df768-488nq\" (UID: \"17631c74-a0d1-475d-b782-6e387d1f3a30\") " pod="openshift-console/console-5b5f5df768-488nq" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.351392 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/17631c74-a0d1-475d-b782-6e387d1f3a30-trusted-ca-bundle\") pod \"console-5b5f5df768-488nq\" (UID: \"17631c74-a0d1-475d-b782-6e387d1f3a30\") " pod="openshift-console/console-5b5f5df768-488nq" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.351420 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/17631c74-a0d1-475d-b782-6e387d1f3a30-console-config\") pod \"console-5b5f5df768-488nq\" (UID: \"17631c74-a0d1-475d-b782-6e387d1f3a30\") " pod="openshift-console/console-5b5f5df768-488nq" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.441281 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-7t78k"] Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.452461 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rd4f2\" (UniqueName: \"kubernetes.io/projected/17631c74-a0d1-475d-b782-6e387d1f3a30-kube-api-access-rd4f2\") pod \"console-5b5f5df768-488nq\" (UID: \"17631c74-a0d1-475d-b782-6e387d1f3a30\") " pod="openshift-console/console-5b5f5df768-488nq" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.452497 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/17631c74-a0d1-475d-b782-6e387d1f3a30-oauth-serving-cert\") pod \"console-5b5f5df768-488nq\" (UID: \"17631c74-a0d1-475d-b782-6e387d1f3a30\") " pod="openshift-console/console-5b5f5df768-488nq" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.452525 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/17631c74-a0d1-475d-b782-6e387d1f3a30-console-serving-cert\") pod \"console-5b5f5df768-488nq\" (UID: \"17631c74-a0d1-475d-b782-6e387d1f3a30\") " pod="openshift-console/console-5b5f5df768-488nq" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.452541 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/17631c74-a0d1-475d-b782-6e387d1f3a30-service-ca\") pod \"console-5b5f5df768-488nq\" (UID: \"17631c74-a0d1-475d-b782-6e387d1f3a30\") " pod="openshift-console/console-5b5f5df768-488nq" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.452562 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/17631c74-a0d1-475d-b782-6e387d1f3a30-console-oauth-config\") pod \"console-5b5f5df768-488nq\" (UID: \"17631c74-a0d1-475d-b782-6e387d1f3a30\") " pod="openshift-console/console-5b5f5df768-488nq" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.452586 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/17631c74-a0d1-475d-b782-6e387d1f3a30-trusted-ca-bundle\") pod \"console-5b5f5df768-488nq\" (UID: \"17631c74-a0d1-475d-b782-6e387d1f3a30\") " pod="openshift-console/console-5b5f5df768-488nq" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.452612 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/17631c74-a0d1-475d-b782-6e387d1f3a30-console-config\") pod \"console-5b5f5df768-488nq\" (UID: \"17631c74-a0d1-475d-b782-6e387d1f3a30\") " 
pod="openshift-console/console-5b5f5df768-488nq" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.453667 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/17631c74-a0d1-475d-b782-6e387d1f3a30-console-config\") pod \"console-5b5f5df768-488nq\" (UID: \"17631c74-a0d1-475d-b782-6e387d1f3a30\") " pod="openshift-console/console-5b5f5df768-488nq" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.454712 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/17631c74-a0d1-475d-b782-6e387d1f3a30-service-ca\") pod \"console-5b5f5df768-488nq\" (UID: \"17631c74-a0d1-475d-b782-6e387d1f3a30\") " pod="openshift-console/console-5b5f5df768-488nq" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.454751 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/17631c74-a0d1-475d-b782-6e387d1f3a30-oauth-serving-cert\") pod \"console-5b5f5df768-488nq\" (UID: \"17631c74-a0d1-475d-b782-6e387d1f3a30\") " pod="openshift-console/console-5b5f5df768-488nq" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.455594 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/17631c74-a0d1-475d-b782-6e387d1f3a30-trusted-ca-bundle\") pod \"console-5b5f5df768-488nq\" (UID: \"17631c74-a0d1-475d-b782-6e387d1f3a30\") " pod="openshift-console/console-5b5f5df768-488nq" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.457629 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/17631c74-a0d1-475d-b782-6e387d1f3a30-console-oauth-config\") pod \"console-5b5f5df768-488nq\" (UID: \"17631c74-a0d1-475d-b782-6e387d1f3a30\") " pod="openshift-console/console-5b5f5df768-488nq" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.457644 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/17631c74-a0d1-475d-b782-6e387d1f3a30-console-serving-cert\") pod \"console-5b5f5df768-488nq\" (UID: \"17631c74-a0d1-475d-b782-6e387d1f3a30\") " pod="openshift-console/console-5b5f5df768-488nq" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.464157 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-t7wvp"] Feb 03 06:59:22 crc kubenswrapper[4998]: W0203 06:59:22.468594 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod62aaf85e_b285_46ee_94b9_d29f6125d823.slice/crio-e99747d079d54132d01535390d63bb472d17a8f669d323c78c1bca381e5829e3 WatchSource:0}: Error finding container e99747d079d54132d01535390d63bb472d17a8f669d323c78c1bca381e5829e3: Status 404 returned error can't find the container with id e99747d079d54132d01535390d63bb472d17a8f669d323c78c1bca381e5829e3 Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.473025 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rd4f2\" (UniqueName: \"kubernetes.io/projected/17631c74-a0d1-475d-b782-6e387d1f3a30-kube-api-access-rd4f2\") pod \"console-5b5f5df768-488nq\" (UID: \"17631c74-a0d1-475d-b782-6e387d1f3a30\") " pod="openshift-console/console-5b5f5df768-488nq" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.582889 4998 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-5b5f5df768-488nq" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.680738 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7t78k" event={"ID":"eff39f1c-011a-4719-8821-8901bd649ec4","Type":"ContainerStarted","Data":"43b9149c2475c4e064ee1cbd4621aad4413b3890cc3dbbe1e3d2fdabaee64ed5"} Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.681796 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-t7wvp" event={"ID":"62aaf85e-b285-46ee-94b9-d29f6125d823","Type":"ContainerStarted","Data":"e99747d079d54132d01535390d63bb472d17a8f669d323c78c1bca381e5829e3"} Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.682731 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-wsbwf" event={"ID":"0ed5d979-36b4-45e2-8070-88873509fff7","Type":"ContainerStarted","Data":"bed99b9b01fd8cbdf21fe1c0e6b29e57992befb3bd8b719231a6b758133b1b19"} Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.757246 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/7f68614c-50cc-4234-916c-73a291c112b9-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-d7qtx\" (UID: \"7f68614c-50cc-4234-916c-73a291c112b9\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-d7qtx" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.760356 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/7f68614c-50cc-4234-916c-73a291c112b9-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-d7qtx\" (UID: \"7f68614c-50cc-4234-916c-73a291c112b9\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-d7qtx" Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.800879 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-5b5f5df768-488nq"] Feb 03 06:59:22 crc kubenswrapper[4998]: W0203 06:59:22.808365 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod17631c74_a0d1_475d_b782_6e387d1f3a30.slice/crio-e1c197b527d65bf9531a16d54a2429aa5bb2c6c2c73ab57804182576ba43904e WatchSource:0}: Error finding container e1c197b527d65bf9531a16d54a2429aa5bb2c6c2c73ab57804182576ba43904e: Status 404 returned error can't find the container with id e1c197b527d65bf9531a16d54a2429aa5bb2c6c2c73ab57804182576ba43904e Feb 03 06:59:22 crc kubenswrapper[4998]: I0203 06:59:22.941149 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-d7qtx" Feb 03 06:59:23 crc kubenswrapper[4998]: I0203 06:59:23.106476 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-d7qtx"] Feb 03 06:59:23 crc kubenswrapper[4998]: W0203 06:59:23.114952 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7f68614c_50cc_4234_916c_73a291c112b9.slice/crio-e54056a2a8430893cb16dc484c224b041cbaabdaf686cc4212834b737e865f63 WatchSource:0}: Error finding container e54056a2a8430893cb16dc484c224b041cbaabdaf686cc4212834b737e865f63: Status 404 returned error can't find the container with id e54056a2a8430893cb16dc484c224b041cbaabdaf686cc4212834b737e865f63 Feb 03 06:59:23 crc kubenswrapper[4998]: I0203 06:59:23.689431 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-d7qtx" event={"ID":"7f68614c-50cc-4234-916c-73a291c112b9","Type":"ContainerStarted","Data":"e54056a2a8430893cb16dc484c224b041cbaabdaf686cc4212834b737e865f63"} Feb 03 06:59:23 crc kubenswrapper[4998]: I0203 06:59:23.690871 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-5b5f5df768-488nq" event={"ID":"17631c74-a0d1-475d-b782-6e387d1f3a30","Type":"ContainerStarted","Data":"cdcae165c0d6d204d82de8a4fc489d0287a327a87e87956d1d850b4e7fc9735d"} Feb 03 06:59:23 crc kubenswrapper[4998]: I0203 06:59:23.690901 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-5b5f5df768-488nq" event={"ID":"17631c74-a0d1-475d-b782-6e387d1f3a30","Type":"ContainerStarted","Data":"e1c197b527d65bf9531a16d54a2429aa5bb2c6c2c73ab57804182576ba43904e"} Feb 03 06:59:23 crc kubenswrapper[4998]: I0203 06:59:23.717578 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-5b5f5df768-488nq" podStartSLOduration=1.7175550990000001 podStartE2EDuration="1.717555099s" podCreationTimestamp="2026-02-03 06:59:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 06:59:23.712293309 +0000 UTC m=+801.998987125" watchObservedRunningTime="2026-02-03 06:59:23.717555099 +0000 UTC m=+802.004248905" Feb 03 06:59:25 crc kubenswrapper[4998]: I0203 06:59:25.703440 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-t7wvp" event={"ID":"62aaf85e-b285-46ee-94b9-d29f6125d823","Type":"ContainerStarted","Data":"abd2bc719879be57ffc30f5d0db2400e72e8369a5209d81ee161d7b1f0c97c9b"} Feb 03 06:59:25 crc kubenswrapper[4998]: I0203 06:59:25.705471 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-d7qtx" event={"ID":"7f68614c-50cc-4234-916c-73a291c112b9","Type":"ContainerStarted","Data":"b308ea2a9aa211a5cbfa448b6c25edd4b2d15ed239446cf3c000413125fa6c73"} Feb 03 06:59:25 crc kubenswrapper[4998]: I0203 06:59:25.706678 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-wsbwf" event={"ID":"0ed5d979-36b4-45e2-8070-88873509fff7","Type":"ContainerStarted","Data":"d1a381b7b00adb460b897d2faca868771e5c1b6092c0db6dc8c2e8bd16fac7d0"} Feb 03 06:59:25 crc kubenswrapper[4998]: I0203 06:59:25.706852 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-wsbwf" Feb 03 06:59:25 crc 
kubenswrapper[4998]: I0203 06:59:25.709751 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7t78k" event={"ID":"eff39f1c-011a-4719-8821-8901bd649ec4","Type":"ContainerStarted","Data":"3b52037b15fdb8abc40352708b8860ff12715dd474c19b91e1fd98a1089d8de6"} Feb 03 06:59:25 crc kubenswrapper[4998]: I0203 06:59:25.709936 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7t78k" Feb 03 06:59:25 crc kubenswrapper[4998]: I0203 06:59:25.721590 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-d7qtx" podStartSLOduration=1.6652558069999999 podStartE2EDuration="3.72157311s" podCreationTimestamp="2026-02-03 06:59:22 +0000 UTC" firstStartedPulling="2026-02-03 06:59:23.116731988 +0000 UTC m=+801.403425794" lastFinishedPulling="2026-02-03 06:59:25.173049271 +0000 UTC m=+803.459743097" observedRunningTime="2026-02-03 06:59:25.718205164 +0000 UTC m=+804.004898990" watchObservedRunningTime="2026-02-03 06:59:25.72157311 +0000 UTC m=+804.008266916" Feb 03 06:59:25 crc kubenswrapper[4998]: I0203 06:59:25.738825 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7t78k" podStartSLOduration=2.008292252 podStartE2EDuration="4.738806092s" podCreationTimestamp="2026-02-03 06:59:21 +0000 UTC" firstStartedPulling="2026-02-03 06:59:22.442674645 +0000 UTC m=+800.729368461" lastFinishedPulling="2026-02-03 06:59:25.173188495 +0000 UTC m=+803.459882301" observedRunningTime="2026-02-03 06:59:25.734105328 +0000 UTC m=+804.020799144" watchObservedRunningTime="2026-02-03 06:59:25.738806092 +0000 UTC m=+804.025499898" Feb 03 06:59:25 crc kubenswrapper[4998]: I0203 06:59:25.763402 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-wsbwf" podStartSLOduration=1.841150061 podStartE2EDuration="4.763385734s" podCreationTimestamp="2026-02-03 06:59:21 +0000 UTC" firstStartedPulling="2026-02-03 06:59:22.27505977 +0000 UTC m=+800.561753576" lastFinishedPulling="2026-02-03 06:59:25.197295433 +0000 UTC m=+803.483989249" observedRunningTime="2026-02-03 06:59:25.760167512 +0000 UTC m=+804.046861348" watchObservedRunningTime="2026-02-03 06:59:25.763385734 +0000 UTC m=+804.050079540" Feb 03 06:59:28 crc kubenswrapper[4998]: I0203 06:59:28.734421 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-t7wvp" event={"ID":"62aaf85e-b285-46ee-94b9-d29f6125d823","Type":"ContainerStarted","Data":"01208bf46666bf1d13b16b6ed4ab9f6b2164788091cb72e1dc60104ca3473315"} Feb 03 06:59:28 crc kubenswrapper[4998]: I0203 06:59:28.758342 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-54757c584b-t7wvp" podStartSLOduration=2.469592431 podStartE2EDuration="7.758308212s" podCreationTimestamp="2026-02-03 06:59:21 +0000 UTC" firstStartedPulling="2026-02-03 06:59:22.470683444 +0000 UTC m=+800.757377260" lastFinishedPulling="2026-02-03 06:59:27.759399235 +0000 UTC m=+806.046093041" observedRunningTime="2026-02-03 06:59:28.751056005 +0000 UTC m=+807.037749851" watchObservedRunningTime="2026-02-03 06:59:28.758308212 +0000 UTC m=+807.045002068" Feb 03 06:59:32 crc kubenswrapper[4998]: I0203 06:59:32.254104 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-wsbwf" Feb 03 
06:59:32 crc kubenswrapper[4998]: I0203 06:59:32.583345 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-5b5f5df768-488nq" Feb 03 06:59:32 crc kubenswrapper[4998]: I0203 06:59:32.583404 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-5b5f5df768-488nq" Feb 03 06:59:32 crc kubenswrapper[4998]: I0203 06:59:32.591412 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-5b5f5df768-488nq" Feb 03 06:59:32 crc kubenswrapper[4998]: I0203 06:59:32.771315 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-5b5f5df768-488nq" Feb 03 06:59:32 crc kubenswrapper[4998]: I0203 06:59:32.849422 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-2wmmh"] Feb 03 06:59:42 crc kubenswrapper[4998]: I0203 06:59:42.233192 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7t78k" Feb 03 06:59:51 crc kubenswrapper[4998]: I0203 06:59:51.890291 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-df67d"] Feb 03 06:59:51 crc kubenswrapper[4998]: I0203 06:59:51.892099 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-df67d" Feb 03 06:59:51 crc kubenswrapper[4998]: I0203 06:59:51.901480 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-df67d"] Feb 03 06:59:52 crc kubenswrapper[4998]: I0203 06:59:52.086173 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ba5886b-b9d8-4310-8d5a-46242b44d034-utilities\") pod \"redhat-marketplace-df67d\" (UID: \"0ba5886b-b9d8-4310-8d5a-46242b44d034\") " pod="openshift-marketplace/redhat-marketplace-df67d" Feb 03 06:59:52 crc kubenswrapper[4998]: I0203 06:59:52.086251 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjpb4\" (UniqueName: \"kubernetes.io/projected/0ba5886b-b9d8-4310-8d5a-46242b44d034-kube-api-access-bjpb4\") pod \"redhat-marketplace-df67d\" (UID: \"0ba5886b-b9d8-4310-8d5a-46242b44d034\") " pod="openshift-marketplace/redhat-marketplace-df67d" Feb 03 06:59:52 crc kubenswrapper[4998]: I0203 06:59:52.086287 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ba5886b-b9d8-4310-8d5a-46242b44d034-catalog-content\") pod \"redhat-marketplace-df67d\" (UID: \"0ba5886b-b9d8-4310-8d5a-46242b44d034\") " pod="openshift-marketplace/redhat-marketplace-df67d" Feb 03 06:59:52 crc kubenswrapper[4998]: I0203 06:59:52.186937 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjpb4\" (UniqueName: \"kubernetes.io/projected/0ba5886b-b9d8-4310-8d5a-46242b44d034-kube-api-access-bjpb4\") pod \"redhat-marketplace-df67d\" (UID: \"0ba5886b-b9d8-4310-8d5a-46242b44d034\") " pod="openshift-marketplace/redhat-marketplace-df67d" Feb 03 06:59:52 crc kubenswrapper[4998]: I0203 06:59:52.186991 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ba5886b-b9d8-4310-8d5a-46242b44d034-catalog-content\") pod 
\"redhat-marketplace-df67d\" (UID: \"0ba5886b-b9d8-4310-8d5a-46242b44d034\") " pod="openshift-marketplace/redhat-marketplace-df67d" Feb 03 06:59:52 crc kubenswrapper[4998]: I0203 06:59:52.187029 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ba5886b-b9d8-4310-8d5a-46242b44d034-utilities\") pod \"redhat-marketplace-df67d\" (UID: \"0ba5886b-b9d8-4310-8d5a-46242b44d034\") " pod="openshift-marketplace/redhat-marketplace-df67d" Feb 03 06:59:52 crc kubenswrapper[4998]: I0203 06:59:52.187558 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ba5886b-b9d8-4310-8d5a-46242b44d034-utilities\") pod \"redhat-marketplace-df67d\" (UID: \"0ba5886b-b9d8-4310-8d5a-46242b44d034\") " pod="openshift-marketplace/redhat-marketplace-df67d" Feb 03 06:59:52 crc kubenswrapper[4998]: I0203 06:59:52.187571 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ba5886b-b9d8-4310-8d5a-46242b44d034-catalog-content\") pod \"redhat-marketplace-df67d\" (UID: \"0ba5886b-b9d8-4310-8d5a-46242b44d034\") " pod="openshift-marketplace/redhat-marketplace-df67d" Feb 03 06:59:52 crc kubenswrapper[4998]: I0203 06:59:52.206757 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjpb4\" (UniqueName: \"kubernetes.io/projected/0ba5886b-b9d8-4310-8d5a-46242b44d034-kube-api-access-bjpb4\") pod \"redhat-marketplace-df67d\" (UID: \"0ba5886b-b9d8-4310-8d5a-46242b44d034\") " pod="openshift-marketplace/redhat-marketplace-df67d" Feb 03 06:59:52 crc kubenswrapper[4998]: I0203 06:59:52.215852 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-df67d" Feb 03 06:59:52 crc kubenswrapper[4998]: I0203 06:59:52.412226 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-df67d"] Feb 03 06:59:52 crc kubenswrapper[4998]: I0203 06:59:52.881278 4998 generic.go:334] "Generic (PLEG): container finished" podID="0ba5886b-b9d8-4310-8d5a-46242b44d034" containerID="fa548b72c5c1be1ee18eb1bab281bf0967d3ec4af438290d45ac31a073502cf9" exitCode=0 Feb 03 06:59:52 crc kubenswrapper[4998]: I0203 06:59:52.881379 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-df67d" event={"ID":"0ba5886b-b9d8-4310-8d5a-46242b44d034","Type":"ContainerDied","Data":"fa548b72c5c1be1ee18eb1bab281bf0967d3ec4af438290d45ac31a073502cf9"} Feb 03 06:59:52 crc kubenswrapper[4998]: I0203 06:59:52.881619 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-df67d" event={"ID":"0ba5886b-b9d8-4310-8d5a-46242b44d034","Type":"ContainerStarted","Data":"e9abf7f351d0f446b7e666f442da594120e194fd036af1f39a204b22c4a5ebbb"} Feb 03 06:59:53 crc kubenswrapper[4998]: I0203 06:59:53.890214 4998 generic.go:334] "Generic (PLEG): container finished" podID="0ba5886b-b9d8-4310-8d5a-46242b44d034" containerID="44423892cb0d938910a08b7089cb062a6f06fed2e09300d35c20dd26fef023ba" exitCode=0 Feb 03 06:59:53 crc kubenswrapper[4998]: I0203 06:59:53.890376 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-df67d" event={"ID":"0ba5886b-b9d8-4310-8d5a-46242b44d034","Type":"ContainerDied","Data":"44423892cb0d938910a08b7089cb062a6f06fed2e09300d35c20dd26fef023ba"} Feb 03 06:59:54 crc kubenswrapper[4998]: I0203 06:59:54.104538 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcs7fhg"] Feb 03 06:59:54 crc kubenswrapper[4998]: I0203 06:59:54.106088 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcs7fhg" Feb 03 06:59:54 crc kubenswrapper[4998]: I0203 06:59:54.109932 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Feb 03 06:59:54 crc kubenswrapper[4998]: I0203 06:59:54.111076 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9gqw\" (UniqueName: \"kubernetes.io/projected/50b36942-ee48-48a1-878a-e1b6807f5c89-kube-api-access-x9gqw\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcs7fhg\" (UID: \"50b36942-ee48-48a1-878a-e1b6807f5c89\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcs7fhg" Feb 03 06:59:54 crc kubenswrapper[4998]: I0203 06:59:54.111242 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/50b36942-ee48-48a1-878a-e1b6807f5c89-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcs7fhg\" (UID: \"50b36942-ee48-48a1-878a-e1b6807f5c89\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcs7fhg" Feb 03 06:59:54 crc kubenswrapper[4998]: I0203 06:59:54.111405 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/50b36942-ee48-48a1-878a-e1b6807f5c89-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcs7fhg\" (UID: \"50b36942-ee48-48a1-878a-e1b6807f5c89\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcs7fhg" Feb 03 06:59:54 crc kubenswrapper[4998]: I0203 06:59:54.113549 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcs7fhg"] Feb 03 06:59:54 crc kubenswrapper[4998]: I0203 06:59:54.212014 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/50b36942-ee48-48a1-878a-e1b6807f5c89-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcs7fhg\" (UID: \"50b36942-ee48-48a1-878a-e1b6807f5c89\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcs7fhg" Feb 03 06:59:54 crc kubenswrapper[4998]: I0203 06:59:54.212164 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9gqw\" (UniqueName: \"kubernetes.io/projected/50b36942-ee48-48a1-878a-e1b6807f5c89-kube-api-access-x9gqw\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcs7fhg\" (UID: \"50b36942-ee48-48a1-878a-e1b6807f5c89\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcs7fhg" Feb 03 06:59:54 crc kubenswrapper[4998]: I0203 06:59:54.212243 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/50b36942-ee48-48a1-878a-e1b6807f5c89-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcs7fhg\" (UID: \"50b36942-ee48-48a1-878a-e1b6807f5c89\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcs7fhg" Feb 03 06:59:54 crc kubenswrapper[4998]: I0203 06:59:54.213073 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/50b36942-ee48-48a1-878a-e1b6807f5c89-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcs7fhg\" (UID: \"50b36942-ee48-48a1-878a-e1b6807f5c89\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcs7fhg" Feb 03 06:59:54 crc kubenswrapper[4998]: I0203 06:59:54.213334 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/50b36942-ee48-48a1-878a-e1b6807f5c89-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcs7fhg\" (UID: \"50b36942-ee48-48a1-878a-e1b6807f5c89\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcs7fhg" Feb 03 06:59:54 crc kubenswrapper[4998]: I0203 06:59:54.242301 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9gqw\" (UniqueName: \"kubernetes.io/projected/50b36942-ee48-48a1-878a-e1b6807f5c89-kube-api-access-x9gqw\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcs7fhg\" (UID: \"50b36942-ee48-48a1-878a-e1b6807f5c89\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcs7fhg" Feb 03 06:59:54 crc kubenswrapper[4998]: I0203 06:59:54.428133 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcs7fhg" Feb 03 06:59:54 crc kubenswrapper[4998]: I0203 06:59:54.634916 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcs7fhg"] Feb 03 06:59:54 crc kubenswrapper[4998]: W0203 06:59:54.645446 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod50b36942_ee48_48a1_878a_e1b6807f5c89.slice/crio-ff3b74f9d145f8f26908ef488474d69bf5b488dc1b15ec6a27d34bdeec43c3cb WatchSource:0}: Error finding container ff3b74f9d145f8f26908ef488474d69bf5b488dc1b15ec6a27d34bdeec43c3cb: Status 404 returned error can't find the container with id ff3b74f9d145f8f26908ef488474d69bf5b488dc1b15ec6a27d34bdeec43c3cb Feb 03 06:59:54 crc kubenswrapper[4998]: I0203 06:59:54.900178 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-df67d" event={"ID":"0ba5886b-b9d8-4310-8d5a-46242b44d034","Type":"ContainerStarted","Data":"d1b0f72d07c6ec7e1f53105ace073600203eb44ad9a6b0ce6c9927b65f2bc861"} Feb 03 06:59:54 crc kubenswrapper[4998]: I0203 06:59:54.901858 4998 generic.go:334] "Generic (PLEG): container finished" podID="50b36942-ee48-48a1-878a-e1b6807f5c89" containerID="e03c036cb47086cebc03edace1186992977c5bd0a83d66d4c33df2670c9189c4" exitCode=0 Feb 03 06:59:54 crc kubenswrapper[4998]: I0203 06:59:54.901896 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcs7fhg" event={"ID":"50b36942-ee48-48a1-878a-e1b6807f5c89","Type":"ContainerDied","Data":"e03c036cb47086cebc03edace1186992977c5bd0a83d66d4c33df2670c9189c4"} Feb 03 06:59:54 crc kubenswrapper[4998]: I0203 06:59:54.901929 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcs7fhg" event={"ID":"50b36942-ee48-48a1-878a-e1b6807f5c89","Type":"ContainerStarted","Data":"ff3b74f9d145f8f26908ef488474d69bf5b488dc1b15ec6a27d34bdeec43c3cb"} Feb 03 06:59:54 crc kubenswrapper[4998]: I0203 06:59:54.921639 
4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-df67d" podStartSLOduration=2.5023488609999998 podStartE2EDuration="3.921620358s" podCreationTimestamp="2026-02-03 06:59:51 +0000 UTC" firstStartedPulling="2026-02-03 06:59:52.883613827 +0000 UTC m=+831.170307643" lastFinishedPulling="2026-02-03 06:59:54.302885334 +0000 UTC m=+832.589579140" observedRunningTime="2026-02-03 06:59:54.917611984 +0000 UTC m=+833.204305810" watchObservedRunningTime="2026-02-03 06:59:54.921620358 +0000 UTC m=+833.208314164" Feb 03 06:59:56 crc kubenswrapper[4998]: I0203 06:59:56.913498 4998 generic.go:334] "Generic (PLEG): container finished" podID="50b36942-ee48-48a1-878a-e1b6807f5c89" containerID="68f77920ebff705d59fbb44a553f9228f922b7df35f722b1de657b7c312ec17f" exitCode=0 Feb 03 06:59:56 crc kubenswrapper[4998]: I0203 06:59:56.913602 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcs7fhg" event={"ID":"50b36942-ee48-48a1-878a-e1b6807f5c89","Type":"ContainerDied","Data":"68f77920ebff705d59fbb44a553f9228f922b7df35f722b1de657b7c312ec17f"} Feb 03 06:59:57 crc kubenswrapper[4998]: I0203 06:59:57.898669 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-2wmmh" podUID="e43c1b92-4c7b-4db9-8363-472ecea3213f" containerName="console" containerID="cri-o://cd828f58e5175060a4690c17068da1153d84f5f33c1bf286043180d38674c6dc" gracePeriod=15 Feb 03 06:59:57 crc kubenswrapper[4998]: I0203 06:59:57.920950 4998 generic.go:334] "Generic (PLEG): container finished" podID="50b36942-ee48-48a1-878a-e1b6807f5c89" containerID="72409b240e37ee31daf7e91a2b4966992d681ea53486ace8ee2c7f655a53f383" exitCode=0 Feb 03 06:59:57 crc kubenswrapper[4998]: I0203 06:59:57.921066 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcs7fhg" event={"ID":"50b36942-ee48-48a1-878a-e1b6807f5c89","Type":"ContainerDied","Data":"72409b240e37ee31daf7e91a2b4966992d681ea53486ace8ee2c7f655a53f383"} Feb 03 06:59:58 crc kubenswrapper[4998]: I0203 06:59:58.246470 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-2wmmh_e43c1b92-4c7b-4db9-8363-472ecea3213f/console/0.log" Feb 03 06:59:58 crc kubenswrapper[4998]: I0203 06:59:58.246541 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-2wmmh" Feb 03 06:59:58 crc kubenswrapper[4998]: I0203 06:59:58.361504 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e43c1b92-4c7b-4db9-8363-472ecea3213f-console-oauth-config\") pod \"e43c1b92-4c7b-4db9-8363-472ecea3213f\" (UID: \"e43c1b92-4c7b-4db9-8363-472ecea3213f\") " Feb 03 06:59:58 crc kubenswrapper[4998]: I0203 06:59:58.361551 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e43c1b92-4c7b-4db9-8363-472ecea3213f-service-ca\") pod \"e43c1b92-4c7b-4db9-8363-472ecea3213f\" (UID: \"e43c1b92-4c7b-4db9-8363-472ecea3213f\") " Feb 03 06:59:58 crc kubenswrapper[4998]: I0203 06:59:58.361579 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/e43c1b92-4c7b-4db9-8363-472ecea3213f-oauth-serving-cert\") pod \"e43c1b92-4c7b-4db9-8363-472ecea3213f\" (UID: \"e43c1b92-4c7b-4db9-8363-472ecea3213f\") " Feb 03 06:59:58 crc kubenswrapper[4998]: I0203 06:59:58.361609 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/e43c1b92-4c7b-4db9-8363-472ecea3213f-console-config\") pod \"e43c1b92-4c7b-4db9-8363-472ecea3213f\" (UID: \"e43c1b92-4c7b-4db9-8363-472ecea3213f\") " Feb 03 06:59:58 crc kubenswrapper[4998]: I0203 06:59:58.361640 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x5959\" (UniqueName: \"kubernetes.io/projected/e43c1b92-4c7b-4db9-8363-472ecea3213f-kube-api-access-x5959\") pod \"e43c1b92-4c7b-4db9-8363-472ecea3213f\" (UID: \"e43c1b92-4c7b-4db9-8363-472ecea3213f\") " Feb 03 06:59:58 crc kubenswrapper[4998]: I0203 06:59:58.361671 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e43c1b92-4c7b-4db9-8363-472ecea3213f-trusted-ca-bundle\") pod \"e43c1b92-4c7b-4db9-8363-472ecea3213f\" (UID: \"e43c1b92-4c7b-4db9-8363-472ecea3213f\") " Feb 03 06:59:58 crc kubenswrapper[4998]: I0203 06:59:58.362562 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e43c1b92-4c7b-4db9-8363-472ecea3213f-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "e43c1b92-4c7b-4db9-8363-472ecea3213f" (UID: "e43c1b92-4c7b-4db9-8363-472ecea3213f"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:59:58 crc kubenswrapper[4998]: I0203 06:59:58.362581 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e43c1b92-4c7b-4db9-8363-472ecea3213f-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "e43c1b92-4c7b-4db9-8363-472ecea3213f" (UID: "e43c1b92-4c7b-4db9-8363-472ecea3213f"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:59:58 crc kubenswrapper[4998]: I0203 06:59:58.362600 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e43c1b92-4c7b-4db9-8363-472ecea3213f-console-config" (OuterVolumeSpecName: "console-config") pod "e43c1b92-4c7b-4db9-8363-472ecea3213f" (UID: "e43c1b92-4c7b-4db9-8363-472ecea3213f"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:59:58 crc kubenswrapper[4998]: I0203 06:59:58.362624 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e43c1b92-4c7b-4db9-8363-472ecea3213f-service-ca" (OuterVolumeSpecName: "service-ca") pod "e43c1b92-4c7b-4db9-8363-472ecea3213f" (UID: "e43c1b92-4c7b-4db9-8363-472ecea3213f"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 06:59:58 crc kubenswrapper[4998]: I0203 06:59:58.361709 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e43c1b92-4c7b-4db9-8363-472ecea3213f-console-serving-cert\") pod \"e43c1b92-4c7b-4db9-8363-472ecea3213f\" (UID: \"e43c1b92-4c7b-4db9-8363-472ecea3213f\") " Feb 03 06:59:58 crc kubenswrapper[4998]: I0203 06:59:58.363350 4998 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e43c1b92-4c7b-4db9-8363-472ecea3213f-service-ca\") on node \"crc\" DevicePath \"\"" Feb 03 06:59:58 crc kubenswrapper[4998]: I0203 06:59:58.363371 4998 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/e43c1b92-4c7b-4db9-8363-472ecea3213f-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 03 06:59:58 crc kubenswrapper[4998]: I0203 06:59:58.363382 4998 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/e43c1b92-4c7b-4db9-8363-472ecea3213f-console-config\") on node \"crc\" DevicePath \"\"" Feb 03 06:59:58 crc kubenswrapper[4998]: I0203 06:59:58.363391 4998 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e43c1b92-4c7b-4db9-8363-472ecea3213f-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 06:59:58 crc kubenswrapper[4998]: I0203 06:59:58.367040 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e43c1b92-4c7b-4db9-8363-472ecea3213f-kube-api-access-x5959" (OuterVolumeSpecName: "kube-api-access-x5959") pod "e43c1b92-4c7b-4db9-8363-472ecea3213f" (UID: "e43c1b92-4c7b-4db9-8363-472ecea3213f"). InnerVolumeSpecName "kube-api-access-x5959". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:59:58 crc kubenswrapper[4998]: I0203 06:59:58.367371 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e43c1b92-4c7b-4db9-8363-472ecea3213f-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "e43c1b92-4c7b-4db9-8363-472ecea3213f" (UID: "e43c1b92-4c7b-4db9-8363-472ecea3213f"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:59:58 crc kubenswrapper[4998]: I0203 06:59:58.367694 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e43c1b92-4c7b-4db9-8363-472ecea3213f-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "e43c1b92-4c7b-4db9-8363-472ecea3213f" (UID: "e43c1b92-4c7b-4db9-8363-472ecea3213f"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 06:59:58 crc kubenswrapper[4998]: I0203 06:59:58.465017 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x5959\" (UniqueName: \"kubernetes.io/projected/e43c1b92-4c7b-4db9-8363-472ecea3213f-kube-api-access-x5959\") on node \"crc\" DevicePath \"\"" Feb 03 06:59:58 crc kubenswrapper[4998]: I0203 06:59:58.465054 4998 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e43c1b92-4c7b-4db9-8363-472ecea3213f-console-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 03 06:59:58 crc kubenswrapper[4998]: I0203 06:59:58.465068 4998 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e43c1b92-4c7b-4db9-8363-472ecea3213f-console-oauth-config\") on node \"crc\" DevicePath \"\"" Feb 03 06:59:58 crc kubenswrapper[4998]: I0203 06:59:58.929264 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-2wmmh_e43c1b92-4c7b-4db9-8363-472ecea3213f/console/0.log" Feb 03 06:59:58 crc kubenswrapper[4998]: I0203 06:59:58.929575 4998 generic.go:334] "Generic (PLEG): container finished" podID="e43c1b92-4c7b-4db9-8363-472ecea3213f" containerID="cd828f58e5175060a4690c17068da1153d84f5f33c1bf286043180d38674c6dc" exitCode=2 Feb 03 06:59:58 crc kubenswrapper[4998]: I0203 06:59:58.929659 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-2wmmh" event={"ID":"e43c1b92-4c7b-4db9-8363-472ecea3213f","Type":"ContainerDied","Data":"cd828f58e5175060a4690c17068da1153d84f5f33c1bf286043180d38674c6dc"} Feb 03 06:59:58 crc kubenswrapper[4998]: I0203 06:59:58.929686 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-2wmmh" Feb 03 06:59:58 crc kubenswrapper[4998]: I0203 06:59:58.929714 4998 scope.go:117] "RemoveContainer" containerID="cd828f58e5175060a4690c17068da1153d84f5f33c1bf286043180d38674c6dc" Feb 03 06:59:58 crc kubenswrapper[4998]: I0203 06:59:58.929701 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-2wmmh" event={"ID":"e43c1b92-4c7b-4db9-8363-472ecea3213f","Type":"ContainerDied","Data":"a6e2803bda9d077bfa7c1f6b79103dddac142d579ca236c772369b068b13ec9e"} Feb 03 06:59:58 crc kubenswrapper[4998]: I0203 06:59:58.951354 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-2wmmh"] Feb 03 06:59:58 crc kubenswrapper[4998]: I0203 06:59:58.955287 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-2wmmh"] Feb 03 06:59:58 crc kubenswrapper[4998]: I0203 06:59:58.958331 4998 scope.go:117] "RemoveContainer" containerID="cd828f58e5175060a4690c17068da1153d84f5f33c1bf286043180d38674c6dc" Feb 03 06:59:58 crc kubenswrapper[4998]: E0203 06:59:58.958859 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd828f58e5175060a4690c17068da1153d84f5f33c1bf286043180d38674c6dc\": container with ID starting with cd828f58e5175060a4690c17068da1153d84f5f33c1bf286043180d38674c6dc not found: ID does not exist" containerID="cd828f58e5175060a4690c17068da1153d84f5f33c1bf286043180d38674c6dc" Feb 03 06:59:58 crc kubenswrapper[4998]: I0203 06:59:58.958887 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd828f58e5175060a4690c17068da1153d84f5f33c1bf286043180d38674c6dc"} err="failed to get container status \"cd828f58e5175060a4690c17068da1153d84f5f33c1bf286043180d38674c6dc\": rpc error: code = NotFound desc = could not find container \"cd828f58e5175060a4690c17068da1153d84f5f33c1bf286043180d38674c6dc\": container with ID starting with cd828f58e5175060a4690c17068da1153d84f5f33c1bf286043180d38674c6dc not found: ID does not exist" Feb 03 06:59:59 crc kubenswrapper[4998]: I0203 06:59:59.176420 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcs7fhg" Feb 03 06:59:59 crc kubenswrapper[4998]: I0203 06:59:59.274690 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x9gqw\" (UniqueName: \"kubernetes.io/projected/50b36942-ee48-48a1-878a-e1b6807f5c89-kube-api-access-x9gqw\") pod \"50b36942-ee48-48a1-878a-e1b6807f5c89\" (UID: \"50b36942-ee48-48a1-878a-e1b6807f5c89\") " Feb 03 06:59:59 crc kubenswrapper[4998]: I0203 06:59:59.274758 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/50b36942-ee48-48a1-878a-e1b6807f5c89-util\") pod \"50b36942-ee48-48a1-878a-e1b6807f5c89\" (UID: \"50b36942-ee48-48a1-878a-e1b6807f5c89\") " Feb 03 06:59:59 crc kubenswrapper[4998]: I0203 06:59:59.274838 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/50b36942-ee48-48a1-878a-e1b6807f5c89-bundle\") pod \"50b36942-ee48-48a1-878a-e1b6807f5c89\" (UID: \"50b36942-ee48-48a1-878a-e1b6807f5c89\") " Feb 03 06:59:59 crc kubenswrapper[4998]: I0203 06:59:59.276078 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/50b36942-ee48-48a1-878a-e1b6807f5c89-bundle" (OuterVolumeSpecName: "bundle") pod "50b36942-ee48-48a1-878a-e1b6807f5c89" (UID: "50b36942-ee48-48a1-878a-e1b6807f5c89"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 06:59:59 crc kubenswrapper[4998]: I0203 06:59:59.279426 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50b36942-ee48-48a1-878a-e1b6807f5c89-kube-api-access-x9gqw" (OuterVolumeSpecName: "kube-api-access-x9gqw") pod "50b36942-ee48-48a1-878a-e1b6807f5c89" (UID: "50b36942-ee48-48a1-878a-e1b6807f5c89"). InnerVolumeSpecName "kube-api-access-x9gqw". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 06:59:59 crc kubenswrapper[4998]: I0203 06:59:59.294431 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/50b36942-ee48-48a1-878a-e1b6807f5c89-util" (OuterVolumeSpecName: "util") pod "50b36942-ee48-48a1-878a-e1b6807f5c89" (UID: "50b36942-ee48-48a1-878a-e1b6807f5c89"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 06:59:59 crc kubenswrapper[4998]: I0203 06:59:59.375646 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x9gqw\" (UniqueName: \"kubernetes.io/projected/50b36942-ee48-48a1-878a-e1b6807f5c89-kube-api-access-x9gqw\") on node \"crc\" DevicePath \"\"" Feb 03 06:59:59 crc kubenswrapper[4998]: I0203 06:59:59.375698 4998 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/50b36942-ee48-48a1-878a-e1b6807f5c89-util\") on node \"crc\" DevicePath \"\"" Feb 03 06:59:59 crc kubenswrapper[4998]: I0203 06:59:59.375712 4998 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/50b36942-ee48-48a1-878a-e1b6807f5c89-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 06:59:59 crc kubenswrapper[4998]: I0203 06:59:59.938455 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcs7fhg" event={"ID":"50b36942-ee48-48a1-878a-e1b6807f5c89","Type":"ContainerDied","Data":"ff3b74f9d145f8f26908ef488474d69bf5b488dc1b15ec6a27d34bdeec43c3cb"} Feb 03 06:59:59 crc kubenswrapper[4998]: I0203 06:59:59.938510 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ff3b74f9d145f8f26908ef488474d69bf5b488dc1b15ec6a27d34bdeec43c3cb" Feb 03 06:59:59 crc kubenswrapper[4998]: I0203 06:59:59.938595 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcs7fhg" Feb 03 07:00:00 crc kubenswrapper[4998]: I0203 07:00:00.159832 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501700-m48cz"] Feb 03 07:00:00 crc kubenswrapper[4998]: E0203 07:00:00.160112 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50b36942-ee48-48a1-878a-e1b6807f5c89" containerName="pull" Feb 03 07:00:00 crc kubenswrapper[4998]: I0203 07:00:00.160138 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="50b36942-ee48-48a1-878a-e1b6807f5c89" containerName="pull" Feb 03 07:00:00 crc kubenswrapper[4998]: E0203 07:00:00.160168 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50b36942-ee48-48a1-878a-e1b6807f5c89" containerName="util" Feb 03 07:00:00 crc kubenswrapper[4998]: I0203 07:00:00.160181 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="50b36942-ee48-48a1-878a-e1b6807f5c89" containerName="util" Feb 03 07:00:00 crc kubenswrapper[4998]: E0203 07:00:00.160202 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e43c1b92-4c7b-4db9-8363-472ecea3213f" containerName="console" Feb 03 07:00:00 crc kubenswrapper[4998]: I0203 07:00:00.160215 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="e43c1b92-4c7b-4db9-8363-472ecea3213f" containerName="console" Feb 03 07:00:00 crc kubenswrapper[4998]: E0203 07:00:00.160236 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50b36942-ee48-48a1-878a-e1b6807f5c89" containerName="extract" Feb 03 07:00:00 crc kubenswrapper[4998]: I0203 07:00:00.160246 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="50b36942-ee48-48a1-878a-e1b6807f5c89" containerName="extract" Feb 03 07:00:00 crc kubenswrapper[4998]: I0203 07:00:00.160418 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="e43c1b92-4c7b-4db9-8363-472ecea3213f" containerName="console" Feb 
03 07:00:00 crc kubenswrapper[4998]: I0203 07:00:00.160447 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="50b36942-ee48-48a1-878a-e1b6807f5c89" containerName="extract" Feb 03 07:00:00 crc kubenswrapper[4998]: I0203 07:00:00.161010 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501700-m48cz" Feb 03 07:00:00 crc kubenswrapper[4998]: I0203 07:00:00.163243 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 03 07:00:00 crc kubenswrapper[4998]: I0203 07:00:00.167577 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 03 07:00:00 crc kubenswrapper[4998]: I0203 07:00:00.171190 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501700-m48cz"] Feb 03 07:00:00 crc kubenswrapper[4998]: I0203 07:00:00.286258 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3825c7e6-09d8-43cb-bdd0-b71fb2690844-secret-volume\") pod \"collect-profiles-29501700-m48cz\" (UID: \"3825c7e6-09d8-43cb-bdd0-b71fb2690844\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501700-m48cz" Feb 03 07:00:00 crc kubenswrapper[4998]: I0203 07:00:00.286316 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8qdp\" (UniqueName: \"kubernetes.io/projected/3825c7e6-09d8-43cb-bdd0-b71fb2690844-kube-api-access-c8qdp\") pod \"collect-profiles-29501700-m48cz\" (UID: \"3825c7e6-09d8-43cb-bdd0-b71fb2690844\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501700-m48cz" Feb 03 07:00:00 crc kubenswrapper[4998]: I0203 07:00:00.286359 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3825c7e6-09d8-43cb-bdd0-b71fb2690844-config-volume\") pod \"collect-profiles-29501700-m48cz\" (UID: \"3825c7e6-09d8-43cb-bdd0-b71fb2690844\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501700-m48cz" Feb 03 07:00:00 crc kubenswrapper[4998]: I0203 07:00:00.387438 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3825c7e6-09d8-43cb-bdd0-b71fb2690844-config-volume\") pod \"collect-profiles-29501700-m48cz\" (UID: \"3825c7e6-09d8-43cb-bdd0-b71fb2690844\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501700-m48cz" Feb 03 07:00:00 crc kubenswrapper[4998]: I0203 07:00:00.387533 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3825c7e6-09d8-43cb-bdd0-b71fb2690844-secret-volume\") pod \"collect-profiles-29501700-m48cz\" (UID: \"3825c7e6-09d8-43cb-bdd0-b71fb2690844\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501700-m48cz" Feb 03 07:00:00 crc kubenswrapper[4998]: I0203 07:00:00.387576 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8qdp\" (UniqueName: \"kubernetes.io/projected/3825c7e6-09d8-43cb-bdd0-b71fb2690844-kube-api-access-c8qdp\") pod \"collect-profiles-29501700-m48cz\" (UID: \"3825c7e6-09d8-43cb-bdd0-b71fb2690844\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29501700-m48cz" Feb 03 07:00:00 crc kubenswrapper[4998]: I0203 07:00:00.389143 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3825c7e6-09d8-43cb-bdd0-b71fb2690844-config-volume\") pod \"collect-profiles-29501700-m48cz\" (UID: \"3825c7e6-09d8-43cb-bdd0-b71fb2690844\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501700-m48cz" Feb 03 07:00:00 crc kubenswrapper[4998]: I0203 07:00:00.393968 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3825c7e6-09d8-43cb-bdd0-b71fb2690844-secret-volume\") pod \"collect-profiles-29501700-m48cz\" (UID: \"3825c7e6-09d8-43cb-bdd0-b71fb2690844\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501700-m48cz" Feb 03 07:00:00 crc kubenswrapper[4998]: I0203 07:00:00.407889 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8qdp\" (UniqueName: \"kubernetes.io/projected/3825c7e6-09d8-43cb-bdd0-b71fb2690844-kube-api-access-c8qdp\") pod \"collect-profiles-29501700-m48cz\" (UID: \"3825c7e6-09d8-43cb-bdd0-b71fb2690844\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501700-m48cz" Feb 03 07:00:00 crc kubenswrapper[4998]: I0203 07:00:00.435702 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e43c1b92-4c7b-4db9-8363-472ecea3213f" path="/var/lib/kubelet/pods/e43c1b92-4c7b-4db9-8363-472ecea3213f/volumes" Feb 03 07:00:00 crc kubenswrapper[4998]: I0203 07:00:00.486635 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501700-m48cz" Feb 03 07:00:00 crc kubenswrapper[4998]: I0203 07:00:00.873666 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501700-m48cz"] Feb 03 07:00:00 crc kubenswrapper[4998]: W0203 07:00:00.880396 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3825c7e6_09d8_43cb_bdd0_b71fb2690844.slice/crio-b3108001f7a35db2a878662d8119d1d3b93ad5c300ca0ff5586936f6deb710da WatchSource:0}: Error finding container b3108001f7a35db2a878662d8119d1d3b93ad5c300ca0ff5586936f6deb710da: Status 404 returned error can't find the container with id b3108001f7a35db2a878662d8119d1d3b93ad5c300ca0ff5586936f6deb710da Feb 03 07:00:00 crc kubenswrapper[4998]: I0203 07:00:00.953155 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501700-m48cz" event={"ID":"3825c7e6-09d8-43cb-bdd0-b71fb2690844","Type":"ContainerStarted","Data":"b3108001f7a35db2a878662d8119d1d3b93ad5c300ca0ff5586936f6deb710da"} Feb 03 07:00:01 crc kubenswrapper[4998]: I0203 07:00:01.959898 4998 generic.go:334] "Generic (PLEG): container finished" podID="3825c7e6-09d8-43cb-bdd0-b71fb2690844" containerID="3e044462a2ceba5cd85c9ae93e890507060884ab0cad0ece206a95dd44f10a1e" exitCode=0 Feb 03 07:00:01 crc kubenswrapper[4998]: I0203 07:00:01.959954 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501700-m48cz" event={"ID":"3825c7e6-09d8-43cb-bdd0-b71fb2690844","Type":"ContainerDied","Data":"3e044462a2ceba5cd85c9ae93e890507060884ab0cad0ece206a95dd44f10a1e"} Feb 03 07:00:02 crc kubenswrapper[4998]: I0203 07:00:02.216552 4998 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-df67d" Feb 03 07:00:02 crc kubenswrapper[4998]: I0203 07:00:02.216622 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-df67d" Feb 03 07:00:02 crc kubenswrapper[4998]: I0203 07:00:02.261959 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-df67d" Feb 03 07:00:03 crc kubenswrapper[4998]: I0203 07:00:03.004712 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-df67d" Feb 03 07:00:03 crc kubenswrapper[4998]: I0203 07:00:03.189837 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501700-m48cz" Feb 03 07:00:03 crc kubenswrapper[4998]: I0203 07:00:03.321613 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c8qdp\" (UniqueName: \"kubernetes.io/projected/3825c7e6-09d8-43cb-bdd0-b71fb2690844-kube-api-access-c8qdp\") pod \"3825c7e6-09d8-43cb-bdd0-b71fb2690844\" (UID: \"3825c7e6-09d8-43cb-bdd0-b71fb2690844\") " Feb 03 07:00:03 crc kubenswrapper[4998]: I0203 07:00:03.321736 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3825c7e6-09d8-43cb-bdd0-b71fb2690844-config-volume\") pod \"3825c7e6-09d8-43cb-bdd0-b71fb2690844\" (UID: \"3825c7e6-09d8-43cb-bdd0-b71fb2690844\") " Feb 03 07:00:03 crc kubenswrapper[4998]: I0203 07:00:03.321819 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3825c7e6-09d8-43cb-bdd0-b71fb2690844-secret-volume\") pod \"3825c7e6-09d8-43cb-bdd0-b71fb2690844\" (UID: \"3825c7e6-09d8-43cb-bdd0-b71fb2690844\") " Feb 03 07:00:03 crc kubenswrapper[4998]: I0203 07:00:03.322268 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3825c7e6-09d8-43cb-bdd0-b71fb2690844-config-volume" (OuterVolumeSpecName: "config-volume") pod "3825c7e6-09d8-43cb-bdd0-b71fb2690844" (UID: "3825c7e6-09d8-43cb-bdd0-b71fb2690844"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:00:03 crc kubenswrapper[4998]: I0203 07:00:03.326797 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3825c7e6-09d8-43cb-bdd0-b71fb2690844-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "3825c7e6-09d8-43cb-bdd0-b71fb2690844" (UID: "3825c7e6-09d8-43cb-bdd0-b71fb2690844"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:00:03 crc kubenswrapper[4998]: I0203 07:00:03.326910 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3825c7e6-09d8-43cb-bdd0-b71fb2690844-kube-api-access-c8qdp" (OuterVolumeSpecName: "kube-api-access-c8qdp") pod "3825c7e6-09d8-43cb-bdd0-b71fb2690844" (UID: "3825c7e6-09d8-43cb-bdd0-b71fb2690844"). InnerVolumeSpecName "kube-api-access-c8qdp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:00:03 crc kubenswrapper[4998]: I0203 07:00:03.423390 4998 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3825c7e6-09d8-43cb-bdd0-b71fb2690844-config-volume\") on node \"crc\" DevicePath \"\"" Feb 03 07:00:03 crc kubenswrapper[4998]: I0203 07:00:03.423453 4998 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3825c7e6-09d8-43cb-bdd0-b71fb2690844-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 03 07:00:03 crc kubenswrapper[4998]: I0203 07:00:03.423465 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c8qdp\" (UniqueName: \"kubernetes.io/projected/3825c7e6-09d8-43cb-bdd0-b71fb2690844-kube-api-access-c8qdp\") on node \"crc\" DevicePath \"\"" Feb 03 07:00:03 crc kubenswrapper[4998]: I0203 07:00:03.972236 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501700-m48cz" Feb 03 07:00:03 crc kubenswrapper[4998]: I0203 07:00:03.972285 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501700-m48cz" event={"ID":"3825c7e6-09d8-43cb-bdd0-b71fb2690844","Type":"ContainerDied","Data":"b3108001f7a35db2a878662d8119d1d3b93ad5c300ca0ff5586936f6deb710da"} Feb 03 07:00:03 crc kubenswrapper[4998]: I0203 07:00:03.972387 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b3108001f7a35db2a878662d8119d1d3b93ad5c300ca0ff5586936f6deb710da" Feb 03 07:00:04 crc kubenswrapper[4998]: I0203 07:00:04.252289 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-df67d"] Feb 03 07:00:04 crc kubenswrapper[4998]: I0203 07:00:04.977272 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-df67d" podUID="0ba5886b-b9d8-4310-8d5a-46242b44d034" containerName="registry-server" containerID="cri-o://d1b0f72d07c6ec7e1f53105ace073600203eb44ad9a6b0ce6c9927b65f2bc861" gracePeriod=2 Feb 03 07:00:05 crc kubenswrapper[4998]: I0203 07:00:05.309424 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-df67d" Feb 03 07:00:05 crc kubenswrapper[4998]: I0203 07:00:05.450554 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ba5886b-b9d8-4310-8d5a-46242b44d034-utilities\") pod \"0ba5886b-b9d8-4310-8d5a-46242b44d034\" (UID: \"0ba5886b-b9d8-4310-8d5a-46242b44d034\") " Feb 03 07:00:05 crc kubenswrapper[4998]: I0203 07:00:05.450626 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bjpb4\" (UniqueName: \"kubernetes.io/projected/0ba5886b-b9d8-4310-8d5a-46242b44d034-kube-api-access-bjpb4\") pod \"0ba5886b-b9d8-4310-8d5a-46242b44d034\" (UID: \"0ba5886b-b9d8-4310-8d5a-46242b44d034\") " Feb 03 07:00:05 crc kubenswrapper[4998]: I0203 07:00:05.450668 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ba5886b-b9d8-4310-8d5a-46242b44d034-catalog-content\") pod \"0ba5886b-b9d8-4310-8d5a-46242b44d034\" (UID: \"0ba5886b-b9d8-4310-8d5a-46242b44d034\") " Feb 03 07:00:05 crc kubenswrapper[4998]: I0203 07:00:05.451529 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ba5886b-b9d8-4310-8d5a-46242b44d034-utilities" (OuterVolumeSpecName: "utilities") pod "0ba5886b-b9d8-4310-8d5a-46242b44d034" (UID: "0ba5886b-b9d8-4310-8d5a-46242b44d034"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:00:05 crc kubenswrapper[4998]: I0203 07:00:05.455525 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ba5886b-b9d8-4310-8d5a-46242b44d034-kube-api-access-bjpb4" (OuterVolumeSpecName: "kube-api-access-bjpb4") pod "0ba5886b-b9d8-4310-8d5a-46242b44d034" (UID: "0ba5886b-b9d8-4310-8d5a-46242b44d034"). InnerVolumeSpecName "kube-api-access-bjpb4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:00:05 crc kubenswrapper[4998]: I0203 07:00:05.471982 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ba5886b-b9d8-4310-8d5a-46242b44d034-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0ba5886b-b9d8-4310-8d5a-46242b44d034" (UID: "0ba5886b-b9d8-4310-8d5a-46242b44d034"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:00:05 crc kubenswrapper[4998]: I0203 07:00:05.552069 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ba5886b-b9d8-4310-8d5a-46242b44d034-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:00:05 crc kubenswrapper[4998]: I0203 07:00:05.552333 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bjpb4\" (UniqueName: \"kubernetes.io/projected/0ba5886b-b9d8-4310-8d5a-46242b44d034-kube-api-access-bjpb4\") on node \"crc\" DevicePath \"\"" Feb 03 07:00:05 crc kubenswrapper[4998]: I0203 07:00:05.552413 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ba5886b-b9d8-4310-8d5a-46242b44d034-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:00:05 crc kubenswrapper[4998]: I0203 07:00:05.987573 4998 generic.go:334] "Generic (PLEG): container finished" podID="0ba5886b-b9d8-4310-8d5a-46242b44d034" containerID="d1b0f72d07c6ec7e1f53105ace073600203eb44ad9a6b0ce6c9927b65f2bc861" exitCode=0 Feb 03 07:00:05 crc kubenswrapper[4998]: I0203 07:00:05.987644 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-df67d" Feb 03 07:00:05 crc kubenswrapper[4998]: I0203 07:00:05.987634 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-df67d" event={"ID":"0ba5886b-b9d8-4310-8d5a-46242b44d034","Type":"ContainerDied","Data":"d1b0f72d07c6ec7e1f53105ace073600203eb44ad9a6b0ce6c9927b65f2bc861"} Feb 03 07:00:05 crc kubenswrapper[4998]: I0203 07:00:05.988048 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-df67d" event={"ID":"0ba5886b-b9d8-4310-8d5a-46242b44d034","Type":"ContainerDied","Data":"e9abf7f351d0f446b7e666f442da594120e194fd036af1f39a204b22c4a5ebbb"} Feb 03 07:00:05 crc kubenswrapper[4998]: I0203 07:00:05.988069 4998 scope.go:117] "RemoveContainer" containerID="d1b0f72d07c6ec7e1f53105ace073600203eb44ad9a6b0ce6c9927b65f2bc861" Feb 03 07:00:06 crc kubenswrapper[4998]: I0203 07:00:06.004968 4998 scope.go:117] "RemoveContainer" containerID="44423892cb0d938910a08b7089cb062a6f06fed2e09300d35c20dd26fef023ba" Feb 03 07:00:06 crc kubenswrapper[4998]: I0203 07:00:06.020714 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-df67d"] Feb 03 07:00:06 crc kubenswrapper[4998]: I0203 07:00:06.020831 4998 scope.go:117] "RemoveContainer" containerID="fa548b72c5c1be1ee18eb1bab281bf0967d3ec4af438290d45ac31a073502cf9" Feb 03 07:00:06 crc kubenswrapper[4998]: I0203 07:00:06.029923 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-df67d"] Feb 03 07:00:06 crc kubenswrapper[4998]: I0203 07:00:06.037913 4998 scope.go:117] "RemoveContainer" containerID="d1b0f72d07c6ec7e1f53105ace073600203eb44ad9a6b0ce6c9927b65f2bc861" Feb 03 07:00:06 crc kubenswrapper[4998]: E0203 07:00:06.038289 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d1b0f72d07c6ec7e1f53105ace073600203eb44ad9a6b0ce6c9927b65f2bc861\": container with ID starting with d1b0f72d07c6ec7e1f53105ace073600203eb44ad9a6b0ce6c9927b65f2bc861 not found: ID does not exist" containerID="d1b0f72d07c6ec7e1f53105ace073600203eb44ad9a6b0ce6c9927b65f2bc861" Feb 03 07:00:06 crc kubenswrapper[4998]: I0203 07:00:06.038344 4998 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d1b0f72d07c6ec7e1f53105ace073600203eb44ad9a6b0ce6c9927b65f2bc861"} err="failed to get container status \"d1b0f72d07c6ec7e1f53105ace073600203eb44ad9a6b0ce6c9927b65f2bc861\": rpc error: code = NotFound desc = could not find container \"d1b0f72d07c6ec7e1f53105ace073600203eb44ad9a6b0ce6c9927b65f2bc861\": container with ID starting with d1b0f72d07c6ec7e1f53105ace073600203eb44ad9a6b0ce6c9927b65f2bc861 not found: ID does not exist" Feb 03 07:00:06 crc kubenswrapper[4998]: I0203 07:00:06.038375 4998 scope.go:117] "RemoveContainer" containerID="44423892cb0d938910a08b7089cb062a6f06fed2e09300d35c20dd26fef023ba" Feb 03 07:00:06 crc kubenswrapper[4998]: E0203 07:00:06.038748 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"44423892cb0d938910a08b7089cb062a6f06fed2e09300d35c20dd26fef023ba\": container with ID starting with 44423892cb0d938910a08b7089cb062a6f06fed2e09300d35c20dd26fef023ba not found: ID does not exist" containerID="44423892cb0d938910a08b7089cb062a6f06fed2e09300d35c20dd26fef023ba" Feb 03 07:00:06 crc kubenswrapper[4998]: I0203 07:00:06.038765 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44423892cb0d938910a08b7089cb062a6f06fed2e09300d35c20dd26fef023ba"} err="failed to get container status \"44423892cb0d938910a08b7089cb062a6f06fed2e09300d35c20dd26fef023ba\": rpc error: code = NotFound desc = could not find container \"44423892cb0d938910a08b7089cb062a6f06fed2e09300d35c20dd26fef023ba\": container with ID starting with 44423892cb0d938910a08b7089cb062a6f06fed2e09300d35c20dd26fef023ba not found: ID does not exist" Feb 03 07:00:06 crc kubenswrapper[4998]: I0203 07:00:06.038781 4998 scope.go:117] "RemoveContainer" containerID="fa548b72c5c1be1ee18eb1bab281bf0967d3ec4af438290d45ac31a073502cf9" Feb 03 07:00:06 crc kubenswrapper[4998]: E0203 07:00:06.040921 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa548b72c5c1be1ee18eb1bab281bf0967d3ec4af438290d45ac31a073502cf9\": container with ID starting with fa548b72c5c1be1ee18eb1bab281bf0967d3ec4af438290d45ac31a073502cf9 not found: ID does not exist" containerID="fa548b72c5c1be1ee18eb1bab281bf0967d3ec4af438290d45ac31a073502cf9" Feb 03 07:00:06 crc kubenswrapper[4998]: I0203 07:00:06.040964 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa548b72c5c1be1ee18eb1bab281bf0967d3ec4af438290d45ac31a073502cf9"} err="failed to get container status \"fa548b72c5c1be1ee18eb1bab281bf0967d3ec4af438290d45ac31a073502cf9\": rpc error: code = NotFound desc = could not find container \"fa548b72c5c1be1ee18eb1bab281bf0967d3ec4af438290d45ac31a073502cf9\": container with ID starting with fa548b72c5c1be1ee18eb1bab281bf0967d3ec4af438290d45ac31a073502cf9 not found: ID does not exist" Feb 03 07:00:06 crc kubenswrapper[4998]: I0203 07:00:06.434279 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ba5886b-b9d8-4310-8d5a-46242b44d034" path="/var/lib/kubelet/pods/0ba5886b-b9d8-4310-8d5a-46242b44d034/volumes" Feb 03 07:00:07 crc kubenswrapper[4998]: I0203 07:00:07.839408 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-dc5cbfb88-f5dkw"] Feb 03 07:00:07 crc kubenswrapper[4998]: E0203 07:00:07.840021 4998 cpu_manager.go:410] "RemoveStaleState: removing 
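All three RemoveContainer attempts above fail with gRPC NotFound because the containers are already gone from the runtime; the kubelet logs the error and proceeds, which keeps deletion idempotent. A sketch of that tolerance, under the same assumed cri-api bindings as the earlier example (a library-style helper, not kubelet code):

// criutil.go - illustrative: treat gRPC NotFound from ContainerStatus as
// "already removed", the way the deletion path above logs and continues.
package criutil

import (
	"context"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
	runtimeapi "k8s.io/cri-api/pkg/apis/runtime/v1"
)

// ContainerGone reports whether the container no longer exists in the runtime.
func ContainerGone(ctx context.Context, rt runtimeapi.RuntimeServiceClient, id string) (bool, error) {
	_, err := rt.ContainerStatus(ctx, &runtimeapi.ContainerStatusRequest{ContainerId: id})
	if err == nil {
		return false, nil // still present
	}
	if status.Code(err) == codes.NotFound {
		return true, nil // already removed; nothing left to delete
	}
	return false, err // a real failure, surface it
}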
container" podUID="0ba5886b-b9d8-4310-8d5a-46242b44d034" containerName="extract-content" Feb 03 07:00:07 crc kubenswrapper[4998]: I0203 07:00:07.840037 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ba5886b-b9d8-4310-8d5a-46242b44d034" containerName="extract-content" Feb 03 07:00:07 crc kubenswrapper[4998]: E0203 07:00:07.840048 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ba5886b-b9d8-4310-8d5a-46242b44d034" containerName="registry-server" Feb 03 07:00:07 crc kubenswrapper[4998]: I0203 07:00:07.840056 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ba5886b-b9d8-4310-8d5a-46242b44d034" containerName="registry-server" Feb 03 07:00:07 crc kubenswrapper[4998]: E0203 07:00:07.840074 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ba5886b-b9d8-4310-8d5a-46242b44d034" containerName="extract-utilities" Feb 03 07:00:07 crc kubenswrapper[4998]: I0203 07:00:07.840082 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ba5886b-b9d8-4310-8d5a-46242b44d034" containerName="extract-utilities" Feb 03 07:00:07 crc kubenswrapper[4998]: E0203 07:00:07.840096 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3825c7e6-09d8-43cb-bdd0-b71fb2690844" containerName="collect-profiles" Feb 03 07:00:07 crc kubenswrapper[4998]: I0203 07:00:07.840104 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="3825c7e6-09d8-43cb-bdd0-b71fb2690844" containerName="collect-profiles" Feb 03 07:00:07 crc kubenswrapper[4998]: I0203 07:00:07.840223 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ba5886b-b9d8-4310-8d5a-46242b44d034" containerName="registry-server" Feb 03 07:00:07 crc kubenswrapper[4998]: I0203 07:00:07.840267 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="3825c7e6-09d8-43cb-bdd0-b71fb2690844" containerName="collect-profiles" Feb 03 07:00:07 crc kubenswrapper[4998]: I0203 07:00:07.840705 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-dc5cbfb88-f5dkw" Feb 03 07:00:07 crc kubenswrapper[4998]: I0203 07:00:07.842388 4998 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Feb 03 07:00:07 crc kubenswrapper[4998]: I0203 07:00:07.844416 4998 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Feb 03 07:00:07 crc kubenswrapper[4998]: I0203 07:00:07.845025 4998 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-8mzp9" Feb 03 07:00:07 crc kubenswrapper[4998]: I0203 07:00:07.845185 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Feb 03 07:00:07 crc kubenswrapper[4998]: I0203 07:00:07.846253 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Feb 03 07:00:07 crc kubenswrapper[4998]: I0203 07:00:07.865118 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-dc5cbfb88-f5dkw"] Feb 03 07:00:07 crc kubenswrapper[4998]: I0203 07:00:07.981435 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxg4v\" (UniqueName: \"kubernetes.io/projected/9e1313b8-f7b8-4b68-a1c6-0ece2e40b91a-kube-api-access-jxg4v\") pod \"metallb-operator-controller-manager-dc5cbfb88-f5dkw\" (UID: \"9e1313b8-f7b8-4b68-a1c6-0ece2e40b91a\") " pod="metallb-system/metallb-operator-controller-manager-dc5cbfb88-f5dkw" Feb 03 07:00:07 crc kubenswrapper[4998]: I0203 07:00:07.981478 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9e1313b8-f7b8-4b68-a1c6-0ece2e40b91a-apiservice-cert\") pod \"metallb-operator-controller-manager-dc5cbfb88-f5dkw\" (UID: \"9e1313b8-f7b8-4b68-a1c6-0ece2e40b91a\") " pod="metallb-system/metallb-operator-controller-manager-dc5cbfb88-f5dkw" Feb 03 07:00:07 crc kubenswrapper[4998]: I0203 07:00:07.981511 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9e1313b8-f7b8-4b68-a1c6-0ece2e40b91a-webhook-cert\") pod \"metallb-operator-controller-manager-dc5cbfb88-f5dkw\" (UID: \"9e1313b8-f7b8-4b68-a1c6-0ece2e40b91a\") " pod="metallb-system/metallb-operator-controller-manager-dc5cbfb88-f5dkw" Feb 03 07:00:08 crc kubenswrapper[4998]: I0203 07:00:08.066095 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-7956f64f85-m4jn6"] Feb 03 07:00:08 crc kubenswrapper[4998]: I0203 07:00:08.066899 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-7956f64f85-m4jn6" Feb 03 07:00:08 crc kubenswrapper[4998]: I0203 07:00:08.068630 4998 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Feb 03 07:00:08 crc kubenswrapper[4998]: I0203 07:00:08.068933 4998 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Feb 03 07:00:08 crc kubenswrapper[4998]: I0203 07:00:08.070868 4998 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-6pfbp" Feb 03 07:00:08 crc kubenswrapper[4998]: I0203 07:00:08.079406 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-7956f64f85-m4jn6"] Feb 03 07:00:08 crc kubenswrapper[4998]: I0203 07:00:08.082670 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9e1313b8-f7b8-4b68-a1c6-0ece2e40b91a-webhook-cert\") pod \"metallb-operator-controller-manager-dc5cbfb88-f5dkw\" (UID: \"9e1313b8-f7b8-4b68-a1c6-0ece2e40b91a\") " pod="metallb-system/metallb-operator-controller-manager-dc5cbfb88-f5dkw" Feb 03 07:00:08 crc kubenswrapper[4998]: I0203 07:00:08.082727 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7kvp\" (UniqueName: \"kubernetes.io/projected/416f5c48-0a2c-4780-8a2f-50892fd3d008-kube-api-access-b7kvp\") pod \"metallb-operator-webhook-server-7956f64f85-m4jn6\" (UID: \"416f5c48-0a2c-4780-8a2f-50892fd3d008\") " pod="metallb-system/metallb-operator-webhook-server-7956f64f85-m4jn6" Feb 03 07:00:08 crc kubenswrapper[4998]: I0203 07:00:08.082755 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/416f5c48-0a2c-4780-8a2f-50892fd3d008-apiservice-cert\") pod \"metallb-operator-webhook-server-7956f64f85-m4jn6\" (UID: \"416f5c48-0a2c-4780-8a2f-50892fd3d008\") " pod="metallb-system/metallb-operator-webhook-server-7956f64f85-m4jn6" Feb 03 07:00:08 crc kubenswrapper[4998]: I0203 07:00:08.082824 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/416f5c48-0a2c-4780-8a2f-50892fd3d008-webhook-cert\") pod \"metallb-operator-webhook-server-7956f64f85-m4jn6\" (UID: \"416f5c48-0a2c-4780-8a2f-50892fd3d008\") " pod="metallb-system/metallb-operator-webhook-server-7956f64f85-m4jn6" Feb 03 07:00:08 crc kubenswrapper[4998]: I0203 07:00:08.082878 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxg4v\" (UniqueName: \"kubernetes.io/projected/9e1313b8-f7b8-4b68-a1c6-0ece2e40b91a-kube-api-access-jxg4v\") pod \"metallb-operator-controller-manager-dc5cbfb88-f5dkw\" (UID: \"9e1313b8-f7b8-4b68-a1c6-0ece2e40b91a\") " pod="metallb-system/metallb-operator-controller-manager-dc5cbfb88-f5dkw" Feb 03 07:00:08 crc kubenswrapper[4998]: I0203 07:00:08.082918 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9e1313b8-f7b8-4b68-a1c6-0ece2e40b91a-apiservice-cert\") pod \"metallb-operator-controller-manager-dc5cbfb88-f5dkw\" (UID: \"9e1313b8-f7b8-4b68-a1c6-0ece2e40b91a\") " pod="metallb-system/metallb-operator-controller-manager-dc5cbfb88-f5dkw" Feb 03 07:00:08 crc 
kubenswrapper[4998]: I0203 07:00:08.087267 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9e1313b8-f7b8-4b68-a1c6-0ece2e40b91a-apiservice-cert\") pod \"metallb-operator-controller-manager-dc5cbfb88-f5dkw\" (UID: \"9e1313b8-f7b8-4b68-a1c6-0ece2e40b91a\") " pod="metallb-system/metallb-operator-controller-manager-dc5cbfb88-f5dkw" Feb 03 07:00:08 crc kubenswrapper[4998]: I0203 07:00:08.088208 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9e1313b8-f7b8-4b68-a1c6-0ece2e40b91a-webhook-cert\") pod \"metallb-operator-controller-manager-dc5cbfb88-f5dkw\" (UID: \"9e1313b8-f7b8-4b68-a1c6-0ece2e40b91a\") " pod="metallb-system/metallb-operator-controller-manager-dc5cbfb88-f5dkw" Feb 03 07:00:08 crc kubenswrapper[4998]: I0203 07:00:08.100375 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxg4v\" (UniqueName: \"kubernetes.io/projected/9e1313b8-f7b8-4b68-a1c6-0ece2e40b91a-kube-api-access-jxg4v\") pod \"metallb-operator-controller-manager-dc5cbfb88-f5dkw\" (UID: \"9e1313b8-f7b8-4b68-a1c6-0ece2e40b91a\") " pod="metallb-system/metallb-operator-controller-manager-dc5cbfb88-f5dkw" Feb 03 07:00:08 crc kubenswrapper[4998]: I0203 07:00:08.156386 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-dc5cbfb88-f5dkw" Feb 03 07:00:08 crc kubenswrapper[4998]: I0203 07:00:08.188226 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/416f5c48-0a2c-4780-8a2f-50892fd3d008-webhook-cert\") pod \"metallb-operator-webhook-server-7956f64f85-m4jn6\" (UID: \"416f5c48-0a2c-4780-8a2f-50892fd3d008\") " pod="metallb-system/metallb-operator-webhook-server-7956f64f85-m4jn6" Feb 03 07:00:08 crc kubenswrapper[4998]: I0203 07:00:08.188301 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7kvp\" (UniqueName: \"kubernetes.io/projected/416f5c48-0a2c-4780-8a2f-50892fd3d008-kube-api-access-b7kvp\") pod \"metallb-operator-webhook-server-7956f64f85-m4jn6\" (UID: \"416f5c48-0a2c-4780-8a2f-50892fd3d008\") " pod="metallb-system/metallb-operator-webhook-server-7956f64f85-m4jn6" Feb 03 07:00:08 crc kubenswrapper[4998]: I0203 07:00:08.188324 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/416f5c48-0a2c-4780-8a2f-50892fd3d008-apiservice-cert\") pod \"metallb-operator-webhook-server-7956f64f85-m4jn6\" (UID: \"416f5c48-0a2c-4780-8a2f-50892fd3d008\") " pod="metallb-system/metallb-operator-webhook-server-7956f64f85-m4jn6" Feb 03 07:00:08 crc kubenswrapper[4998]: I0203 07:00:08.195006 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/416f5c48-0a2c-4780-8a2f-50892fd3d008-apiservice-cert\") pod \"metallb-operator-webhook-server-7956f64f85-m4jn6\" (UID: \"416f5c48-0a2c-4780-8a2f-50892fd3d008\") " pod="metallb-system/metallb-operator-webhook-server-7956f64f85-m4jn6" Feb 03 07:00:08 crc kubenswrapper[4998]: I0203 07:00:08.195197 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/416f5c48-0a2c-4780-8a2f-50892fd3d008-webhook-cert\") pod \"metallb-operator-webhook-server-7956f64f85-m4jn6\" (UID: 
\"416f5c48-0a2c-4780-8a2f-50892fd3d008\") " pod="metallb-system/metallb-operator-webhook-server-7956f64f85-m4jn6" Feb 03 07:00:08 crc kubenswrapper[4998]: I0203 07:00:08.209337 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7kvp\" (UniqueName: \"kubernetes.io/projected/416f5c48-0a2c-4780-8a2f-50892fd3d008-kube-api-access-b7kvp\") pod \"metallb-operator-webhook-server-7956f64f85-m4jn6\" (UID: \"416f5c48-0a2c-4780-8a2f-50892fd3d008\") " pod="metallb-system/metallb-operator-webhook-server-7956f64f85-m4jn6" Feb 03 07:00:08 crc kubenswrapper[4998]: I0203 07:00:08.377051 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-dc5cbfb88-f5dkw"] Feb 03 07:00:08 crc kubenswrapper[4998]: I0203 07:00:08.381190 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-7956f64f85-m4jn6" Feb 03 07:00:08 crc kubenswrapper[4998]: W0203 07:00:08.395123 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9e1313b8_f7b8_4b68_a1c6_0ece2e40b91a.slice/crio-4d68b2feca2ae2a1911c876fb250ef7bb6de2a58b14b150d5bc5ede03920713d WatchSource:0}: Error finding container 4d68b2feca2ae2a1911c876fb250ef7bb6de2a58b14b150d5bc5ede03920713d: Status 404 returned error can't find the container with id 4d68b2feca2ae2a1911c876fb250ef7bb6de2a58b14b150d5bc5ede03920713d Feb 03 07:00:08 crc kubenswrapper[4998]: I0203 07:00:08.597463 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-7956f64f85-m4jn6"] Feb 03 07:00:09 crc kubenswrapper[4998]: I0203 07:00:09.004220 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-dc5cbfb88-f5dkw" event={"ID":"9e1313b8-f7b8-4b68-a1c6-0ece2e40b91a","Type":"ContainerStarted","Data":"4d68b2feca2ae2a1911c876fb250ef7bb6de2a58b14b150d5bc5ede03920713d"} Feb 03 07:00:09 crc kubenswrapper[4998]: I0203 07:00:09.005390 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-7956f64f85-m4jn6" event={"ID":"416f5c48-0a2c-4780-8a2f-50892fd3d008","Type":"ContainerStarted","Data":"c0721f31e110af134d0a52907d245eb3d67c9eb64c2f816b13bbeae792b2ca86"} Feb 03 07:00:14 crc kubenswrapper[4998]: I0203 07:00:14.036037 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-7956f64f85-m4jn6" event={"ID":"416f5c48-0a2c-4780-8a2f-50892fd3d008","Type":"ContainerStarted","Data":"dc43e22d2d556a1b6d5cad6db796aa9fcb1f3802506959b00d3a0e5c8d28972d"} Feb 03 07:00:14 crc kubenswrapper[4998]: I0203 07:00:14.036614 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-7956f64f85-m4jn6" Feb 03 07:00:14 crc kubenswrapper[4998]: I0203 07:00:14.038079 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-dc5cbfb88-f5dkw" event={"ID":"9e1313b8-f7b8-4b68-a1c6-0ece2e40b91a","Type":"ContainerStarted","Data":"c9d482dab9e84210491985e465ba09ad08b20547cae7417c83cbc659cc907c4f"} Feb 03 07:00:14 crc kubenswrapper[4998]: I0203 07:00:14.038232 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-dc5cbfb88-f5dkw" Feb 03 07:00:14 crc kubenswrapper[4998]: I0203 07:00:14.056649 4998 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-7956f64f85-m4jn6" podStartSLOduration=0.904069079 podStartE2EDuration="6.056633086s" podCreationTimestamp="2026-02-03 07:00:08 +0000 UTC" firstStartedPulling="2026-02-03 07:00:08.614173121 +0000 UTC m=+846.900866927" lastFinishedPulling="2026-02-03 07:00:13.766737128 +0000 UTC m=+852.053430934" observedRunningTime="2026-02-03 07:00:14.055143903 +0000 UTC m=+852.341837709" watchObservedRunningTime="2026-02-03 07:00:14.056633086 +0000 UTC m=+852.343326882" Feb 03 07:00:14 crc kubenswrapper[4998]: I0203 07:00:14.085609 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-dc5cbfb88-f5dkw" podStartSLOduration=1.756594802 podStartE2EDuration="7.085592643s" podCreationTimestamp="2026-02-03 07:00:07 +0000 UTC" firstStartedPulling="2026-02-03 07:00:08.398564157 +0000 UTC m=+846.685257963" lastFinishedPulling="2026-02-03 07:00:13.727561998 +0000 UTC m=+852.014255804" observedRunningTime="2026-02-03 07:00:14.08196956 +0000 UTC m=+852.368663366" watchObservedRunningTime="2026-02-03 07:00:14.085592643 +0000 UTC m=+852.372286449" Feb 03 07:00:28 crc kubenswrapper[4998]: I0203 07:00:28.386260 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-7956f64f85-m4jn6" Feb 03 07:00:48 crc kubenswrapper[4998]: I0203 07:00:48.159771 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-dc5cbfb88-f5dkw" Feb 03 07:00:48 crc kubenswrapper[4998]: I0203 07:00:48.869609 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-4zdf4"] Feb 03 07:00:48 crc kubenswrapper[4998]: I0203 07:00:48.872150 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-4zdf4" Feb 03 07:00:48 crc kubenswrapper[4998]: I0203 07:00:48.875195 4998 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Feb 03 07:00:48 crc kubenswrapper[4998]: I0203 07:00:48.875505 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Feb 03 07:00:48 crc kubenswrapper[4998]: I0203 07:00:48.875687 4998 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-w6xqx" Feb 03 07:00:48 crc kubenswrapper[4998]: I0203 07:00:48.879627 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-448lk"] Feb 03 07:00:48 crc kubenswrapper[4998]: I0203 07:00:48.880603 4998 util.go:30] "No sandbox for pod can be found. 
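The two pod_startup_latency_tracker entries above encode one relation: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration additionally subtracts the image-pull window (lastFinishedPulling minus firstStartedPulling). For the webhook-server pod: 6.056633086s minus a 5.152564007s pull window gives 0.904069079s, exactly the logged SLO value. A small self-contained check of that arithmetic (the parsing layout is an assumption matching Go's default time.Time formatting):

// startup_duration.go - illustrative check of the tracker arithmetic above.
package main

import (
	"fmt"
	"time"
)

func main() {
	parse := func(s string) time.Time {
		t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
		if err != nil {
			panic(err)
		}
		return t
	}
	// Timestamps copied from the webhook-server tracker entry.
	created := parse("2026-02-03 07:00:08 +0000 UTC")
	firstPull := parse("2026-02-03 07:00:08.614173121 +0000 UTC")
	lastPull := parse("2026-02-03 07:00:13.766737128 +0000 UTC")
	observed := parse("2026-02-03 07:00:14.056633086 +0000 UTC")

	e2e := observed.Sub(created)         // podStartE2EDuration
	slo := e2e - lastPull.Sub(firstPull) // pull window excluded
	fmt.Println(e2e, slo)                // 6.056633086s 904.069079ms
}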
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-448lk" Feb 03 07:00:48 crc kubenswrapper[4998]: I0203 07:00:48.883645 4998 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Feb 03 07:00:48 crc kubenswrapper[4998]: I0203 07:00:48.892936 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-448lk"] Feb 03 07:00:48 crc kubenswrapper[4998]: I0203 07:00:48.931416 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/fbe0ed58-3a38-4039-b0db-6c8e52675fe6-metrics\") pod \"frr-k8s-4zdf4\" (UID: \"fbe0ed58-3a38-4039-b0db-6c8e52675fe6\") " pod="metallb-system/frr-k8s-4zdf4" Feb 03 07:00:48 crc kubenswrapper[4998]: I0203 07:00:48.931455 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/fbe0ed58-3a38-4039-b0db-6c8e52675fe6-frr-sockets\") pod \"frr-k8s-4zdf4\" (UID: \"fbe0ed58-3a38-4039-b0db-6c8e52675fe6\") " pod="metallb-system/frr-k8s-4zdf4" Feb 03 07:00:48 crc kubenswrapper[4998]: I0203 07:00:48.931478 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/fbe0ed58-3a38-4039-b0db-6c8e52675fe6-frr-conf\") pod \"frr-k8s-4zdf4\" (UID: \"fbe0ed58-3a38-4039-b0db-6c8e52675fe6\") " pod="metallb-system/frr-k8s-4zdf4" Feb 03 07:00:48 crc kubenswrapper[4998]: I0203 07:00:48.931504 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/fbe0ed58-3a38-4039-b0db-6c8e52675fe6-reloader\") pod \"frr-k8s-4zdf4\" (UID: \"fbe0ed58-3a38-4039-b0db-6c8e52675fe6\") " pod="metallb-system/frr-k8s-4zdf4" Feb 03 07:00:48 crc kubenswrapper[4998]: I0203 07:00:48.931531 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/27325089-5914-4776-8709-b7068b537775-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-448lk\" (UID: \"27325089-5914-4776-8709-b7068b537775\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-448lk" Feb 03 07:00:48 crc kubenswrapper[4998]: I0203 07:00:48.931579 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ljsxj\" (UniqueName: \"kubernetes.io/projected/fbe0ed58-3a38-4039-b0db-6c8e52675fe6-kube-api-access-ljsxj\") pod \"frr-k8s-4zdf4\" (UID: \"fbe0ed58-3a38-4039-b0db-6c8e52675fe6\") " pod="metallb-system/frr-k8s-4zdf4" Feb 03 07:00:48 crc kubenswrapper[4998]: I0203 07:00:48.931665 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/fbe0ed58-3a38-4039-b0db-6c8e52675fe6-frr-startup\") pod \"frr-k8s-4zdf4\" (UID: \"fbe0ed58-3a38-4039-b0db-6c8e52675fe6\") " pod="metallb-system/frr-k8s-4zdf4" Feb 03 07:00:48 crc kubenswrapper[4998]: I0203 07:00:48.931728 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fbe0ed58-3a38-4039-b0db-6c8e52675fe6-metrics-certs\") pod \"frr-k8s-4zdf4\" (UID: \"fbe0ed58-3a38-4039-b0db-6c8e52675fe6\") " pod="metallb-system/frr-k8s-4zdf4" Feb 03 07:00:48 crc kubenswrapper[4998]: I0203 
07:00:48.931758 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9kk4\" (UniqueName: \"kubernetes.io/projected/27325089-5914-4776-8709-b7068b537775-kube-api-access-l9kk4\") pod \"frr-k8s-webhook-server-7df86c4f6c-448lk\" (UID: \"27325089-5914-4776-8709-b7068b537775\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-448lk" Feb 03 07:00:48 crc kubenswrapper[4998]: I0203 07:00:48.971462 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-jbt2j"] Feb 03 07:00:48 crc kubenswrapper[4998]: I0203 07:00:48.972540 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-jbt2j" Feb 03 07:00:48 crc kubenswrapper[4998]: I0203 07:00:48.982167 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-6968d8fdc4-xl925"] Feb 03 07:00:48 crc kubenswrapper[4998]: I0203 07:00:48.982207 4998 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Feb 03 07:00:48 crc kubenswrapper[4998]: I0203 07:00:48.982420 4998 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-9dtlb" Feb 03 07:00:48 crc kubenswrapper[4998]: I0203 07:00:48.982904 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Feb 03 07:00:48 crc kubenswrapper[4998]: I0203 07:00:48.983130 4998 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Feb 03 07:00:48 crc kubenswrapper[4998]: I0203 07:00:48.984918 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-6968d8fdc4-xl925" Feb 03 07:00:48 crc kubenswrapper[4998]: I0203 07:00:48.990877 4998 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.010004 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-xl925"] Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.035449 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/27325089-5914-4776-8709-b7068b537775-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-448lk\" (UID: \"27325089-5914-4776-8709-b7068b537775\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-448lk" Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.035494 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6e8b1d0f-1364-40b4-8796-91ce27a5c0fa-metrics-certs\") pod \"controller-6968d8fdc4-xl925\" (UID: \"6e8b1d0f-1364-40b4-8796-91ce27a5c0fa\") " pod="metallb-system/controller-6968d8fdc4-xl925" Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.035521 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ljsxj\" (UniqueName: \"kubernetes.io/projected/fbe0ed58-3a38-4039-b0db-6c8e52675fe6-kube-api-access-ljsxj\") pod \"frr-k8s-4zdf4\" (UID: \"fbe0ed58-3a38-4039-b0db-6c8e52675fe6\") " pod="metallb-system/frr-k8s-4zdf4" Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.035537 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: 
\"kubernetes.io/configmap/8330499e-9bd0-426b-bf63-9a7576e6d615-metallb-excludel2\") pod \"speaker-jbt2j\" (UID: \"8330499e-9bd0-426b-bf63-9a7576e6d615\") " pod="metallb-system/speaker-jbt2j" Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.035557 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/fbe0ed58-3a38-4039-b0db-6c8e52675fe6-frr-startup\") pod \"frr-k8s-4zdf4\" (UID: \"fbe0ed58-3a38-4039-b0db-6c8e52675fe6\") " pod="metallb-system/frr-k8s-4zdf4" Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.035574 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7gn8f\" (UniqueName: \"kubernetes.io/projected/6e8b1d0f-1364-40b4-8796-91ce27a5c0fa-kube-api-access-7gn8f\") pod \"controller-6968d8fdc4-xl925\" (UID: \"6e8b1d0f-1364-40b4-8796-91ce27a5c0fa\") " pod="metallb-system/controller-6968d8fdc4-xl925" Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.035615 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fbe0ed58-3a38-4039-b0db-6c8e52675fe6-metrics-certs\") pod \"frr-k8s-4zdf4\" (UID: \"fbe0ed58-3a38-4039-b0db-6c8e52675fe6\") " pod="metallb-system/frr-k8s-4zdf4" Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.035644 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9kk4\" (UniqueName: \"kubernetes.io/projected/27325089-5914-4776-8709-b7068b537775-kube-api-access-l9kk4\") pod \"frr-k8s-webhook-server-7df86c4f6c-448lk\" (UID: \"27325089-5914-4776-8709-b7068b537775\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-448lk" Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.035668 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4dfdz\" (UniqueName: \"kubernetes.io/projected/8330499e-9bd0-426b-bf63-9a7576e6d615-kube-api-access-4dfdz\") pod \"speaker-jbt2j\" (UID: \"8330499e-9bd0-426b-bf63-9a7576e6d615\") " pod="metallb-system/speaker-jbt2j" Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.035687 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8330499e-9bd0-426b-bf63-9a7576e6d615-metrics-certs\") pod \"speaker-jbt2j\" (UID: \"8330499e-9bd0-426b-bf63-9a7576e6d615\") " pod="metallb-system/speaker-jbt2j" Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.035711 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/fbe0ed58-3a38-4039-b0db-6c8e52675fe6-metrics\") pod \"frr-k8s-4zdf4\" (UID: \"fbe0ed58-3a38-4039-b0db-6c8e52675fe6\") " pod="metallb-system/frr-k8s-4zdf4" Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.035727 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/fbe0ed58-3a38-4039-b0db-6c8e52675fe6-frr-sockets\") pod \"frr-k8s-4zdf4\" (UID: \"fbe0ed58-3a38-4039-b0db-6c8e52675fe6\") " pod="metallb-system/frr-k8s-4zdf4" Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.035744 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/fbe0ed58-3a38-4039-b0db-6c8e52675fe6-frr-conf\") pod \"frr-k8s-4zdf4\" (UID: 
\"fbe0ed58-3a38-4039-b0db-6c8e52675fe6\") " pod="metallb-system/frr-k8s-4zdf4" Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.035760 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6e8b1d0f-1364-40b4-8796-91ce27a5c0fa-cert\") pod \"controller-6968d8fdc4-xl925\" (UID: \"6e8b1d0f-1364-40b4-8796-91ce27a5c0fa\") " pod="metallb-system/controller-6968d8fdc4-xl925" Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.035795 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/8330499e-9bd0-426b-bf63-9a7576e6d615-memberlist\") pod \"speaker-jbt2j\" (UID: \"8330499e-9bd0-426b-bf63-9a7576e6d615\") " pod="metallb-system/speaker-jbt2j" Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.035819 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/fbe0ed58-3a38-4039-b0db-6c8e52675fe6-reloader\") pod \"frr-k8s-4zdf4\" (UID: \"fbe0ed58-3a38-4039-b0db-6c8e52675fe6\") " pod="metallb-system/frr-k8s-4zdf4" Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.036230 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/fbe0ed58-3a38-4039-b0db-6c8e52675fe6-reloader\") pod \"frr-k8s-4zdf4\" (UID: \"fbe0ed58-3a38-4039-b0db-6c8e52675fe6\") " pod="metallb-system/frr-k8s-4zdf4" Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.036269 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/fbe0ed58-3a38-4039-b0db-6c8e52675fe6-frr-sockets\") pod \"frr-k8s-4zdf4\" (UID: \"fbe0ed58-3a38-4039-b0db-6c8e52675fe6\") " pod="metallb-system/frr-k8s-4zdf4" Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.036468 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/fbe0ed58-3a38-4039-b0db-6c8e52675fe6-frr-conf\") pod \"frr-k8s-4zdf4\" (UID: \"fbe0ed58-3a38-4039-b0db-6c8e52675fe6\") " pod="metallb-system/frr-k8s-4zdf4" Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.036558 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/fbe0ed58-3a38-4039-b0db-6c8e52675fe6-metrics\") pod \"frr-k8s-4zdf4\" (UID: \"fbe0ed58-3a38-4039-b0db-6c8e52675fe6\") " pod="metallb-system/frr-k8s-4zdf4" Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.037134 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/fbe0ed58-3a38-4039-b0db-6c8e52675fe6-frr-startup\") pod \"frr-k8s-4zdf4\" (UID: \"fbe0ed58-3a38-4039-b0db-6c8e52675fe6\") " pod="metallb-system/frr-k8s-4zdf4" Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.054561 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/27325089-5914-4776-8709-b7068b537775-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-448lk\" (UID: \"27325089-5914-4776-8709-b7068b537775\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-448lk" Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.054935 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fbe0ed58-3a38-4039-b0db-6c8e52675fe6-metrics-certs\") pod 
\"frr-k8s-4zdf4\" (UID: \"fbe0ed58-3a38-4039-b0db-6c8e52675fe6\") " pod="metallb-system/frr-k8s-4zdf4" Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.057298 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9kk4\" (UniqueName: \"kubernetes.io/projected/27325089-5914-4776-8709-b7068b537775-kube-api-access-l9kk4\") pod \"frr-k8s-webhook-server-7df86c4f6c-448lk\" (UID: \"27325089-5914-4776-8709-b7068b537775\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-448lk" Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.059260 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ljsxj\" (UniqueName: \"kubernetes.io/projected/fbe0ed58-3a38-4039-b0db-6c8e52675fe6-kube-api-access-ljsxj\") pod \"frr-k8s-4zdf4\" (UID: \"fbe0ed58-3a38-4039-b0db-6c8e52675fe6\") " pod="metallb-system/frr-k8s-4zdf4" Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.136521 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6e8b1d0f-1364-40b4-8796-91ce27a5c0fa-metrics-certs\") pod \"controller-6968d8fdc4-xl925\" (UID: \"6e8b1d0f-1364-40b4-8796-91ce27a5c0fa\") " pod="metallb-system/controller-6968d8fdc4-xl925" Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.136586 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/8330499e-9bd0-426b-bf63-9a7576e6d615-metallb-excludel2\") pod \"speaker-jbt2j\" (UID: \"8330499e-9bd0-426b-bf63-9a7576e6d615\") " pod="metallb-system/speaker-jbt2j" Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.136611 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7gn8f\" (UniqueName: \"kubernetes.io/projected/6e8b1d0f-1364-40b4-8796-91ce27a5c0fa-kube-api-access-7gn8f\") pod \"controller-6968d8fdc4-xl925\" (UID: \"6e8b1d0f-1364-40b4-8796-91ce27a5c0fa\") " pod="metallb-system/controller-6968d8fdc4-xl925" Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.136647 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4dfdz\" (UniqueName: \"kubernetes.io/projected/8330499e-9bd0-426b-bf63-9a7576e6d615-kube-api-access-4dfdz\") pod \"speaker-jbt2j\" (UID: \"8330499e-9bd0-426b-bf63-9a7576e6d615\") " pod="metallb-system/speaker-jbt2j" Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.136665 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8330499e-9bd0-426b-bf63-9a7576e6d615-metrics-certs\") pod \"speaker-jbt2j\" (UID: \"8330499e-9bd0-426b-bf63-9a7576e6d615\") " pod="metallb-system/speaker-jbt2j" Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.136697 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6e8b1d0f-1364-40b4-8796-91ce27a5c0fa-cert\") pod \"controller-6968d8fdc4-xl925\" (UID: \"6e8b1d0f-1364-40b4-8796-91ce27a5c0fa\") " pod="metallb-system/controller-6968d8fdc4-xl925" Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.136713 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/8330499e-9bd0-426b-bf63-9a7576e6d615-memberlist\") pod \"speaker-jbt2j\" (UID: \"8330499e-9bd0-426b-bf63-9a7576e6d615\") " pod="metallb-system/speaker-jbt2j" Feb 03 07:00:49 crc 
Feb 03 07:00:49 crc kubenswrapper[4998]: E0203 07:00:49.136898 4998 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found
Feb 03 07:00:49 crc kubenswrapper[4998]: E0203 07:00:49.136992 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8330499e-9bd0-426b-bf63-9a7576e6d615-metrics-certs podName:8330499e-9bd0-426b-bf63-9a7576e6d615 nodeName:}" failed. No retries permitted until 2026-02-03 07:00:49.636974462 +0000 UTC m=+887.923668268 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/8330499e-9bd0-426b-bf63-9a7576e6d615-metrics-certs") pod "speaker-jbt2j" (UID: "8330499e-9bd0-426b-bf63-9a7576e6d615") : secret "speaker-certs-secret" not found
Feb 03 07:00:49 crc kubenswrapper[4998]: E0203 07:00:49.137300 4998 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found
Feb 03 07:00:49 crc kubenswrapper[4998]: E0203 07:00:49.137356 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8330499e-9bd0-426b-bf63-9a7576e6d615-memberlist podName:8330499e-9bd0-426b-bf63-9a7576e6d615 nodeName:}" failed. No retries permitted until 2026-02-03 07:00:49.637340032 +0000 UTC m=+887.924033828 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/8330499e-9bd0-426b-bf63-9a7576e6d615-memberlist") pod "speaker-jbt2j" (UID: "8330499e-9bd0-426b-bf63-9a7576e6d615") : secret "metallb-memberlist" not found
Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.137496 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/8330499e-9bd0-426b-bf63-9a7576e6d615-metallb-excludel2\") pod \"speaker-jbt2j\" (UID: \"8330499e-9bd0-426b-bf63-9a7576e6d615\") " pod="metallb-system/speaker-jbt2j"
Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.140418 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6e8b1d0f-1364-40b4-8796-91ce27a5c0fa-metrics-certs\") pod \"controller-6968d8fdc4-xl925\" (UID: \"6e8b1d0f-1364-40b4-8796-91ce27a5c0fa\") " pod="metallb-system/controller-6968d8fdc4-xl925"
Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.141271 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6e8b1d0f-1364-40b4-8796-91ce27a5c0fa-cert\") pod \"controller-6968d8fdc4-xl925\" (UID: \"6e8b1d0f-1364-40b4-8796-91ce27a5c0fa\") " pod="metallb-system/controller-6968d8fdc4-xl925"
Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.152173 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4dfdz\" (UniqueName: \"kubernetes.io/projected/8330499e-9bd0-426b-bf63-9a7576e6d615-kube-api-access-4dfdz\") pod \"speaker-jbt2j\" (UID: \"8330499e-9bd0-426b-bf63-9a7576e6d615\") " pod="metallb-system/speaker-jbt2j"
Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.171698 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7gn8f\" (UniqueName: \"kubernetes.io/projected/6e8b1d0f-1364-40b4-8796-91ce27a5c0fa-kube-api-access-7gn8f\") pod \"controller-6968d8fdc4-xl925\" (UID: \"6e8b1d0f-1364-40b4-8796-91ce27a5c0fa\") " pod="metallb-system/controller-6968d8fdc4-xl925"
Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.201334 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-4zdf4"
Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.210268 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-448lk"
Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.317132 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-6968d8fdc4-xl925"
Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.420582 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-448lk"]
Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.643031 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8330499e-9bd0-426b-bf63-9a7576e6d615-metrics-certs\") pod \"speaker-jbt2j\" (UID: \"8330499e-9bd0-426b-bf63-9a7576e6d615\") " pod="metallb-system/speaker-jbt2j"
Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.643102 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/8330499e-9bd0-426b-bf63-9a7576e6d615-memberlist\") pod \"speaker-jbt2j\" (UID: \"8330499e-9bd0-426b-bf63-9a7576e6d615\") " pod="metallb-system/speaker-jbt2j"
Feb 03 07:00:49 crc kubenswrapper[4998]: E0203 07:00:49.643286 4998 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found
Feb 03 07:00:49 crc kubenswrapper[4998]: E0203 07:00:49.643346 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8330499e-9bd0-426b-bf63-9a7576e6d615-memberlist podName:8330499e-9bd0-426b-bf63-9a7576e6d615 nodeName:}" failed. No retries permitted until 2026-02-03 07:00:50.643327987 +0000 UTC m=+888.930021793 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/8330499e-9bd0-426b-bf63-9a7576e6d615-memberlist") pod "speaker-jbt2j" (UID: "8330499e-9bd0-426b-bf63-9a7576e6d615") : secret "metallb-memberlist" not found
Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.654025 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8330499e-9bd0-426b-bf63-9a7576e6d615-metrics-certs\") pod \"speaker-jbt2j\" (UID: \"8330499e-9bd0-426b-bf63-9a7576e6d615\") " pod="metallb-system/speaker-jbt2j"
Feb 03 07:00:49 crc kubenswrapper[4998]: I0203 07:00:49.721299 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-xl925"]
Feb 03 07:00:49 crc kubenswrapper[4998]: W0203 07:00:49.725550 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6e8b1d0f_1364_40b4_8796_91ce27a5c0fa.slice/crio-2edb3f7767b9c90982d182e26f53562257a5e7dda9aefb3cd89d44e01f5bc7f9 WatchSource:0}: Error finding container 2edb3f7767b9c90982d182e26f53562257a5e7dda9aefb3cd89d44e01f5bc7f9: Status 404 returned error can't find the container with id 2edb3f7767b9c90982d182e26f53562257a5e7dda9aefb3cd89d44e01f5bc7f9
Feb 03 07:00:50 crc kubenswrapper[4998]: I0203 07:00:50.230758 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-xl925" event={"ID":"6e8b1d0f-1364-40b4-8796-91ce27a5c0fa","Type":"ContainerStarted","Data":"318838ff8e7017741b56dfce195279ea49a10edba6825c06f5e0c23255e9c92d"}
Feb 03 07:00:50 crc kubenswrapper[4998]: I0203 07:00:50.231124 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-xl925" event={"ID":"6e8b1d0f-1364-40b4-8796-91ce27a5c0fa","Type":"ContainerStarted","Data":"425f718d9ec6cc3123fc52f6dce0250f18b769b74d4181692b715a9642c9313e"}
Feb 03 07:00:50 crc kubenswrapper[4998]: I0203 07:00:50.231136 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-xl925" event={"ID":"6e8b1d0f-1364-40b4-8796-91ce27a5c0fa","Type":"ContainerStarted","Data":"2edb3f7767b9c90982d182e26f53562257a5e7dda9aefb3cd89d44e01f5bc7f9"}
Feb 03 07:00:50 crc kubenswrapper[4998]: I0203 07:00:50.231155 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-6968d8fdc4-xl925"
Feb 03 07:00:50 crc kubenswrapper[4998]: I0203 07:00:50.234077 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4zdf4" event={"ID":"fbe0ed58-3a38-4039-b0db-6c8e52675fe6","Type":"ContainerStarted","Data":"f6c12963a05cf9c19d131c5a9f97df1a98eb632947ebd297aef85cc95fd1565d"}
Feb 03 07:00:50 crc kubenswrapper[4998]: I0203 07:00:50.234928 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-448lk" event={"ID":"27325089-5914-4776-8709-b7068b537775","Type":"ContainerStarted","Data":"268ff09c1194d4d1861a93017ac03f6d911d7083d8305575a50a91b2966f7dd7"}
Feb 03 07:00:50 crc kubenswrapper[4998]: I0203 07:00:50.250945 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-6968d8fdc4-xl925" podStartSLOduration=2.250929638 podStartE2EDuration="2.250929638s" podCreationTimestamp="2026-02-03 07:00:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:00:50.246940934 +0000 UTC m=+888.533634760" watchObservedRunningTime="2026-02-03 07:00:50.250929638 +0000 UTC m=+888.537623434"
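The two failed SetUp attempts above show durationBeforeRetry doubling, 500ms and then 1s, while the referenced secrets do not yet exist; once metallb-memberlist appears, the retry below succeeds. A minimal sketch of that doubling, capped backoff follows; the constants and function names are assumptions of this sketch, not kubelet code:

// mount_backoff.go - illustrative exponential backoff mirroring the
// 500ms -> 1s "durationBeforeRetry" progression logged above.
package main

import (
	"errors"
	"fmt"
	"time"
)

// mountWithBackoff retries setUp with a doubling delay, capped at maxDelay.
func mountWithBackoff(setUp func() error, maxDelay time.Duration) error {
	delay := 500 * time.Millisecond // initial durationBeforeRetry
	for {
		err := setUp()
		if err == nil {
			return nil
		}
		fmt.Printf("No retries permitted until %s (durationBeforeRetry %s): %v\n",
			time.Now().Add(delay).Format(time.RFC3339), delay, err)
		time.Sleep(delay)
		if delay *= 2; delay > maxDelay {
			delay = maxDelay
		}
	}
}

func main() {
	attempts := 0
	// Simulate the secret becoming available on the third attempt.
	err := mountWithBackoff(func() error {
		if attempts++; attempts < 3 {
			return errors.New(`secret "metallb-memberlist" not found`)
		}
		return nil
	}, 2*time.Minute)
	fmt.Println("mounted:", err == nil)
}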
Feb 03 07:00:50 crc kubenswrapper[4998]: I0203 07:00:50.655279 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/8330499e-9bd0-426b-bf63-9a7576e6d615-memberlist\") pod \"speaker-jbt2j\" (UID: \"8330499e-9bd0-426b-bf63-9a7576e6d615\") " pod="metallb-system/speaker-jbt2j"
Feb 03 07:00:50 crc kubenswrapper[4998]: I0203 07:00:50.662492 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/8330499e-9bd0-426b-bf63-9a7576e6d615-memberlist\") pod \"speaker-jbt2j\" (UID: \"8330499e-9bd0-426b-bf63-9a7576e6d615\") " pod="metallb-system/speaker-jbt2j"
Feb 03 07:00:50 crc kubenswrapper[4998]: I0203 07:00:50.798740 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-jbt2j"
Feb 03 07:00:50 crc kubenswrapper[4998]: W0203 07:00:50.866977 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8330499e_9bd0_426b_bf63_9a7576e6d615.slice/crio-3d810056fb76653653ddc6a406a4620847ab5831df6393c6e168c4fbd02e222d WatchSource:0}: Error finding container 3d810056fb76653653ddc6a406a4620847ab5831df6393c6e168c4fbd02e222d: Status 404 returned error can't find the container with id 3d810056fb76653653ddc6a406a4620847ab5831df6393c6e168c4fbd02e222d
Feb 03 07:00:51 crc kubenswrapper[4998]: I0203 07:00:51.252035 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-jbt2j" event={"ID":"8330499e-9bd0-426b-bf63-9a7576e6d615","Type":"ContainerStarted","Data":"9adef75fba44c6e35299e59873f67c142b5b79951751dd9a28974fbf870576b1"}
Feb 03 07:00:51 crc kubenswrapper[4998]: I0203 07:00:51.252360 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-jbt2j" event={"ID":"8330499e-9bd0-426b-bf63-9a7576e6d615","Type":"ContainerStarted","Data":"3d810056fb76653653ddc6a406a4620847ab5831df6393c6e168c4fbd02e222d"}
Feb 03 07:00:52 crc kubenswrapper[4998]: I0203 07:00:52.263187 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-jbt2j" event={"ID":"8330499e-9bd0-426b-bf63-9a7576e6d615","Type":"ContainerStarted","Data":"d4edb2dc260899a97f2678e57c5d253b7f12aa7bf317d6e5dc62ddcd98c4dbe1"}
Feb 03 07:00:52 crc kubenswrapper[4998]: I0203 07:00:52.264015 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-jbt2j"
Feb 03 07:00:52 crc kubenswrapper[4998]: I0203 07:00:52.277898 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-jbt2j" podStartSLOduration=4.277881508 podStartE2EDuration="4.277881508s" podCreationTimestamp="2026-02-03 07:00:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:00:52.27553324 +0000 UTC m=+890.562227046" watchObservedRunningTime="2026-02-03 07:00:52.277881508 +0000 UTC m=+890.564575314"
Feb 03 07:00:57 crc kubenswrapper[4998]: I0203 07:00:57.314415 4998 generic.go:334] "Generic (PLEG): container finished" podID="fbe0ed58-3a38-4039-b0db-6c8e52675fe6" containerID="df93b5d8fe887c002a61e1d8c79b52e8ed9e5b132bff3cb6425904e64a631636" exitCode=0
Feb 03 07:00:57 crc kubenswrapper[4998]: I0203 07:00:57.314526 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4zdf4" event={"ID":"fbe0ed58-3a38-4039-b0db-6c8e52675fe6","Type":"ContainerDied","Data":"df93b5d8fe887c002a61e1d8c79b52e8ed9e5b132bff3cb6425904e64a631636"}
Feb 03 07:00:57 crc kubenswrapper[4998]: I0203 07:00:57.317127 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-448lk" event={"ID":"27325089-5914-4776-8709-b7068b537775","Type":"ContainerStarted","Data":"70e461e51a5835fd19b3c12924bc996ff86e172e854079be4bf9205fe37a3d69"}
Feb 03 07:00:57 crc kubenswrapper[4998]: I0203 07:00:57.317325 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-448lk"
Feb 03 07:00:57 crc kubenswrapper[4998]: I0203 07:00:57.378357 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-448lk" podStartSLOduration=1.88267862 podStartE2EDuration="9.378324984s" podCreationTimestamp="2026-02-03 07:00:48 +0000 UTC" firstStartedPulling="2026-02-03 07:00:49.428176246 +0000 UTC m=+887.714870052" lastFinishedPulling="2026-02-03 07:00:56.92382261 +0000 UTC m=+895.210516416" observedRunningTime="2026-02-03 07:00:57.369611865 +0000 UTC m=+895.656305691" watchObservedRunningTime="2026-02-03 07:00:57.378324984 +0000 UTC m=+895.665018810"
Feb 03 07:00:58 crc kubenswrapper[4998]: I0203 07:00:58.326023 4998 generic.go:334] "Generic (PLEG): container finished" podID="fbe0ed58-3a38-4039-b0db-6c8e52675fe6" containerID="8a0811199f08612cd8b95dd1a320afdcf7f82166a4d8c60763902ad8fbc0ce24" exitCode=0
Feb 03 07:00:58 crc kubenswrapper[4998]: I0203 07:00:58.326155 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4zdf4" event={"ID":"fbe0ed58-3a38-4039-b0db-6c8e52675fe6","Type":"ContainerDied","Data":"8a0811199f08612cd8b95dd1a320afdcf7f82166a4d8c60763902ad8fbc0ce24"}
Feb 03 07:00:59 crc kubenswrapper[4998]: I0203 07:00:59.321564 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-6968d8fdc4-xl925"
Feb 03 07:00:59 crc kubenswrapper[4998]: I0203 07:00:59.334005 4998 generic.go:334] "Generic (PLEG): container finished" podID="fbe0ed58-3a38-4039-b0db-6c8e52675fe6" containerID="b575466616092f933c4ab6fed9124ff8590fc5cffe107a6f6f2414d8f1a39894" exitCode=0
Feb 03 07:00:59 crc kubenswrapper[4998]: I0203 07:00:59.334064 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4zdf4" event={"ID":"fbe0ed58-3a38-4039-b0db-6c8e52675fe6","Type":"ContainerDied","Data":"b575466616092f933c4ab6fed9124ff8590fc5cffe107a6f6f2414d8f1a39894"}
Feb 03 07:01:00 crc kubenswrapper[4998]: I0203 07:01:00.342824 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4zdf4" event={"ID":"fbe0ed58-3a38-4039-b0db-6c8e52675fe6","Type":"ContainerStarted","Data":"dcbeb176523aed1302e9f5cc27d9eaac6bf46541b122278d9afe5254a4b357ec"}
Feb 03 07:01:00 crc kubenswrapper[4998]: I0203 07:01:00.343123 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4zdf4" event={"ID":"fbe0ed58-3a38-4039-b0db-6c8e52675fe6","Type":"ContainerStarted","Data":"e40e0d6e859d31fe7b7ac07fc2c061d189d9d81cdb1de7da1341450db3204f60"}
Feb 03 07:01:00 crc kubenswrapper[4998]: I0203 07:01:00.343137 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4zdf4" event={"ID":"fbe0ed58-3a38-4039-b0db-6c8e52675fe6","Type":"ContainerStarted","Data":"4a01f11f027312519e7696188dccf92b5997816093d9a38aeb6f316fe11514c2"}
Feb 03 07:01:00 crc kubenswrapper[4998]: I0203 07:01:00.343147 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4zdf4" event={"ID":"fbe0ed58-3a38-4039-b0db-6c8e52675fe6","Type":"ContainerStarted","Data":"a4afdb254deea614f6cbb8c7e736e407ceacd5c580e71b504badc2b5e46d42d1"}
Feb 03 07:01:00 crc kubenswrapper[4998]: I0203 07:01:00.343157 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4zdf4" event={"ID":"fbe0ed58-3a38-4039-b0db-6c8e52675fe6","Type":"ContainerStarted","Data":"a2104f0d25919aae2fd124b52f00091b98a3d73b67acbd2bb1595ae4e800fc5f"}
Feb 03 07:01:01 crc kubenswrapper[4998]: I0203 07:01:01.353457 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4zdf4" event={"ID":"fbe0ed58-3a38-4039-b0db-6c8e52675fe6","Type":"ContainerStarted","Data":"5034c36ab5265d838074fdc6510efabab61c461db35269be240d32d1eebd8f1b"}
Feb 03 07:01:01 crc kubenswrapper[4998]: I0203 07:01:01.353644 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-4zdf4"
Feb 03 07:01:01 crc kubenswrapper[4998]: I0203 07:01:01.374917 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-4zdf4" podStartSLOduration=5.800369953 podStartE2EDuration="13.374903353s" podCreationTimestamp="2026-02-03 07:00:48 +0000 UTC" firstStartedPulling="2026-02-03 07:00:49.384106976 +0000 UTC m=+887.670800782" lastFinishedPulling="2026-02-03 07:00:56.958640376 +0000 UTC m=+895.245334182" observedRunningTime="2026-02-03 07:01:01.373422111 +0000 UTC m=+899.660115927" watchObservedRunningTime="2026-02-03 07:01:01.374903353 +0000 UTC m=+899.661597159"
Feb 03 07:01:04 crc kubenswrapper[4998]: I0203 07:01:04.203007 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-4zdf4"
Feb 03 07:01:04 crc kubenswrapper[4998]: I0203 07:01:04.244205 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-4zdf4"
Feb 03 07:01:07 crc kubenswrapper[4998]: I0203 07:01:07.084451 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-tl8gv"]
Feb 03 07:01:07 crc kubenswrapper[4998]: I0203 07:01:07.086169 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tl8gv"
Feb 03 07:01:07 crc kubenswrapper[4998]: I0203 07:01:07.101377 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f49d8a9-3796-4539-88f1-fd1305882ed0-catalog-content\") pod \"community-operators-tl8gv\" (UID: \"2f49d8a9-3796-4539-88f1-fd1305882ed0\") " pod="openshift-marketplace/community-operators-tl8gv"
Feb 03 07:01:07 crc kubenswrapper[4998]: I0203 07:01:07.101866 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-blw55\" (UniqueName: \"kubernetes.io/projected/2f49d8a9-3796-4539-88f1-fd1305882ed0-kube-api-access-blw55\") pod \"community-operators-tl8gv\" (UID: \"2f49d8a9-3796-4539-88f1-fd1305882ed0\") " pod="openshift-marketplace/community-operators-tl8gv"
Feb 03 07:01:07 crc kubenswrapper[4998]: I0203 07:01:07.101929 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f49d8a9-3796-4539-88f1-fd1305882ed0-utilities\") pod \"community-operators-tl8gv\" (UID: \"2f49d8a9-3796-4539-88f1-fd1305882ed0\") " pod="openshift-marketplace/community-operators-tl8gv"
Feb 03 07:01:07 crc kubenswrapper[4998]: I0203 07:01:07.104292 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tl8gv"]
Feb 03 07:01:07 crc kubenswrapper[4998]: I0203 07:01:07.202485 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f49d8a9-3796-4539-88f1-fd1305882ed0-catalog-content\") pod \"community-operators-tl8gv\" (UID: \"2f49d8a9-3796-4539-88f1-fd1305882ed0\") " pod="openshift-marketplace/community-operators-tl8gv"
Feb 03 07:01:07 crc kubenswrapper[4998]: I0203 07:01:07.202581 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-blw55\" (UniqueName: \"kubernetes.io/projected/2f49d8a9-3796-4539-88f1-fd1305882ed0-kube-api-access-blw55\") pod \"community-operators-tl8gv\" (UID: \"2f49d8a9-3796-4539-88f1-fd1305882ed0\") " pod="openshift-marketplace/community-operators-tl8gv"
Feb 03 07:01:07 crc kubenswrapper[4998]: I0203 07:01:07.202603 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f49d8a9-3796-4539-88f1-fd1305882ed0-utilities\") pod \"community-operators-tl8gv\" (UID: \"2f49d8a9-3796-4539-88f1-fd1305882ed0\") " pod="openshift-marketplace/community-operators-tl8gv"
Feb 03 07:01:07 crc kubenswrapper[4998]: I0203 07:01:07.203044 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f49d8a9-3796-4539-88f1-fd1305882ed0-catalog-content\") pod \"community-operators-tl8gv\" (UID: \"2f49d8a9-3796-4539-88f1-fd1305882ed0\") " pod="openshift-marketplace/community-operators-tl8gv"
Feb 03 07:01:07 crc kubenswrapper[4998]: I0203 07:01:07.203061 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f49d8a9-3796-4539-88f1-fd1305882ed0-utilities\") pod \"community-operators-tl8gv\" (UID: \"2f49d8a9-3796-4539-88f1-fd1305882ed0\") " pod="openshift-marketplace/community-operators-tl8gv"
Feb 03 07:01:07 crc kubenswrapper[4998]: I0203 07:01:07.223437 4998 operation_generator.go:637]
"MountVolume.SetUp succeeded for volume \"kube-api-access-blw55\" (UniqueName: \"kubernetes.io/projected/2f49d8a9-3796-4539-88f1-fd1305882ed0-kube-api-access-blw55\") pod \"community-operators-tl8gv\" (UID: \"2f49d8a9-3796-4539-88f1-fd1305882ed0\") " pod="openshift-marketplace/community-operators-tl8gv" Feb 03 07:01:07 crc kubenswrapper[4998]: I0203 07:01:07.406754 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tl8gv" Feb 03 07:01:07 crc kubenswrapper[4998]: I0203 07:01:07.643323 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tl8gv"] Feb 03 07:01:07 crc kubenswrapper[4998]: W0203 07:01:07.655431 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2f49d8a9_3796_4539_88f1_fd1305882ed0.slice/crio-000bdcef4c7ea9eb6a572b779272a49818e579bcf1502bbe8736442d9c3b1821 WatchSource:0}: Error finding container 000bdcef4c7ea9eb6a572b779272a49818e579bcf1502bbe8736442d9c3b1821: Status 404 returned error can't find the container with id 000bdcef4c7ea9eb6a572b779272a49818e579bcf1502bbe8736442d9c3b1821 Feb 03 07:01:08 crc kubenswrapper[4998]: I0203 07:01:08.399610 4998 generic.go:334] "Generic (PLEG): container finished" podID="2f49d8a9-3796-4539-88f1-fd1305882ed0" containerID="a0be8cb58c4040d31aed8b7be51ebe339f46953f1068edb6df5da41712505ab1" exitCode=0 Feb 03 07:01:08 crc kubenswrapper[4998]: I0203 07:01:08.399655 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tl8gv" event={"ID":"2f49d8a9-3796-4539-88f1-fd1305882ed0","Type":"ContainerDied","Data":"a0be8cb58c4040d31aed8b7be51ebe339f46953f1068edb6df5da41712505ab1"} Feb 03 07:01:08 crc kubenswrapper[4998]: I0203 07:01:08.399678 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tl8gv" event={"ID":"2f49d8a9-3796-4539-88f1-fd1305882ed0","Type":"ContainerStarted","Data":"000bdcef4c7ea9eb6a572b779272a49818e579bcf1502bbe8736442d9c3b1821"} Feb 03 07:01:09 crc kubenswrapper[4998]: I0203 07:01:09.205289 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-4zdf4" Feb 03 07:01:09 crc kubenswrapper[4998]: I0203 07:01:09.219375 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-448lk" Feb 03 07:01:09 crc kubenswrapper[4998]: I0203 07:01:09.406451 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tl8gv" event={"ID":"2f49d8a9-3796-4539-88f1-fd1305882ed0","Type":"ContainerStarted","Data":"4bd8591b335e787b489c9922a18da7c74734676b3b6c7cdf85dea4e57676c35a"} Feb 03 07:01:10 crc kubenswrapper[4998]: I0203 07:01:10.414949 4998 generic.go:334] "Generic (PLEG): container finished" podID="2f49d8a9-3796-4539-88f1-fd1305882ed0" containerID="4bd8591b335e787b489c9922a18da7c74734676b3b6c7cdf85dea4e57676c35a" exitCode=0 Feb 03 07:01:10 crc kubenswrapper[4998]: I0203 07:01:10.414988 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tl8gv" event={"ID":"2f49d8a9-3796-4539-88f1-fd1305882ed0","Type":"ContainerDied","Data":"4bd8591b335e787b489c9922a18da7c74734676b3b6c7cdf85dea4e57676c35a"} Feb 03 07:01:10 crc kubenswrapper[4998]: I0203 07:01:10.802200 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-jbt2j" 
Feb 03 07:01:11 crc kubenswrapper[4998]: I0203 07:01:11.422663 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tl8gv" event={"ID":"2f49d8a9-3796-4539-88f1-fd1305882ed0","Type":"ContainerStarted","Data":"e9245f836685f175133f5b2a2db32f471b8ba26fa7cecffa4cca2b1078b51894"} Feb 03 07:01:11 crc kubenswrapper[4998]: I0203 07:01:11.441192 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-tl8gv" podStartSLOduration=1.964850512 podStartE2EDuration="4.441171177s" podCreationTimestamp="2026-02-03 07:01:07 +0000 UTC" firstStartedPulling="2026-02-03 07:01:08.400805756 +0000 UTC m=+906.687499562" lastFinishedPulling="2026-02-03 07:01:10.877126421 +0000 UTC m=+909.163820227" observedRunningTime="2026-02-03 07:01:11.438282454 +0000 UTC m=+909.724976260" watchObservedRunningTime="2026-02-03 07:01:11.441171177 +0000 UTC m=+909.727864983" Feb 03 07:01:12 crc kubenswrapper[4998]: I0203 07:01:12.499222 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e58kcjh"] Feb 03 07:01:12 crc kubenswrapper[4998]: I0203 07:01:12.500886 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e58kcjh" Feb 03 07:01:12 crc kubenswrapper[4998]: I0203 07:01:12.507772 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Feb 03 07:01:12 crc kubenswrapper[4998]: I0203 07:01:12.517882 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e58kcjh"] Feb 03 07:01:12 crc kubenswrapper[4998]: I0203 07:01:12.668422 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdqr5\" (UniqueName: \"kubernetes.io/projected/35e5b9ab-9f2d-4936-a208-ff1897002f2a-kube-api-access-gdqr5\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e58kcjh\" (UID: \"35e5b9ab-9f2d-4936-a208-ff1897002f2a\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e58kcjh" Feb 03 07:01:12 crc kubenswrapper[4998]: I0203 07:01:12.668466 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/35e5b9ab-9f2d-4936-a208-ff1897002f2a-bundle\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e58kcjh\" (UID: \"35e5b9ab-9f2d-4936-a208-ff1897002f2a\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e58kcjh" Feb 03 07:01:12 crc kubenswrapper[4998]: I0203 07:01:12.668627 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/35e5b9ab-9f2d-4936-a208-ff1897002f2a-util\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e58kcjh\" (UID: \"35e5b9ab-9f2d-4936-a208-ff1897002f2a\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e58kcjh" Feb 03 07:01:12 crc kubenswrapper[4998]: I0203 07:01:12.769704 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdqr5\" (UniqueName: \"kubernetes.io/projected/35e5b9ab-9f2d-4936-a208-ff1897002f2a-kube-api-access-gdqr5\") pod 
\"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e58kcjh\" (UID: \"35e5b9ab-9f2d-4936-a208-ff1897002f2a\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e58kcjh" Feb 03 07:01:12 crc kubenswrapper[4998]: I0203 07:01:12.769755 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/35e5b9ab-9f2d-4936-a208-ff1897002f2a-bundle\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e58kcjh\" (UID: \"35e5b9ab-9f2d-4936-a208-ff1897002f2a\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e58kcjh" Feb 03 07:01:12 crc kubenswrapper[4998]: I0203 07:01:12.769832 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/35e5b9ab-9f2d-4936-a208-ff1897002f2a-util\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e58kcjh\" (UID: \"35e5b9ab-9f2d-4936-a208-ff1897002f2a\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e58kcjh" Feb 03 07:01:12 crc kubenswrapper[4998]: I0203 07:01:12.770409 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/35e5b9ab-9f2d-4936-a208-ff1897002f2a-util\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e58kcjh\" (UID: \"35e5b9ab-9f2d-4936-a208-ff1897002f2a\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e58kcjh" Feb 03 07:01:12 crc kubenswrapper[4998]: I0203 07:01:12.770406 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/35e5b9ab-9f2d-4936-a208-ff1897002f2a-bundle\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e58kcjh\" (UID: \"35e5b9ab-9f2d-4936-a208-ff1897002f2a\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e58kcjh" Feb 03 07:01:12 crc kubenswrapper[4998]: I0203 07:01:12.790134 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdqr5\" (UniqueName: \"kubernetes.io/projected/35e5b9ab-9f2d-4936-a208-ff1897002f2a-kube-api-access-gdqr5\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e58kcjh\" (UID: \"35e5b9ab-9f2d-4936-a208-ff1897002f2a\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e58kcjh" Feb 03 07:01:12 crc kubenswrapper[4998]: I0203 07:01:12.817750 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e58kcjh" Feb 03 07:01:13 crc kubenswrapper[4998]: I0203 07:01:13.055839 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e58kcjh"] Feb 03 07:01:13 crc kubenswrapper[4998]: E0203 07:01:13.367091 4998 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod35e5b9ab_9f2d_4936_a208_ff1897002f2a.slice/crio-conmon-9da8dd00ac3cd80c54295d254b9622d189b0f87afff66c743d73b91f1070a68c.scope\": RecentStats: unable to find data in memory cache]" Feb 03 07:01:13 crc kubenswrapper[4998]: I0203 07:01:13.433797 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e58kcjh" event={"ID":"35e5b9ab-9f2d-4936-a208-ff1897002f2a","Type":"ContainerDied","Data":"9da8dd00ac3cd80c54295d254b9622d189b0f87afff66c743d73b91f1070a68c"} Feb 03 07:01:13 crc kubenswrapper[4998]: I0203 07:01:13.433824 4998 generic.go:334] "Generic (PLEG): container finished" podID="35e5b9ab-9f2d-4936-a208-ff1897002f2a" containerID="9da8dd00ac3cd80c54295d254b9622d189b0f87afff66c743d73b91f1070a68c" exitCode=0 Feb 03 07:01:13 crc kubenswrapper[4998]: I0203 07:01:13.433893 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e58kcjh" event={"ID":"35e5b9ab-9f2d-4936-a208-ff1897002f2a","Type":"ContainerStarted","Data":"b4abb2417e80449a7ee817fb7e4f29fafda4f59773a9058b6970a250dddda187"} Feb 03 07:01:17 crc kubenswrapper[4998]: I0203 07:01:17.407891 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-tl8gv" Feb 03 07:01:17 crc kubenswrapper[4998]: I0203 07:01:17.408411 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-tl8gv" Feb 03 07:01:17 crc kubenswrapper[4998]: I0203 07:01:17.448371 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-tl8gv" Feb 03 07:01:17 crc kubenswrapper[4998]: I0203 07:01:17.458287 4998 generic.go:334] "Generic (PLEG): container finished" podID="35e5b9ab-9f2d-4936-a208-ff1897002f2a" containerID="48daf02d46414c850a1ebc50a1a01f129018ac04d7ee1e9d633d653fd5218f27" exitCode=0 Feb 03 07:01:17 crc kubenswrapper[4998]: I0203 07:01:17.458483 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e58kcjh" event={"ID":"35e5b9ab-9f2d-4936-a208-ff1897002f2a","Type":"ContainerDied","Data":"48daf02d46414c850a1ebc50a1a01f129018ac04d7ee1e9d633d653fd5218f27"} Feb 03 07:01:17 crc kubenswrapper[4998]: I0203 07:01:17.507611 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-tl8gv" Feb 03 07:01:18 crc kubenswrapper[4998]: I0203 07:01:18.260406 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-pvsrq"] Feb 03 07:01:18 crc kubenswrapper[4998]: I0203 07:01:18.261645 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-pvsrq" Feb 03 07:01:18 crc kubenswrapper[4998]: I0203 07:01:18.270693 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pvsrq"] Feb 03 07:01:18 crc kubenswrapper[4998]: I0203 07:01:18.340570 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mcth\" (UniqueName: \"kubernetes.io/projected/c75f2619-952c-47cd-b6fe-17c655f2b81c-kube-api-access-6mcth\") pod \"certified-operators-pvsrq\" (UID: \"c75f2619-952c-47cd-b6fe-17c655f2b81c\") " pod="openshift-marketplace/certified-operators-pvsrq" Feb 03 07:01:18 crc kubenswrapper[4998]: I0203 07:01:18.340634 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c75f2619-952c-47cd-b6fe-17c655f2b81c-catalog-content\") pod \"certified-operators-pvsrq\" (UID: \"c75f2619-952c-47cd-b6fe-17c655f2b81c\") " pod="openshift-marketplace/certified-operators-pvsrq" Feb 03 07:01:18 crc kubenswrapper[4998]: I0203 07:01:18.340670 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c75f2619-952c-47cd-b6fe-17c655f2b81c-utilities\") pod \"certified-operators-pvsrq\" (UID: \"c75f2619-952c-47cd-b6fe-17c655f2b81c\") " pod="openshift-marketplace/certified-operators-pvsrq" Feb 03 07:01:18 crc kubenswrapper[4998]: I0203 07:01:18.441629 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mcth\" (UniqueName: \"kubernetes.io/projected/c75f2619-952c-47cd-b6fe-17c655f2b81c-kube-api-access-6mcth\") pod \"certified-operators-pvsrq\" (UID: \"c75f2619-952c-47cd-b6fe-17c655f2b81c\") " pod="openshift-marketplace/certified-operators-pvsrq" Feb 03 07:01:18 crc kubenswrapper[4998]: I0203 07:01:18.441750 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c75f2619-952c-47cd-b6fe-17c655f2b81c-catalog-content\") pod \"certified-operators-pvsrq\" (UID: \"c75f2619-952c-47cd-b6fe-17c655f2b81c\") " pod="openshift-marketplace/certified-operators-pvsrq" Feb 03 07:01:18 crc kubenswrapper[4998]: I0203 07:01:18.441818 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c75f2619-952c-47cd-b6fe-17c655f2b81c-utilities\") pod \"certified-operators-pvsrq\" (UID: \"c75f2619-952c-47cd-b6fe-17c655f2b81c\") " pod="openshift-marketplace/certified-operators-pvsrq" Feb 03 07:01:18 crc kubenswrapper[4998]: I0203 07:01:18.442405 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c75f2619-952c-47cd-b6fe-17c655f2b81c-catalog-content\") pod \"certified-operators-pvsrq\" (UID: \"c75f2619-952c-47cd-b6fe-17c655f2b81c\") " pod="openshift-marketplace/certified-operators-pvsrq" Feb 03 07:01:18 crc kubenswrapper[4998]: I0203 07:01:18.442462 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c75f2619-952c-47cd-b6fe-17c655f2b81c-utilities\") pod \"certified-operators-pvsrq\" (UID: \"c75f2619-952c-47cd-b6fe-17c655f2b81c\") " pod="openshift-marketplace/certified-operators-pvsrq" Feb 03 07:01:18 crc kubenswrapper[4998]: I0203 07:01:18.463167 4998 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-6mcth\" (UniqueName: \"kubernetes.io/projected/c75f2619-952c-47cd-b6fe-17c655f2b81c-kube-api-access-6mcth\") pod \"certified-operators-pvsrq\" (UID: \"c75f2619-952c-47cd-b6fe-17c655f2b81c\") " pod="openshift-marketplace/certified-operators-pvsrq" Feb 03 07:01:18 crc kubenswrapper[4998]: I0203 07:01:18.466593 4998 generic.go:334] "Generic (PLEG): container finished" podID="35e5b9ab-9f2d-4936-a208-ff1897002f2a" containerID="bb8fa512448a030108fec607026955efbfde58153caee756e8aadec5068639fa" exitCode=0 Feb 03 07:01:18 crc kubenswrapper[4998]: I0203 07:01:18.467686 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e58kcjh" event={"ID":"35e5b9ab-9f2d-4936-a208-ff1897002f2a","Type":"ContainerDied","Data":"bb8fa512448a030108fec607026955efbfde58153caee756e8aadec5068639fa"} Feb 03 07:01:18 crc kubenswrapper[4998]: I0203 07:01:18.578497 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pvsrq" Feb 03 07:01:19 crc kubenswrapper[4998]: I0203 07:01:19.012722 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pvsrq"] Feb 03 07:01:19 crc kubenswrapper[4998]: W0203 07:01:19.017404 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc75f2619_952c_47cd_b6fe_17c655f2b81c.slice/crio-f5f3b301b75f93e78a35ad7597dadcaff1fbc16c1327235416a6bc319cb5f41f WatchSource:0}: Error finding container f5f3b301b75f93e78a35ad7597dadcaff1fbc16c1327235416a6bc319cb5f41f: Status 404 returned error can't find the container with id f5f3b301b75f93e78a35ad7597dadcaff1fbc16c1327235416a6bc319cb5f41f Feb 03 07:01:19 crc kubenswrapper[4998]: I0203 07:01:19.474116 4998 generic.go:334] "Generic (PLEG): container finished" podID="c75f2619-952c-47cd-b6fe-17c655f2b81c" containerID="49d0c93e1ab6ff52cb51f56891aa0b04fdb2eeb9a476c42139d37bcdc794f6e4" exitCode=0 Feb 03 07:01:19 crc kubenswrapper[4998]: I0203 07:01:19.474226 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pvsrq" event={"ID":"c75f2619-952c-47cd-b6fe-17c655f2b81c","Type":"ContainerDied","Data":"49d0c93e1ab6ff52cb51f56891aa0b04fdb2eeb9a476c42139d37bcdc794f6e4"} Feb 03 07:01:19 crc kubenswrapper[4998]: I0203 07:01:19.474605 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pvsrq" event={"ID":"c75f2619-952c-47cd-b6fe-17c655f2b81c","Type":"ContainerStarted","Data":"f5f3b301b75f93e78a35ad7597dadcaff1fbc16c1327235416a6bc319cb5f41f"} Feb 03 07:01:19 crc kubenswrapper[4998]: I0203 07:01:19.714566 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e58kcjh" Feb 03 07:01:19 crc kubenswrapper[4998]: I0203 07:01:19.882212 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/35e5b9ab-9f2d-4936-a208-ff1897002f2a-util\") pod \"35e5b9ab-9f2d-4936-a208-ff1897002f2a\" (UID: \"35e5b9ab-9f2d-4936-a208-ff1897002f2a\") " Feb 03 07:01:19 crc kubenswrapper[4998]: I0203 07:01:19.882327 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gdqr5\" (UniqueName: \"kubernetes.io/projected/35e5b9ab-9f2d-4936-a208-ff1897002f2a-kube-api-access-gdqr5\") pod \"35e5b9ab-9f2d-4936-a208-ff1897002f2a\" (UID: \"35e5b9ab-9f2d-4936-a208-ff1897002f2a\") " Feb 03 07:01:19 crc kubenswrapper[4998]: I0203 07:01:19.882492 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/35e5b9ab-9f2d-4936-a208-ff1897002f2a-bundle\") pod \"35e5b9ab-9f2d-4936-a208-ff1897002f2a\" (UID: \"35e5b9ab-9f2d-4936-a208-ff1897002f2a\") " Feb 03 07:01:19 crc kubenswrapper[4998]: I0203 07:01:19.883801 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35e5b9ab-9f2d-4936-a208-ff1897002f2a-bundle" (OuterVolumeSpecName: "bundle") pod "35e5b9ab-9f2d-4936-a208-ff1897002f2a" (UID: "35e5b9ab-9f2d-4936-a208-ff1897002f2a"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:01:19 crc kubenswrapper[4998]: I0203 07:01:19.890836 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35e5b9ab-9f2d-4936-a208-ff1897002f2a-kube-api-access-gdqr5" (OuterVolumeSpecName: "kube-api-access-gdqr5") pod "35e5b9ab-9f2d-4936-a208-ff1897002f2a" (UID: "35e5b9ab-9f2d-4936-a208-ff1897002f2a"). InnerVolumeSpecName "kube-api-access-gdqr5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:01:19 crc kubenswrapper[4998]: I0203 07:01:19.898050 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35e5b9ab-9f2d-4936-a208-ff1897002f2a-util" (OuterVolumeSpecName: "util") pod "35e5b9ab-9f2d-4936-a208-ff1897002f2a" (UID: "35e5b9ab-9f2d-4936-a208-ff1897002f2a"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:01:19 crc kubenswrapper[4998]: I0203 07:01:19.983895 4998 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/35e5b9ab-9f2d-4936-a208-ff1897002f2a-util\") on node \"crc\" DevicePath \"\"" Feb 03 07:01:19 crc kubenswrapper[4998]: I0203 07:01:19.983948 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gdqr5\" (UniqueName: \"kubernetes.io/projected/35e5b9ab-9f2d-4936-a208-ff1897002f2a-kube-api-access-gdqr5\") on node \"crc\" DevicePath \"\"" Feb 03 07:01:19 crc kubenswrapper[4998]: I0203 07:01:19.983960 4998 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/35e5b9ab-9f2d-4936-a208-ff1897002f2a-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:01:20 crc kubenswrapper[4998]: I0203 07:01:20.482431 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e58kcjh" event={"ID":"35e5b9ab-9f2d-4936-a208-ff1897002f2a","Type":"ContainerDied","Data":"b4abb2417e80449a7ee817fb7e4f29fafda4f59773a9058b6970a250dddda187"} Feb 03 07:01:20 crc kubenswrapper[4998]: I0203 07:01:20.482810 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b4abb2417e80449a7ee817fb7e4f29fafda4f59773a9058b6970a250dddda187" Feb 03 07:01:20 crc kubenswrapper[4998]: I0203 07:01:20.482712 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e58kcjh" Feb 03 07:01:21 crc kubenswrapper[4998]: I0203 07:01:21.053649 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tl8gv"] Feb 03 07:01:21 crc kubenswrapper[4998]: I0203 07:01:21.053940 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-tl8gv" podUID="2f49d8a9-3796-4539-88f1-fd1305882ed0" containerName="registry-server" containerID="cri-o://e9245f836685f175133f5b2a2db32f471b8ba26fa7cecffa4cca2b1078b51894" gracePeriod=2 Feb 03 07:01:21 crc kubenswrapper[4998]: I0203 07:01:21.400617 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-tl8gv" Feb 03 07:01:21 crc kubenswrapper[4998]: I0203 07:01:21.495019 4998 generic.go:334] "Generic (PLEG): container finished" podID="c75f2619-952c-47cd-b6fe-17c655f2b81c" containerID="1f0a2868405fdf99efa0e5e928963652431016104366164e6ba57544aba60365" exitCode=0 Feb 03 07:01:21 crc kubenswrapper[4998]: I0203 07:01:21.495154 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pvsrq" event={"ID":"c75f2619-952c-47cd-b6fe-17c655f2b81c","Type":"ContainerDied","Data":"1f0a2868405fdf99efa0e5e928963652431016104366164e6ba57544aba60365"} Feb 03 07:01:21 crc kubenswrapper[4998]: I0203 07:01:21.499291 4998 generic.go:334] "Generic (PLEG): container finished" podID="2f49d8a9-3796-4539-88f1-fd1305882ed0" containerID="e9245f836685f175133f5b2a2db32f471b8ba26fa7cecffa4cca2b1078b51894" exitCode=0 Feb 03 07:01:21 crc kubenswrapper[4998]: I0203 07:01:21.499328 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tl8gv" event={"ID":"2f49d8a9-3796-4539-88f1-fd1305882ed0","Type":"ContainerDied","Data":"e9245f836685f175133f5b2a2db32f471b8ba26fa7cecffa4cca2b1078b51894"} Feb 03 07:01:21 crc kubenswrapper[4998]: I0203 07:01:21.499350 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tl8gv" event={"ID":"2f49d8a9-3796-4539-88f1-fd1305882ed0","Type":"ContainerDied","Data":"000bdcef4c7ea9eb6a572b779272a49818e579bcf1502bbe8736442d9c3b1821"} Feb 03 07:01:21 crc kubenswrapper[4998]: I0203 07:01:21.499366 4998 scope.go:117] "RemoveContainer" containerID="e9245f836685f175133f5b2a2db32f471b8ba26fa7cecffa4cca2b1078b51894" Feb 03 07:01:21 crc kubenswrapper[4998]: I0203 07:01:21.499474 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-tl8gv" Feb 03 07:01:21 crc kubenswrapper[4998]: I0203 07:01:21.525206 4998 scope.go:117] "RemoveContainer" containerID="4bd8591b335e787b489c9922a18da7c74734676b3b6c7cdf85dea4e57676c35a" Feb 03 07:01:21 crc kubenswrapper[4998]: I0203 07:01:21.540182 4998 scope.go:117] "RemoveContainer" containerID="a0be8cb58c4040d31aed8b7be51ebe339f46953f1068edb6df5da41712505ab1" Feb 03 07:01:21 crc kubenswrapper[4998]: I0203 07:01:21.557578 4998 scope.go:117] "RemoveContainer" containerID="e9245f836685f175133f5b2a2db32f471b8ba26fa7cecffa4cca2b1078b51894" Feb 03 07:01:21 crc kubenswrapper[4998]: E0203 07:01:21.557993 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e9245f836685f175133f5b2a2db32f471b8ba26fa7cecffa4cca2b1078b51894\": container with ID starting with e9245f836685f175133f5b2a2db32f471b8ba26fa7cecffa4cca2b1078b51894 not found: ID does not exist" containerID="e9245f836685f175133f5b2a2db32f471b8ba26fa7cecffa4cca2b1078b51894" Feb 03 07:01:21 crc kubenswrapper[4998]: I0203 07:01:21.558040 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9245f836685f175133f5b2a2db32f471b8ba26fa7cecffa4cca2b1078b51894"} err="failed to get container status \"e9245f836685f175133f5b2a2db32f471b8ba26fa7cecffa4cca2b1078b51894\": rpc error: code = NotFound desc = could not find container \"e9245f836685f175133f5b2a2db32f471b8ba26fa7cecffa4cca2b1078b51894\": container with ID starting with e9245f836685f175133f5b2a2db32f471b8ba26fa7cecffa4cca2b1078b51894 not found: ID does not exist" Feb 03 07:01:21 crc kubenswrapper[4998]: I0203 07:01:21.558073 4998 scope.go:117] "RemoveContainer" containerID="4bd8591b335e787b489c9922a18da7c74734676b3b6c7cdf85dea4e57676c35a" Feb 03 07:01:21 crc kubenswrapper[4998]: E0203 07:01:21.558387 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4bd8591b335e787b489c9922a18da7c74734676b3b6c7cdf85dea4e57676c35a\": container with ID starting with 4bd8591b335e787b489c9922a18da7c74734676b3b6c7cdf85dea4e57676c35a not found: ID does not exist" containerID="4bd8591b335e787b489c9922a18da7c74734676b3b6c7cdf85dea4e57676c35a" Feb 03 07:01:21 crc kubenswrapper[4998]: I0203 07:01:21.558415 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4bd8591b335e787b489c9922a18da7c74734676b3b6c7cdf85dea4e57676c35a"} err="failed to get container status \"4bd8591b335e787b489c9922a18da7c74734676b3b6c7cdf85dea4e57676c35a\": rpc error: code = NotFound desc = could not find container \"4bd8591b335e787b489c9922a18da7c74734676b3b6c7cdf85dea4e57676c35a\": container with ID starting with 4bd8591b335e787b489c9922a18da7c74734676b3b6c7cdf85dea4e57676c35a not found: ID does not exist" Feb 03 07:01:21 crc kubenswrapper[4998]: I0203 07:01:21.558441 4998 scope.go:117] "RemoveContainer" containerID="a0be8cb58c4040d31aed8b7be51ebe339f46953f1068edb6df5da41712505ab1" Feb 03 07:01:21 crc kubenswrapper[4998]: E0203 07:01:21.558709 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0be8cb58c4040d31aed8b7be51ebe339f46953f1068edb6df5da41712505ab1\": container with ID starting with a0be8cb58c4040d31aed8b7be51ebe339f46953f1068edb6df5da41712505ab1 not found: ID does not exist" containerID="a0be8cb58c4040d31aed8b7be51ebe339f46953f1068edb6df5da41712505ab1" 
Feb 03 07:01:21 crc kubenswrapper[4998]: I0203 07:01:21.558765 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0be8cb58c4040d31aed8b7be51ebe339f46953f1068edb6df5da41712505ab1"} err="failed to get container status \"a0be8cb58c4040d31aed8b7be51ebe339f46953f1068edb6df5da41712505ab1\": rpc error: code = NotFound desc = could not find container \"a0be8cb58c4040d31aed8b7be51ebe339f46953f1068edb6df5da41712505ab1\": container with ID starting with a0be8cb58c4040d31aed8b7be51ebe339f46953f1068edb6df5da41712505ab1 not found: ID does not exist" Feb 03 07:01:21 crc kubenswrapper[4998]: I0203 07:01:21.601326 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-blw55\" (UniqueName: \"kubernetes.io/projected/2f49d8a9-3796-4539-88f1-fd1305882ed0-kube-api-access-blw55\") pod \"2f49d8a9-3796-4539-88f1-fd1305882ed0\" (UID: \"2f49d8a9-3796-4539-88f1-fd1305882ed0\") " Feb 03 07:01:21 crc kubenswrapper[4998]: I0203 07:01:21.601465 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f49d8a9-3796-4539-88f1-fd1305882ed0-utilities\") pod \"2f49d8a9-3796-4539-88f1-fd1305882ed0\" (UID: \"2f49d8a9-3796-4539-88f1-fd1305882ed0\") " Feb 03 07:01:21 crc kubenswrapper[4998]: I0203 07:01:21.601562 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f49d8a9-3796-4539-88f1-fd1305882ed0-catalog-content\") pod \"2f49d8a9-3796-4539-88f1-fd1305882ed0\" (UID: \"2f49d8a9-3796-4539-88f1-fd1305882ed0\") " Feb 03 07:01:21 crc kubenswrapper[4998]: I0203 07:01:21.602311 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f49d8a9-3796-4539-88f1-fd1305882ed0-utilities" (OuterVolumeSpecName: "utilities") pod "2f49d8a9-3796-4539-88f1-fd1305882ed0" (UID: "2f49d8a9-3796-4539-88f1-fd1305882ed0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:01:21 crc kubenswrapper[4998]: I0203 07:01:21.606495 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f49d8a9-3796-4539-88f1-fd1305882ed0-kube-api-access-blw55" (OuterVolumeSpecName: "kube-api-access-blw55") pod "2f49d8a9-3796-4539-88f1-fd1305882ed0" (UID: "2f49d8a9-3796-4539-88f1-fd1305882ed0"). InnerVolumeSpecName "kube-api-access-blw55". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:01:21 crc kubenswrapper[4998]: I0203 07:01:21.650325 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f49d8a9-3796-4539-88f1-fd1305882ed0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2f49d8a9-3796-4539-88f1-fd1305882ed0" (UID: "2f49d8a9-3796-4539-88f1-fd1305882ed0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:01:21 crc kubenswrapper[4998]: I0203 07:01:21.702637 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f49d8a9-3796-4539-88f1-fd1305882ed0-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:01:21 crc kubenswrapper[4998]: I0203 07:01:21.702685 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-blw55\" (UniqueName: \"kubernetes.io/projected/2f49d8a9-3796-4539-88f1-fd1305882ed0-kube-api-access-blw55\") on node \"crc\" DevicePath \"\"" Feb 03 07:01:21 crc kubenswrapper[4998]: I0203 07:01:21.702695 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f49d8a9-3796-4539-88f1-fd1305882ed0-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:01:21 crc kubenswrapper[4998]: I0203 07:01:21.827762 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tl8gv"] Feb 03 07:01:21 crc kubenswrapper[4998]: I0203 07:01:21.832614 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-tl8gv"] Feb 03 07:01:22 crc kubenswrapper[4998]: I0203 07:01:22.452222 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f49d8a9-3796-4539-88f1-fd1305882ed0" path="/var/lib/kubelet/pods/2f49d8a9-3796-4539-88f1-fd1305882ed0/volumes" Feb 03 07:01:22 crc kubenswrapper[4998]: I0203 07:01:22.506941 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pvsrq" event={"ID":"c75f2619-952c-47cd-b6fe-17c655f2b81c","Type":"ContainerStarted","Data":"4c8655507237d8276b213ac9880becf18c5bd523934b284dfe35b86a019b9f88"} Feb 03 07:01:22 crc kubenswrapper[4998]: I0203 07:01:22.524645 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-pvsrq" podStartSLOduration=2.027234875 podStartE2EDuration="4.524627513s" podCreationTimestamp="2026-02-03 07:01:18 +0000 UTC" firstStartedPulling="2026-02-03 07:01:19.477015145 +0000 UTC m=+917.763708951" lastFinishedPulling="2026-02-03 07:01:21.974407773 +0000 UTC m=+920.261101589" observedRunningTime="2026-02-03 07:01:22.521910175 +0000 UTC m=+920.808603991" watchObservedRunningTime="2026-02-03 07:01:22.524627513 +0000 UTC m=+920.811321319" Feb 03 07:01:24 crc kubenswrapper[4998]: I0203 07:01:24.344610 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-2pstf"] Feb 03 07:01:24 crc kubenswrapper[4998]: E0203 07:01:24.345811 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35e5b9ab-9f2d-4936-a208-ff1897002f2a" containerName="pull" Feb 03 07:01:24 crc kubenswrapper[4998]: I0203 07:01:24.345881 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="35e5b9ab-9f2d-4936-a208-ff1897002f2a" containerName="pull" Feb 03 07:01:24 crc kubenswrapper[4998]: E0203 07:01:24.345935 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f49d8a9-3796-4539-88f1-fd1305882ed0" containerName="extract-content" Feb 03 07:01:24 crc kubenswrapper[4998]: I0203 07:01:24.346001 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f49d8a9-3796-4539-88f1-fd1305882ed0" containerName="extract-content" Feb 03 07:01:24 crc kubenswrapper[4998]: E0203 07:01:24.346073 4998 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="35e5b9ab-9f2d-4936-a208-ff1897002f2a" containerName="util" Feb 03 07:01:24 crc kubenswrapper[4998]: I0203 07:01:24.346136 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="35e5b9ab-9f2d-4936-a208-ff1897002f2a" containerName="util" Feb 03 07:01:24 crc kubenswrapper[4998]: E0203 07:01:24.346204 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f49d8a9-3796-4539-88f1-fd1305882ed0" containerName="extract-utilities" Feb 03 07:01:24 crc kubenswrapper[4998]: I0203 07:01:24.346270 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f49d8a9-3796-4539-88f1-fd1305882ed0" containerName="extract-utilities" Feb 03 07:01:24 crc kubenswrapper[4998]: E0203 07:01:24.346339 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35e5b9ab-9f2d-4936-a208-ff1897002f2a" containerName="extract" Feb 03 07:01:24 crc kubenswrapper[4998]: I0203 07:01:24.346411 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="35e5b9ab-9f2d-4936-a208-ff1897002f2a" containerName="extract" Feb 03 07:01:24 crc kubenswrapper[4998]: E0203 07:01:24.346479 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f49d8a9-3796-4539-88f1-fd1305882ed0" containerName="registry-server" Feb 03 07:01:24 crc kubenswrapper[4998]: I0203 07:01:24.346535 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f49d8a9-3796-4539-88f1-fd1305882ed0" containerName="registry-server" Feb 03 07:01:24 crc kubenswrapper[4998]: I0203 07:01:24.346720 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f49d8a9-3796-4539-88f1-fd1305882ed0" containerName="registry-server" Feb 03 07:01:24 crc kubenswrapper[4998]: I0203 07:01:24.346820 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="35e5b9ab-9f2d-4936-a208-ff1897002f2a" containerName="extract" Feb 03 07:01:24 crc kubenswrapper[4998]: I0203 07:01:24.347302 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-2pstf" Feb 03 07:01:24 crc kubenswrapper[4998]: I0203 07:01:24.349023 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt" Feb 03 07:01:24 crc kubenswrapper[4998]: I0203 07:01:24.349607 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt" Feb 03 07:01:24 crc kubenswrapper[4998]: I0203 07:01:24.349615 4998 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-p2r5b" Feb 03 07:01:24 crc kubenswrapper[4998]: I0203 07:01:24.367132 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-2pstf"] Feb 03 07:01:24 crc kubenswrapper[4998]: I0203 07:01:24.434139 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/999a9c93-98ce-4004-a6c7-552f645e4875-tmp\") pod \"cert-manager-operator-controller-manager-66c8bdd694-2pstf\" (UID: \"999a9c93-98ce-4004-a6c7-552f645e4875\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-2pstf" Feb 03 07:01:24 crc kubenswrapper[4998]: I0203 07:01:24.434186 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lk9sf\" (UniqueName: \"kubernetes.io/projected/999a9c93-98ce-4004-a6c7-552f645e4875-kube-api-access-lk9sf\") pod \"cert-manager-operator-controller-manager-66c8bdd694-2pstf\" (UID: \"999a9c93-98ce-4004-a6c7-552f645e4875\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-2pstf" Feb 03 07:01:24 crc kubenswrapper[4998]: I0203 07:01:24.535043 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/999a9c93-98ce-4004-a6c7-552f645e4875-tmp\") pod \"cert-manager-operator-controller-manager-66c8bdd694-2pstf\" (UID: \"999a9c93-98ce-4004-a6c7-552f645e4875\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-2pstf" Feb 03 07:01:24 crc kubenswrapper[4998]: I0203 07:01:24.535107 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lk9sf\" (UniqueName: \"kubernetes.io/projected/999a9c93-98ce-4004-a6c7-552f645e4875-kube-api-access-lk9sf\") pod \"cert-manager-operator-controller-manager-66c8bdd694-2pstf\" (UID: \"999a9c93-98ce-4004-a6c7-552f645e4875\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-2pstf" Feb 03 07:01:24 crc kubenswrapper[4998]: I0203 07:01:24.535544 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/999a9c93-98ce-4004-a6c7-552f645e4875-tmp\") pod \"cert-manager-operator-controller-manager-66c8bdd694-2pstf\" (UID: \"999a9c93-98ce-4004-a6c7-552f645e4875\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-2pstf" Feb 03 07:01:24 crc kubenswrapper[4998]: I0203 07:01:24.552768 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lk9sf\" (UniqueName: \"kubernetes.io/projected/999a9c93-98ce-4004-a6c7-552f645e4875-kube-api-access-lk9sf\") pod \"cert-manager-operator-controller-manager-66c8bdd694-2pstf\" (UID: \"999a9c93-98ce-4004-a6c7-552f645e4875\") " 
pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-2pstf" Feb 03 07:01:24 crc kubenswrapper[4998]: I0203 07:01:24.661655 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-2pstf" Feb 03 07:01:25 crc kubenswrapper[4998]: I0203 07:01:25.088904 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-2pstf"] Feb 03 07:01:25 crc kubenswrapper[4998]: W0203 07:01:25.094686 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod999a9c93_98ce_4004_a6c7_552f645e4875.slice/crio-8d402c27bc52b59337174eeb16169378d7029acc3de6c54b2c842351d07976e3 WatchSource:0}: Error finding container 8d402c27bc52b59337174eeb16169378d7029acc3de6c54b2c842351d07976e3: Status 404 returned error can't find the container with id 8d402c27bc52b59337174eeb16169378d7029acc3de6c54b2c842351d07976e3 Feb 03 07:01:25 crc kubenswrapper[4998]: I0203 07:01:25.526662 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-2pstf" event={"ID":"999a9c93-98ce-4004-a6c7-552f645e4875","Type":"ContainerStarted","Data":"8d402c27bc52b59337174eeb16169378d7029acc3de6c54b2c842351d07976e3"} Feb 03 07:01:28 crc kubenswrapper[4998]: I0203 07:01:28.548700 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-2pstf" event={"ID":"999a9c93-98ce-4004-a6c7-552f645e4875","Type":"ContainerStarted","Data":"c0fff3cc0987c7255d552d5e14e1d10c835f3c9fb5c3fa0d2e9f41554e3c3295"} Feb 03 07:01:28 crc kubenswrapper[4998]: I0203 07:01:28.575025 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-2pstf" podStartSLOduration=1.784945714 podStartE2EDuration="4.574997239s" podCreationTimestamp="2026-02-03 07:01:24 +0000 UTC" firstStartedPulling="2026-02-03 07:01:25.100142455 +0000 UTC m=+923.386836261" lastFinishedPulling="2026-02-03 07:01:27.89019398 +0000 UTC m=+926.176887786" observedRunningTime="2026-02-03 07:01:28.566325231 +0000 UTC m=+926.853019067" watchObservedRunningTime="2026-02-03 07:01:28.574997239 +0000 UTC m=+926.861691075" Feb 03 07:01:28 crc kubenswrapper[4998]: I0203 07:01:28.579559 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-pvsrq" Feb 03 07:01:28 crc kubenswrapper[4998]: I0203 07:01:28.579900 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-pvsrq" Feb 03 07:01:28 crc kubenswrapper[4998]: I0203 07:01:28.636479 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-pvsrq" Feb 03 07:01:29 crc kubenswrapper[4998]: I0203 07:01:29.606233 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-pvsrq" Feb 03 07:01:31 crc kubenswrapper[4998]: I0203 07:01:31.053500 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-pvsrq"] Feb 03 07:01:32 crc kubenswrapper[4998]: I0203 07:01:32.570374 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-pvsrq" 
podUID="c75f2619-952c-47cd-b6fe-17c655f2b81c" containerName="registry-server" containerID="cri-o://4c8655507237d8276b213ac9880becf18c5bd523934b284dfe35b86a019b9f88" gracePeriod=2 Feb 03 07:01:32 crc kubenswrapper[4998]: I0203 07:01:32.982268 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pvsrq" Feb 03 07:01:33 crc kubenswrapper[4998]: I0203 07:01:33.150526 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6mcth\" (UniqueName: \"kubernetes.io/projected/c75f2619-952c-47cd-b6fe-17c655f2b81c-kube-api-access-6mcth\") pod \"c75f2619-952c-47cd-b6fe-17c655f2b81c\" (UID: \"c75f2619-952c-47cd-b6fe-17c655f2b81c\") " Feb 03 07:01:33 crc kubenswrapper[4998]: I0203 07:01:33.150627 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c75f2619-952c-47cd-b6fe-17c655f2b81c-utilities\") pod \"c75f2619-952c-47cd-b6fe-17c655f2b81c\" (UID: \"c75f2619-952c-47cd-b6fe-17c655f2b81c\") " Feb 03 07:01:33 crc kubenswrapper[4998]: I0203 07:01:33.150655 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c75f2619-952c-47cd-b6fe-17c655f2b81c-catalog-content\") pod \"c75f2619-952c-47cd-b6fe-17c655f2b81c\" (UID: \"c75f2619-952c-47cd-b6fe-17c655f2b81c\") " Feb 03 07:01:33 crc kubenswrapper[4998]: I0203 07:01:33.152521 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c75f2619-952c-47cd-b6fe-17c655f2b81c-utilities" (OuterVolumeSpecName: "utilities") pod "c75f2619-952c-47cd-b6fe-17c655f2b81c" (UID: "c75f2619-952c-47cd-b6fe-17c655f2b81c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:01:33 crc kubenswrapper[4998]: I0203 07:01:33.163536 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c75f2619-952c-47cd-b6fe-17c655f2b81c-kube-api-access-6mcth" (OuterVolumeSpecName: "kube-api-access-6mcth") pod "c75f2619-952c-47cd-b6fe-17c655f2b81c" (UID: "c75f2619-952c-47cd-b6fe-17c655f2b81c"). InnerVolumeSpecName "kube-api-access-6mcth". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:01:33 crc kubenswrapper[4998]: I0203 07:01:33.198960 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c75f2619-952c-47cd-b6fe-17c655f2b81c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c75f2619-952c-47cd-b6fe-17c655f2b81c" (UID: "c75f2619-952c-47cd-b6fe-17c655f2b81c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:01:33 crc kubenswrapper[4998]: I0203 07:01:33.251920 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c75f2619-952c-47cd-b6fe-17c655f2b81c-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:01:33 crc kubenswrapper[4998]: I0203 07:01:33.251966 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6mcth\" (UniqueName: \"kubernetes.io/projected/c75f2619-952c-47cd-b6fe-17c655f2b81c-kube-api-access-6mcth\") on node \"crc\" DevicePath \"\"" Feb 03 07:01:33 crc kubenswrapper[4998]: I0203 07:01:33.251978 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c75f2619-952c-47cd-b6fe-17c655f2b81c-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:01:33 crc kubenswrapper[4998]: I0203 07:01:33.585510 4998 generic.go:334] "Generic (PLEG): container finished" podID="c75f2619-952c-47cd-b6fe-17c655f2b81c" containerID="4c8655507237d8276b213ac9880becf18c5bd523934b284dfe35b86a019b9f88" exitCode=0 Feb 03 07:01:33 crc kubenswrapper[4998]: I0203 07:01:33.585689 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pvsrq" event={"ID":"c75f2619-952c-47cd-b6fe-17c655f2b81c","Type":"ContainerDied","Data":"4c8655507237d8276b213ac9880becf18c5bd523934b284dfe35b86a019b9f88"} Feb 03 07:01:33 crc kubenswrapper[4998]: I0203 07:01:33.585768 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pvsrq" Feb 03 07:01:33 crc kubenswrapper[4998]: I0203 07:01:33.587098 4998 scope.go:117] "RemoveContainer" containerID="4c8655507237d8276b213ac9880becf18c5bd523934b284dfe35b86a019b9f88" Feb 03 07:01:33 crc kubenswrapper[4998]: I0203 07:01:33.587009 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pvsrq" event={"ID":"c75f2619-952c-47cd-b6fe-17c655f2b81c","Type":"ContainerDied","Data":"f5f3b301b75f93e78a35ad7597dadcaff1fbc16c1327235416a6bc319cb5f41f"} Feb 03 07:01:33 crc kubenswrapper[4998]: I0203 07:01:33.618834 4998 scope.go:117] "RemoveContainer" containerID="1f0a2868405fdf99efa0e5e928963652431016104366164e6ba57544aba60365" Feb 03 07:01:33 crc kubenswrapper[4998]: I0203 07:01:33.631177 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-pvsrq"] Feb 03 07:01:33 crc kubenswrapper[4998]: I0203 07:01:33.635072 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-pvsrq"] Feb 03 07:01:33 crc kubenswrapper[4998]: I0203 07:01:33.637707 4998 scope.go:117] "RemoveContainer" containerID="49d0c93e1ab6ff52cb51f56891aa0b04fdb2eeb9a476c42139d37bcdc794f6e4" Feb 03 07:01:33 crc kubenswrapper[4998]: I0203 07:01:33.662936 4998 scope.go:117] "RemoveContainer" containerID="4c8655507237d8276b213ac9880becf18c5bd523934b284dfe35b86a019b9f88" Feb 03 07:01:33 crc kubenswrapper[4998]: E0203 07:01:33.663301 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c8655507237d8276b213ac9880becf18c5bd523934b284dfe35b86a019b9f88\": container with ID starting with 4c8655507237d8276b213ac9880becf18c5bd523934b284dfe35b86a019b9f88 not found: ID does not exist" containerID="4c8655507237d8276b213ac9880becf18c5bd523934b284dfe35b86a019b9f88" Feb 03 07:01:33 crc kubenswrapper[4998]: I0203 07:01:33.663331 
4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c8655507237d8276b213ac9880becf18c5bd523934b284dfe35b86a019b9f88"} err="failed to get container status \"4c8655507237d8276b213ac9880becf18c5bd523934b284dfe35b86a019b9f88\": rpc error: code = NotFound desc = could not find container \"4c8655507237d8276b213ac9880becf18c5bd523934b284dfe35b86a019b9f88\": container with ID starting with 4c8655507237d8276b213ac9880becf18c5bd523934b284dfe35b86a019b9f88 not found: ID does not exist" Feb 03 07:01:33 crc kubenswrapper[4998]: I0203 07:01:33.663352 4998 scope.go:117] "RemoveContainer" containerID="1f0a2868405fdf99efa0e5e928963652431016104366164e6ba57544aba60365" Feb 03 07:01:33 crc kubenswrapper[4998]: E0203 07:01:33.663662 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f0a2868405fdf99efa0e5e928963652431016104366164e6ba57544aba60365\": container with ID starting with 1f0a2868405fdf99efa0e5e928963652431016104366164e6ba57544aba60365 not found: ID does not exist" containerID="1f0a2868405fdf99efa0e5e928963652431016104366164e6ba57544aba60365" Feb 03 07:01:33 crc kubenswrapper[4998]: I0203 07:01:33.663693 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f0a2868405fdf99efa0e5e928963652431016104366164e6ba57544aba60365"} err="failed to get container status \"1f0a2868405fdf99efa0e5e928963652431016104366164e6ba57544aba60365\": rpc error: code = NotFound desc = could not find container \"1f0a2868405fdf99efa0e5e928963652431016104366164e6ba57544aba60365\": container with ID starting with 1f0a2868405fdf99efa0e5e928963652431016104366164e6ba57544aba60365 not found: ID does not exist" Feb 03 07:01:33 crc kubenswrapper[4998]: I0203 07:01:33.663711 4998 scope.go:117] "RemoveContainer" containerID="49d0c93e1ab6ff52cb51f56891aa0b04fdb2eeb9a476c42139d37bcdc794f6e4" Feb 03 07:01:33 crc kubenswrapper[4998]: E0203 07:01:33.664515 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49d0c93e1ab6ff52cb51f56891aa0b04fdb2eeb9a476c42139d37bcdc794f6e4\": container with ID starting with 49d0c93e1ab6ff52cb51f56891aa0b04fdb2eeb9a476c42139d37bcdc794f6e4 not found: ID does not exist" containerID="49d0c93e1ab6ff52cb51f56891aa0b04fdb2eeb9a476c42139d37bcdc794f6e4" Feb 03 07:01:33 crc kubenswrapper[4998]: I0203 07:01:33.664541 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49d0c93e1ab6ff52cb51f56891aa0b04fdb2eeb9a476c42139d37bcdc794f6e4"} err="failed to get container status \"49d0c93e1ab6ff52cb51f56891aa0b04fdb2eeb9a476c42139d37bcdc794f6e4\": rpc error: code = NotFound desc = could not find container \"49d0c93e1ab6ff52cb51f56891aa0b04fdb2eeb9a476c42139d37bcdc794f6e4\": container with ID starting with 49d0c93e1ab6ff52cb51f56891aa0b04fdb2eeb9a476c42139d37bcdc794f6e4 not found: ID does not exist" Feb 03 07:01:34 crc kubenswrapper[4998]: I0203 07:01:34.435310 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c75f2619-952c-47cd-b6fe-17c655f2b81c" path="/var/lib/kubelet/pods/c75f2619-952c-47cd-b6fe-17c655f2b81c/volumes" Feb 03 07:01:34 crc kubenswrapper[4998]: I0203 07:01:34.752980 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-6888856db4-qpgz8"] Feb 03 07:01:34 crc kubenswrapper[4998]: E0203 07:01:34.753258 4998 cpu_manager.go:410] "RemoveStaleState: removing container" 
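The "ContainerStatus from runtime service failed" / "DeleteContainer returned error" pairs above are benign: by the time the kubelet asks CRI-O for the status of a container it is removing, the container is already gone, so NotFound just means the cleanup already happened. A minimal sketch of that idempotent-delete pattern in Go, assuming a gRPC-backed runtime client; the remove callback is a placeholder, not the kubelet's real CRI client:

package cleanup

import (
	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// RemoveIfPresent deletes a container by ID and, like the kubelet above,
// treats a NotFound answer from the runtime as success: the container is
// already gone, so there is nothing left to do.
func RemoveIfPresent(remove func(id string) error, id string) error {
	err := remove(id)
	if st, ok := status.FromError(err); ok && st.Code() == codes.NotFound {
		return nil // "ID does not exist" means the delete is already complete
	}
	return err
}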
podUID="c75f2619-952c-47cd-b6fe-17c655f2b81c" containerName="extract-utilities" Feb 03 07:01:34 crc kubenswrapper[4998]: I0203 07:01:34.753274 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="c75f2619-952c-47cd-b6fe-17c655f2b81c" containerName="extract-utilities" Feb 03 07:01:34 crc kubenswrapper[4998]: E0203 07:01:34.753293 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c75f2619-952c-47cd-b6fe-17c655f2b81c" containerName="registry-server" Feb 03 07:01:34 crc kubenswrapper[4998]: I0203 07:01:34.753301 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="c75f2619-952c-47cd-b6fe-17c655f2b81c" containerName="registry-server" Feb 03 07:01:34 crc kubenswrapper[4998]: E0203 07:01:34.753319 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c75f2619-952c-47cd-b6fe-17c655f2b81c" containerName="extract-content" Feb 03 07:01:34 crc kubenswrapper[4998]: I0203 07:01:34.753329 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="c75f2619-952c-47cd-b6fe-17c655f2b81c" containerName="extract-content" Feb 03 07:01:34 crc kubenswrapper[4998]: I0203 07:01:34.753469 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="c75f2619-952c-47cd-b6fe-17c655f2b81c" containerName="registry-server" Feb 03 07:01:34 crc kubenswrapper[4998]: I0203 07:01:34.753954 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-6888856db4-qpgz8" Feb 03 07:01:34 crc kubenswrapper[4998]: I0203 07:01:34.758201 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Feb 03 07:01:34 crc kubenswrapper[4998]: I0203 07:01:34.758349 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Feb 03 07:01:34 crc kubenswrapper[4998]: I0203 07:01:34.758501 4998 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-cdjhw" Feb 03 07:01:34 crc kubenswrapper[4998]: I0203 07:01:34.775101 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-6888856db4-qpgz8"] Feb 03 07:01:34 crc kubenswrapper[4998]: I0203 07:01:34.871156 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rj5fb\" (UniqueName: \"kubernetes.io/projected/bf9d2252-d1e5-4558-a6b5-892087c30d30-kube-api-access-rj5fb\") pod \"cert-manager-webhook-6888856db4-qpgz8\" (UID: \"bf9d2252-d1e5-4558-a6b5-892087c30d30\") " pod="cert-manager/cert-manager-webhook-6888856db4-qpgz8" Feb 03 07:01:34 crc kubenswrapper[4998]: I0203 07:01:34.871218 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf9d2252-d1e5-4558-a6b5-892087c30d30-bound-sa-token\") pod \"cert-manager-webhook-6888856db4-qpgz8\" (UID: \"bf9d2252-d1e5-4558-a6b5-892087c30d30\") " pod="cert-manager/cert-manager-webhook-6888856db4-qpgz8" Feb 03 07:01:34 crc kubenswrapper[4998]: I0203 07:01:34.972947 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-5545bd876-8wvrj"] Feb 03 07:01:34 crc kubenswrapper[4998]: I0203 07:01:34.973079 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf9d2252-d1e5-4558-a6b5-892087c30d30-bound-sa-token\") pod \"cert-manager-webhook-6888856db4-qpgz8\" (UID: 
\"bf9d2252-d1e5-4558-a6b5-892087c30d30\") " pod="cert-manager/cert-manager-webhook-6888856db4-qpgz8" Feb 03 07:01:34 crc kubenswrapper[4998]: I0203 07:01:34.973199 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rj5fb\" (UniqueName: \"kubernetes.io/projected/bf9d2252-d1e5-4558-a6b5-892087c30d30-kube-api-access-rj5fb\") pod \"cert-manager-webhook-6888856db4-qpgz8\" (UID: \"bf9d2252-d1e5-4558-a6b5-892087c30d30\") " pod="cert-manager/cert-manager-webhook-6888856db4-qpgz8" Feb 03 07:01:34 crc kubenswrapper[4998]: I0203 07:01:34.973625 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-5545bd876-8wvrj" Feb 03 07:01:34 crc kubenswrapper[4998]: I0203 07:01:34.975007 4998 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-j8dww" Feb 03 07:01:34 crc kubenswrapper[4998]: I0203 07:01:34.983670 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-5545bd876-8wvrj"] Feb 03 07:01:34 crc kubenswrapper[4998]: I0203 07:01:34.992899 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rj5fb\" (UniqueName: \"kubernetes.io/projected/bf9d2252-d1e5-4558-a6b5-892087c30d30-kube-api-access-rj5fb\") pod \"cert-manager-webhook-6888856db4-qpgz8\" (UID: \"bf9d2252-d1e5-4558-a6b5-892087c30d30\") " pod="cert-manager/cert-manager-webhook-6888856db4-qpgz8" Feb 03 07:01:34 crc kubenswrapper[4998]: I0203 07:01:34.993313 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf9d2252-d1e5-4558-a6b5-892087c30d30-bound-sa-token\") pod \"cert-manager-webhook-6888856db4-qpgz8\" (UID: \"bf9d2252-d1e5-4558-a6b5-892087c30d30\") " pod="cert-manager/cert-manager-webhook-6888856db4-qpgz8" Feb 03 07:01:35 crc kubenswrapper[4998]: I0203 07:01:35.069845 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-6888856db4-qpgz8" Feb 03 07:01:35 crc kubenswrapper[4998]: I0203 07:01:35.074605 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/de82203b-599e-4c92-afed-08dd43dabf88-bound-sa-token\") pod \"cert-manager-cainjector-5545bd876-8wvrj\" (UID: \"de82203b-599e-4c92-afed-08dd43dabf88\") " pod="cert-manager/cert-manager-cainjector-5545bd876-8wvrj" Feb 03 07:01:35 crc kubenswrapper[4998]: I0203 07:01:35.074676 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64gjg\" (UniqueName: \"kubernetes.io/projected/de82203b-599e-4c92-afed-08dd43dabf88-kube-api-access-64gjg\") pod \"cert-manager-cainjector-5545bd876-8wvrj\" (UID: \"de82203b-599e-4c92-afed-08dd43dabf88\") " pod="cert-manager/cert-manager-cainjector-5545bd876-8wvrj" Feb 03 07:01:35 crc kubenswrapper[4998]: I0203 07:01:35.175483 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/de82203b-599e-4c92-afed-08dd43dabf88-bound-sa-token\") pod \"cert-manager-cainjector-5545bd876-8wvrj\" (UID: \"de82203b-599e-4c92-afed-08dd43dabf88\") " pod="cert-manager/cert-manager-cainjector-5545bd876-8wvrj" Feb 03 07:01:35 crc kubenswrapper[4998]: I0203 07:01:35.175932 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64gjg\" (UniqueName: \"kubernetes.io/projected/de82203b-599e-4c92-afed-08dd43dabf88-kube-api-access-64gjg\") pod \"cert-manager-cainjector-5545bd876-8wvrj\" (UID: \"de82203b-599e-4c92-afed-08dd43dabf88\") " pod="cert-manager/cert-manager-cainjector-5545bd876-8wvrj" Feb 03 07:01:35 crc kubenswrapper[4998]: I0203 07:01:35.201070 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/de82203b-599e-4c92-afed-08dd43dabf88-bound-sa-token\") pod \"cert-manager-cainjector-5545bd876-8wvrj\" (UID: \"de82203b-599e-4c92-afed-08dd43dabf88\") " pod="cert-manager/cert-manager-cainjector-5545bd876-8wvrj" Feb 03 07:01:35 crc kubenswrapper[4998]: I0203 07:01:35.203703 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64gjg\" (UniqueName: \"kubernetes.io/projected/de82203b-599e-4c92-afed-08dd43dabf88-kube-api-access-64gjg\") pod \"cert-manager-cainjector-5545bd876-8wvrj\" (UID: \"de82203b-599e-4c92-afed-08dd43dabf88\") " pod="cert-manager/cert-manager-cainjector-5545bd876-8wvrj" Feb 03 07:01:35 crc kubenswrapper[4998]: I0203 07:01:35.281304 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-6888856db4-qpgz8"] Feb 03 07:01:35 crc kubenswrapper[4998]: I0203 07:01:35.286642 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-5545bd876-8wvrj" Feb 03 07:01:35 crc kubenswrapper[4998]: I0203 07:01:35.599667 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-6888856db4-qpgz8" event={"ID":"bf9d2252-d1e5-4558-a6b5-892087c30d30","Type":"ContainerStarted","Data":"9a6d23baddeeeb95a5e7e5ce2312218fae7ee4f8e1a3de94d350aecbe328c1b9"} Feb 03 07:01:35 crc kubenswrapper[4998]: I0203 07:01:35.740338 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-5545bd876-8wvrj"] Feb 03 07:01:35 crc kubenswrapper[4998]: W0203 07:01:35.745992 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podde82203b_599e_4c92_afed_08dd43dabf88.slice/crio-aef42b3d5c81df37dcda4d75ec64b68193ef35c1ba9dbb38acf32d2819034770 WatchSource:0}: Error finding container aef42b3d5c81df37dcda4d75ec64b68193ef35c1ba9dbb38acf32d2819034770: Status 404 returned error can't find the container with id aef42b3d5c81df37dcda4d75ec64b68193ef35c1ba9dbb38acf32d2819034770 Feb 03 07:01:36 crc kubenswrapper[4998]: I0203 07:01:36.609974 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-5545bd876-8wvrj" event={"ID":"de82203b-599e-4c92-afed-08dd43dabf88","Type":"ContainerStarted","Data":"aef42b3d5c81df37dcda4d75ec64b68193ef35c1ba9dbb38acf32d2819034770"} Feb 03 07:01:40 crc kubenswrapper[4998]: I0203 07:01:40.635862 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-5545bd876-8wvrj" event={"ID":"de82203b-599e-4c92-afed-08dd43dabf88","Type":"ContainerStarted","Data":"cb9b07719ee95cd5c42e16b5e70b2139a3d88814288c66dc8673d8cff5ad5a1c"} Feb 03 07:01:40 crc kubenswrapper[4998]: I0203 07:01:40.637171 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-6888856db4-qpgz8" event={"ID":"bf9d2252-d1e5-4558-a6b5-892087c30d30","Type":"ContainerStarted","Data":"02830328eb485ff3e0303523e915751ab3c0b8eef27c09ab45f48270d1fa23d5"} Feb 03 07:01:40 crc kubenswrapper[4998]: I0203 07:01:40.637301 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-6888856db4-qpgz8" Feb 03 07:01:40 crc kubenswrapper[4998]: I0203 07:01:40.652808 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-5545bd876-8wvrj" podStartSLOduration=2.655829583 podStartE2EDuration="6.652791002s" podCreationTimestamp="2026-02-03 07:01:34 +0000 UTC" firstStartedPulling="2026-02-03 07:01:35.749198993 +0000 UTC m=+934.035892809" lastFinishedPulling="2026-02-03 07:01:39.746160412 +0000 UTC m=+938.032854228" observedRunningTime="2026-02-03 07:01:40.650573998 +0000 UTC m=+938.937267804" watchObservedRunningTime="2026-02-03 07:01:40.652791002 +0000 UTC m=+938.939484808" Feb 03 07:01:40 crc kubenswrapper[4998]: I0203 07:01:40.674208 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-6888856db4-qpgz8" podStartSLOduration=2.226155738 podStartE2EDuration="6.674189473s" podCreationTimestamp="2026-02-03 07:01:34 +0000 UTC" firstStartedPulling="2026-02-03 07:01:35.282877521 +0000 UTC m=+933.569571327" lastFinishedPulling="2026-02-03 07:01:39.730911236 +0000 UTC m=+938.017605062" observedRunningTime="2026-02-03 07:01:40.669127879 +0000 UTC m=+938.955821705" watchObservedRunningTime="2026-02-03 07:01:40.674189473 +0000 
Feb 03 07:01:41 crc kubenswrapper[4998]: I0203 07:01:41.057381 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-545d4d4674-x5nsm"]
Feb 03 07:01:41 crc kubenswrapper[4998]: I0203 07:01:41.058514 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-545d4d4674-x5nsm"
Feb 03 07:01:41 crc kubenswrapper[4998]: I0203 07:01:41.060634 4998 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-ztxzq"
Feb 03 07:01:41 crc kubenswrapper[4998]: I0203 07:01:41.070732 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/033d2c3c-ef4a-43a3-b175-79321bfb5aa2-bound-sa-token\") pod \"cert-manager-545d4d4674-x5nsm\" (UID: \"033d2c3c-ef4a-43a3-b175-79321bfb5aa2\") " pod="cert-manager/cert-manager-545d4d4674-x5nsm"
Feb 03 07:01:41 crc kubenswrapper[4998]: I0203 07:01:41.070809 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqcpk\" (UniqueName: \"kubernetes.io/projected/033d2c3c-ef4a-43a3-b175-79321bfb5aa2-kube-api-access-cqcpk\") pod \"cert-manager-545d4d4674-x5nsm\" (UID: \"033d2c3c-ef4a-43a3-b175-79321bfb5aa2\") " pod="cert-manager/cert-manager-545d4d4674-x5nsm"
Feb 03 07:01:41 crc kubenswrapper[4998]: I0203 07:01:41.072668 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-545d4d4674-x5nsm"]
Feb 03 07:01:41 crc kubenswrapper[4998]: I0203 07:01:41.172593 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/033d2c3c-ef4a-43a3-b175-79321bfb5aa2-bound-sa-token\") pod \"cert-manager-545d4d4674-x5nsm\" (UID: \"033d2c3c-ef4a-43a3-b175-79321bfb5aa2\") " pod="cert-manager/cert-manager-545d4d4674-x5nsm"
Feb 03 07:01:41 crc kubenswrapper[4998]: I0203 07:01:41.172900 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqcpk\" (UniqueName: \"kubernetes.io/projected/033d2c3c-ef4a-43a3-b175-79321bfb5aa2-kube-api-access-cqcpk\") pod \"cert-manager-545d4d4674-x5nsm\" (UID: \"033d2c3c-ef4a-43a3-b175-79321bfb5aa2\") " pod="cert-manager/cert-manager-545d4d4674-x5nsm"
Feb 03 07:01:41 crc kubenswrapper[4998]: I0203 07:01:41.189980 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/033d2c3c-ef4a-43a3-b175-79321bfb5aa2-bound-sa-token\") pod \"cert-manager-545d4d4674-x5nsm\" (UID: \"033d2c3c-ef4a-43a3-b175-79321bfb5aa2\") " pod="cert-manager/cert-manager-545d4d4674-x5nsm"
Feb 03 07:01:41 crc kubenswrapper[4998]: I0203 07:01:41.190083 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqcpk\" (UniqueName: \"kubernetes.io/projected/033d2c3c-ef4a-43a3-b175-79321bfb5aa2-kube-api-access-cqcpk\") pod \"cert-manager-545d4d4674-x5nsm\" (UID: \"033d2c3c-ef4a-43a3-b175-79321bfb5aa2\") " pod="cert-manager/cert-manager-545d4d4674-x5nsm"
Feb 03 07:01:41 crc kubenswrapper[4998]: I0203 07:01:41.418659 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-545d4d4674-x5nsm"
Feb 03 07:01:41 crc kubenswrapper[4998]: I0203 07:01:41.646630 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-545d4d4674-x5nsm"]
Feb 03 07:01:41 crc kubenswrapper[4998]: W0203 07:01:41.650115 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod033d2c3c_ef4a_43a3_b175_79321bfb5aa2.slice/crio-3f4d139f7befb8b0949683839c6503b778aecc83848cdcdf924cc98971488e6a WatchSource:0}: Error finding container 3f4d139f7befb8b0949683839c6503b778aecc83848cdcdf924cc98971488e6a: Status 404 returned error can't find the container with id 3f4d139f7befb8b0949683839c6503b778aecc83848cdcdf924cc98971488e6a
Feb 03 07:01:42 crc kubenswrapper[4998]: I0203 07:01:42.653594 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-545d4d4674-x5nsm" event={"ID":"033d2c3c-ef4a-43a3-b175-79321bfb5aa2","Type":"ContainerStarted","Data":"8891cce0a3d9edaa150966ab02d8dfb377be4b4c073eb50aa5ce23373a93b194"}
Feb 03 07:01:42 crc kubenswrapper[4998]: I0203 07:01:42.653911 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-545d4d4674-x5nsm" event={"ID":"033d2c3c-ef4a-43a3-b175-79321bfb5aa2","Type":"ContainerStarted","Data":"3f4d139f7befb8b0949683839c6503b778aecc83848cdcdf924cc98971488e6a"}
Feb 03 07:01:42 crc kubenswrapper[4998]: I0203 07:01:42.672384 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-545d4d4674-x5nsm" podStartSLOduration=1.67236545 podStartE2EDuration="1.67236545s" podCreationTimestamp="2026-02-03 07:01:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:01:42.666799091 +0000 UTC m=+940.953493007" watchObservedRunningTime="2026-02-03 07:01:42.67236545 +0000 UTC m=+940.959059266"
Feb 03 07:01:42 crc kubenswrapper[4998]: I0203 07:01:42.754255 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 03 07:01:42 crc kubenswrapper[4998]: I0203 07:01:42.754340 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 03 07:01:45 crc kubenswrapper[4998]: I0203 07:01:45.071936 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-6888856db4-qpgz8"
Feb 03 07:01:53 crc kubenswrapper[4998]: I0203 07:01:53.260175 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-b97lv"]
Feb 03 07:01:53 crc kubenswrapper[4998]: I0203 07:01:53.261495 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-b97lv"
Need to start a new one" pod="openstack-operators/openstack-operator-index-b97lv" Feb 03 07:01:53 crc kubenswrapper[4998]: I0203 07:01:53.263801 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-lnph2" Feb 03 07:01:53 crc kubenswrapper[4998]: I0203 07:01:53.263994 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Feb 03 07:01:53 crc kubenswrapper[4998]: I0203 07:01:53.265393 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Feb 03 07:01:53 crc kubenswrapper[4998]: I0203 07:01:53.269477 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-b97lv"] Feb 03 07:01:53 crc kubenswrapper[4998]: I0203 07:01:53.426885 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mncsq\" (UniqueName: \"kubernetes.io/projected/f9e8e493-afeb-4146-9eec-c2932ce60bea-kube-api-access-mncsq\") pod \"openstack-operator-index-b97lv\" (UID: \"f9e8e493-afeb-4146-9eec-c2932ce60bea\") " pod="openstack-operators/openstack-operator-index-b97lv" Feb 03 07:01:53 crc kubenswrapper[4998]: I0203 07:01:53.528217 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mncsq\" (UniqueName: \"kubernetes.io/projected/f9e8e493-afeb-4146-9eec-c2932ce60bea-kube-api-access-mncsq\") pod \"openstack-operator-index-b97lv\" (UID: \"f9e8e493-afeb-4146-9eec-c2932ce60bea\") " pod="openstack-operators/openstack-operator-index-b97lv" Feb 03 07:01:53 crc kubenswrapper[4998]: I0203 07:01:53.547577 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mncsq\" (UniqueName: \"kubernetes.io/projected/f9e8e493-afeb-4146-9eec-c2932ce60bea-kube-api-access-mncsq\") pod \"openstack-operator-index-b97lv\" (UID: \"f9e8e493-afeb-4146-9eec-c2932ce60bea\") " pod="openstack-operators/openstack-operator-index-b97lv" Feb 03 07:01:53 crc kubenswrapper[4998]: I0203 07:01:53.582325 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-b97lv" Feb 03 07:01:53 crc kubenswrapper[4998]: I0203 07:01:53.977019 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-b97lv"] Feb 03 07:01:53 crc kubenswrapper[4998]: W0203 07:01:53.984955 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf9e8e493_afeb_4146_9eec_c2932ce60bea.slice/crio-2447d720cdf7b45c5d3dcb5ee48a87835aad6ecb8b3965a836097f3f2e229558 WatchSource:0}: Error finding container 2447d720cdf7b45c5d3dcb5ee48a87835aad6ecb8b3965a836097f3f2e229558: Status 404 returned error can't find the container with id 2447d720cdf7b45c5d3dcb5ee48a87835aad6ecb8b3965a836097f3f2e229558 Feb 03 07:01:54 crc kubenswrapper[4998]: I0203 07:01:54.720599 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-b97lv" event={"ID":"f9e8e493-afeb-4146-9eec-c2932ce60bea","Type":"ContainerStarted","Data":"2447d720cdf7b45c5d3dcb5ee48a87835aad6ecb8b3965a836097f3f2e229558"} Feb 03 07:01:55 crc kubenswrapper[4998]: I0203 07:01:55.727345 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-b97lv" event={"ID":"f9e8e493-afeb-4146-9eec-c2932ce60bea","Type":"ContainerStarted","Data":"cc54c851678898a7dc18ae187136b825a571fe77ff5885e15bd77bb54c995b58"} Feb 03 07:01:55 crc kubenswrapper[4998]: I0203 07:01:55.741075 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-b97lv" podStartSLOduration=1.927839714 podStartE2EDuration="2.741054973s" podCreationTimestamp="2026-02-03 07:01:53 +0000 UTC" firstStartedPulling="2026-02-03 07:01:53.987660764 +0000 UTC m=+952.274354570" lastFinishedPulling="2026-02-03 07:01:54.800876023 +0000 UTC m=+953.087569829" observedRunningTime="2026-02-03 07:01:55.739610451 +0000 UTC m=+954.026304297" watchObservedRunningTime="2026-02-03 07:01:55.741054973 +0000 UTC m=+954.027748779" Feb 03 07:01:58 crc kubenswrapper[4998]: I0203 07:01:58.451495 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-b97lv"] Feb 03 07:01:58 crc kubenswrapper[4998]: I0203 07:01:58.451959 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-b97lv" podUID="f9e8e493-afeb-4146-9eec-c2932ce60bea" containerName="registry-server" containerID="cri-o://cc54c851678898a7dc18ae187136b825a571fe77ff5885e15bd77bb54c995b58" gracePeriod=2 Feb 03 07:01:58 crc kubenswrapper[4998]: I0203 07:01:58.750067 4998 generic.go:334] "Generic (PLEG): container finished" podID="f9e8e493-afeb-4146-9eec-c2932ce60bea" containerID="cc54c851678898a7dc18ae187136b825a571fe77ff5885e15bd77bb54c995b58" exitCode=0 Feb 03 07:01:58 crc kubenswrapper[4998]: I0203 07:01:58.750275 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-b97lv" event={"ID":"f9e8e493-afeb-4146-9eec-c2932ce60bea","Type":"ContainerDied","Data":"cc54c851678898a7dc18ae187136b825a571fe77ff5885e15bd77bb54c995b58"} Feb 03 07:01:58 crc kubenswrapper[4998]: I0203 07:01:58.811250 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-b97lv" Feb 03 07:01:59 crc kubenswrapper[4998]: I0203 07:01:59.005645 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mncsq\" (UniqueName: \"kubernetes.io/projected/f9e8e493-afeb-4146-9eec-c2932ce60bea-kube-api-access-mncsq\") pod \"f9e8e493-afeb-4146-9eec-c2932ce60bea\" (UID: \"f9e8e493-afeb-4146-9eec-c2932ce60bea\") " Feb 03 07:01:59 crc kubenswrapper[4998]: I0203 07:01:59.012876 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9e8e493-afeb-4146-9eec-c2932ce60bea-kube-api-access-mncsq" (OuterVolumeSpecName: "kube-api-access-mncsq") pod "f9e8e493-afeb-4146-9eec-c2932ce60bea" (UID: "f9e8e493-afeb-4146-9eec-c2932ce60bea"). InnerVolumeSpecName "kube-api-access-mncsq". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:01:59 crc kubenswrapper[4998]: I0203 07:01:59.060063 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-pm5bm"] Feb 03 07:01:59 crc kubenswrapper[4998]: E0203 07:01:59.060381 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9e8e493-afeb-4146-9eec-c2932ce60bea" containerName="registry-server" Feb 03 07:01:59 crc kubenswrapper[4998]: I0203 07:01:59.060415 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9e8e493-afeb-4146-9eec-c2932ce60bea" containerName="registry-server" Feb 03 07:01:59 crc kubenswrapper[4998]: I0203 07:01:59.060563 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9e8e493-afeb-4146-9eec-c2932ce60bea" containerName="registry-server" Feb 03 07:01:59 crc kubenswrapper[4998]: I0203 07:01:59.061133 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-pm5bm" Feb 03 07:01:59 crc kubenswrapper[4998]: I0203 07:01:59.071160 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-pm5bm"] Feb 03 07:01:59 crc kubenswrapper[4998]: I0203 07:01:59.112282 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9tsp\" (UniqueName: \"kubernetes.io/projected/e43b0989-9c17-455c-827a-6db9db8a0039-kube-api-access-w9tsp\") pod \"openstack-operator-index-pm5bm\" (UID: \"e43b0989-9c17-455c-827a-6db9db8a0039\") " pod="openstack-operators/openstack-operator-index-pm5bm" Feb 03 07:01:59 crc kubenswrapper[4998]: I0203 07:01:59.112367 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mncsq\" (UniqueName: \"kubernetes.io/projected/f9e8e493-afeb-4146-9eec-c2932ce60bea-kube-api-access-mncsq\") on node \"crc\" DevicePath \"\"" Feb 03 07:01:59 crc kubenswrapper[4998]: I0203 07:01:59.213162 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9tsp\" (UniqueName: \"kubernetes.io/projected/e43b0989-9c17-455c-827a-6db9db8a0039-kube-api-access-w9tsp\") pod \"openstack-operator-index-pm5bm\" (UID: \"e43b0989-9c17-455c-827a-6db9db8a0039\") " pod="openstack-operators/openstack-operator-index-pm5bm" Feb 03 07:01:59 crc kubenswrapper[4998]: I0203 07:01:59.228336 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9tsp\" (UniqueName: \"kubernetes.io/projected/e43b0989-9c17-455c-827a-6db9db8a0039-kube-api-access-w9tsp\") pod \"openstack-operator-index-pm5bm\" (UID: \"e43b0989-9c17-455c-827a-6db9db8a0039\") " pod="openstack-operators/openstack-operator-index-pm5bm" Feb 03 07:01:59 crc kubenswrapper[4998]: I0203 07:01:59.419263 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-pm5bm" Feb 03 07:01:59 crc kubenswrapper[4998]: I0203 07:01:59.636644 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-pm5bm"] Feb 03 07:01:59 crc kubenswrapper[4998]: W0203 07:01:59.643398 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode43b0989_9c17_455c_827a_6db9db8a0039.slice/crio-9f7dc89f87a359b54e26ed18785d8bbb58db381730d899d030394b05637a06a8 WatchSource:0}: Error finding container 9f7dc89f87a359b54e26ed18785d8bbb58db381730d899d030394b05637a06a8: Status 404 returned error can't find the container with id 9f7dc89f87a359b54e26ed18785d8bbb58db381730d899d030394b05637a06a8 Feb 03 07:01:59 crc kubenswrapper[4998]: I0203 07:01:59.756389 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-b97lv" event={"ID":"f9e8e493-afeb-4146-9eec-c2932ce60bea","Type":"ContainerDied","Data":"2447d720cdf7b45c5d3dcb5ee48a87835aad6ecb8b3965a836097f3f2e229558"} Feb 03 07:01:59 crc kubenswrapper[4998]: I0203 07:01:59.756427 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-b97lv" Feb 03 07:01:59 crc kubenswrapper[4998]: I0203 07:01:59.756725 4998 scope.go:117] "RemoveContainer" containerID="cc54c851678898a7dc18ae187136b825a571fe77ff5885e15bd77bb54c995b58" Feb 03 07:01:59 crc kubenswrapper[4998]: I0203 07:01:59.757215 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-pm5bm" event={"ID":"e43b0989-9c17-455c-827a-6db9db8a0039","Type":"ContainerStarted","Data":"9f7dc89f87a359b54e26ed18785d8bbb58db381730d899d030394b05637a06a8"} Feb 03 07:01:59 crc kubenswrapper[4998]: I0203 07:01:59.793880 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-b97lv"] Feb 03 07:01:59 crc kubenswrapper[4998]: I0203 07:01:59.800620 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-b97lv"] Feb 03 07:02:00 crc kubenswrapper[4998]: I0203 07:02:00.435690 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9e8e493-afeb-4146-9eec-c2932ce60bea" path="/var/lib/kubelet/pods/f9e8e493-afeb-4146-9eec-c2932ce60bea/volumes" Feb 03 07:02:00 crc kubenswrapper[4998]: I0203 07:02:00.767459 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-pm5bm" event={"ID":"e43b0989-9c17-455c-827a-6db9db8a0039","Type":"ContainerStarted","Data":"05b7f5a8c3bdca2ed7be335f861c74d49a71e7b594c79dc9296cd791eb0dc11c"} Feb 03 07:02:00 crc kubenswrapper[4998]: I0203 07:02:00.787189 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-pm5bm" podStartSLOduration=1.358187602 podStartE2EDuration="1.787171377s" podCreationTimestamp="2026-02-03 07:01:59 +0000 UTC" firstStartedPulling="2026-02-03 07:01:59.649492041 +0000 UTC m=+957.936185847" lastFinishedPulling="2026-02-03 07:02:00.078475816 +0000 UTC m=+958.365169622" observedRunningTime="2026-02-03 07:02:00.780093745 +0000 UTC m=+959.066787601" watchObservedRunningTime="2026-02-03 07:02:00.787171377 +0000 UTC m=+959.073865183" Feb 03 07:02:09 crc kubenswrapper[4998]: I0203 07:02:09.420316 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-pm5bm" Feb 03 07:02:09 crc kubenswrapper[4998]: I0203 07:02:09.420749 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-pm5bm" Feb 03 07:02:09 crc kubenswrapper[4998]: I0203 07:02:09.447771 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-pm5bm" Feb 03 07:02:09 crc kubenswrapper[4998]: I0203 07:02:09.851301 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-pm5bm" Feb 03 07:02:12 crc kubenswrapper[4998]: I0203 07:02:12.100263 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/805de4b7130efd4fbe2290d0c51d78e6295b83d77efc3ff2d1b014ffe7sstbd"] Feb 03 07:02:12 crc kubenswrapper[4998]: I0203 07:02:12.101460 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/805de4b7130efd4fbe2290d0c51d78e6295b83d77efc3ff2d1b014ffe7sstbd" Feb 03 07:02:12 crc kubenswrapper[4998]: I0203 07:02:12.106818 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-c9lwl" Feb 03 07:02:12 crc kubenswrapper[4998]: I0203 07:02:12.111579 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/805de4b7130efd4fbe2290d0c51d78e6295b83d77efc3ff2d1b014ffe7sstbd"] Feb 03 07:02:12 crc kubenswrapper[4998]: I0203 07:02:12.174421 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ec8df89a-256d-4eb4-97dc-57d42b4f34f5-util\") pod \"805de4b7130efd4fbe2290d0c51d78e6295b83d77efc3ff2d1b014ffe7sstbd\" (UID: \"ec8df89a-256d-4eb4-97dc-57d42b4f34f5\") " pod="openstack-operators/805de4b7130efd4fbe2290d0c51d78e6295b83d77efc3ff2d1b014ffe7sstbd" Feb 03 07:02:12 crc kubenswrapper[4998]: I0203 07:02:12.174496 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ec8df89a-256d-4eb4-97dc-57d42b4f34f5-bundle\") pod \"805de4b7130efd4fbe2290d0c51d78e6295b83d77efc3ff2d1b014ffe7sstbd\" (UID: \"ec8df89a-256d-4eb4-97dc-57d42b4f34f5\") " pod="openstack-operators/805de4b7130efd4fbe2290d0c51d78e6295b83d77efc3ff2d1b014ffe7sstbd" Feb 03 07:02:12 crc kubenswrapper[4998]: I0203 07:02:12.174664 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdq69\" (UniqueName: \"kubernetes.io/projected/ec8df89a-256d-4eb4-97dc-57d42b4f34f5-kube-api-access-jdq69\") pod \"805de4b7130efd4fbe2290d0c51d78e6295b83d77efc3ff2d1b014ffe7sstbd\" (UID: \"ec8df89a-256d-4eb4-97dc-57d42b4f34f5\") " pod="openstack-operators/805de4b7130efd4fbe2290d0c51d78e6295b83d77efc3ff2d1b014ffe7sstbd" Feb 03 07:02:12 crc kubenswrapper[4998]: I0203 07:02:12.275441 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ec8df89a-256d-4eb4-97dc-57d42b4f34f5-util\") pod \"805de4b7130efd4fbe2290d0c51d78e6295b83d77efc3ff2d1b014ffe7sstbd\" (UID: \"ec8df89a-256d-4eb4-97dc-57d42b4f34f5\") " pod="openstack-operators/805de4b7130efd4fbe2290d0c51d78e6295b83d77efc3ff2d1b014ffe7sstbd" Feb 03 07:02:12 crc kubenswrapper[4998]: I0203 07:02:12.275510 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ec8df89a-256d-4eb4-97dc-57d42b4f34f5-bundle\") pod \"805de4b7130efd4fbe2290d0c51d78e6295b83d77efc3ff2d1b014ffe7sstbd\" (UID: \"ec8df89a-256d-4eb4-97dc-57d42b4f34f5\") " pod="openstack-operators/805de4b7130efd4fbe2290d0c51d78e6295b83d77efc3ff2d1b014ffe7sstbd" Feb 03 07:02:12 crc kubenswrapper[4998]: I0203 07:02:12.275553 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdq69\" (UniqueName: \"kubernetes.io/projected/ec8df89a-256d-4eb4-97dc-57d42b4f34f5-kube-api-access-jdq69\") pod \"805de4b7130efd4fbe2290d0c51d78e6295b83d77efc3ff2d1b014ffe7sstbd\" (UID: \"ec8df89a-256d-4eb4-97dc-57d42b4f34f5\") " pod="openstack-operators/805de4b7130efd4fbe2290d0c51d78e6295b83d77efc3ff2d1b014ffe7sstbd" Feb 03 07:02:12 crc kubenswrapper[4998]: I0203 07:02:12.276052 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/ec8df89a-256d-4eb4-97dc-57d42b4f34f5-util\") pod \"805de4b7130efd4fbe2290d0c51d78e6295b83d77efc3ff2d1b014ffe7sstbd\" (UID: \"ec8df89a-256d-4eb4-97dc-57d42b4f34f5\") " pod="openstack-operators/805de4b7130efd4fbe2290d0c51d78e6295b83d77efc3ff2d1b014ffe7sstbd" Feb 03 07:02:12 crc kubenswrapper[4998]: I0203 07:02:12.276105 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ec8df89a-256d-4eb4-97dc-57d42b4f34f5-bundle\") pod \"805de4b7130efd4fbe2290d0c51d78e6295b83d77efc3ff2d1b014ffe7sstbd\" (UID: \"ec8df89a-256d-4eb4-97dc-57d42b4f34f5\") " pod="openstack-operators/805de4b7130efd4fbe2290d0c51d78e6295b83d77efc3ff2d1b014ffe7sstbd" Feb 03 07:02:12 crc kubenswrapper[4998]: I0203 07:02:12.293861 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jdq69\" (UniqueName: \"kubernetes.io/projected/ec8df89a-256d-4eb4-97dc-57d42b4f34f5-kube-api-access-jdq69\") pod \"805de4b7130efd4fbe2290d0c51d78e6295b83d77efc3ff2d1b014ffe7sstbd\" (UID: \"ec8df89a-256d-4eb4-97dc-57d42b4f34f5\") " pod="openstack-operators/805de4b7130efd4fbe2290d0c51d78e6295b83d77efc3ff2d1b014ffe7sstbd" Feb 03 07:02:12 crc kubenswrapper[4998]: I0203 07:02:12.418912 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/805de4b7130efd4fbe2290d0c51d78e6295b83d77efc3ff2d1b014ffe7sstbd" Feb 03 07:02:12 crc kubenswrapper[4998]: I0203 07:02:12.754284 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:02:12 crc kubenswrapper[4998]: I0203 07:02:12.754557 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:02:12 crc kubenswrapper[4998]: I0203 07:02:12.855359 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/805de4b7130efd4fbe2290d0c51d78e6295b83d77efc3ff2d1b014ffe7sstbd"] Feb 03 07:02:12 crc kubenswrapper[4998]: W0203 07:02:12.863185 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podec8df89a_256d_4eb4_97dc_57d42b4f34f5.slice/crio-f3addb1d5ad39b95394adf444519e72a7aa278e0d00a7780538eaf4e2d72573b WatchSource:0}: Error finding container f3addb1d5ad39b95394adf444519e72a7aa278e0d00a7780538eaf4e2d72573b: Status 404 returned error can't find the container with id f3addb1d5ad39b95394adf444519e72a7aa278e0d00a7780538eaf4e2d72573b Feb 03 07:02:13 crc kubenswrapper[4998]: I0203 07:02:13.850993 4998 generic.go:334] "Generic (PLEG): container finished" podID="ec8df89a-256d-4eb4-97dc-57d42b4f34f5" containerID="463b12ea740abd257f89ffcfab1afa7cbc7fff0c6884b6447d36131c26820885" exitCode=0 Feb 03 07:02:13 crc kubenswrapper[4998]: I0203 07:02:13.851085 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/805de4b7130efd4fbe2290d0c51d78e6295b83d77efc3ff2d1b014ffe7sstbd" event={"ID":"ec8df89a-256d-4eb4-97dc-57d42b4f34f5","Type":"ContainerDied","Data":"463b12ea740abd257f89ffcfab1afa7cbc7fff0c6884b6447d36131c26820885"} Feb 03 
07:02:13 crc kubenswrapper[4998]: I0203 07:02:13.851297 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/805de4b7130efd4fbe2290d0c51d78e6295b83d77efc3ff2d1b014ffe7sstbd" event={"ID":"ec8df89a-256d-4eb4-97dc-57d42b4f34f5","Type":"ContainerStarted","Data":"f3addb1d5ad39b95394adf444519e72a7aa278e0d00a7780538eaf4e2d72573b"} Feb 03 07:02:14 crc kubenswrapper[4998]: I0203 07:02:14.859985 4998 generic.go:334] "Generic (PLEG): container finished" podID="ec8df89a-256d-4eb4-97dc-57d42b4f34f5" containerID="83be8cd5e86d54c917b6aff2cdd96a258a429c1fde801ef39ced5ce48bc23516" exitCode=0 Feb 03 07:02:14 crc kubenswrapper[4998]: I0203 07:02:14.860042 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/805de4b7130efd4fbe2290d0c51d78e6295b83d77efc3ff2d1b014ffe7sstbd" event={"ID":"ec8df89a-256d-4eb4-97dc-57d42b4f34f5","Type":"ContainerDied","Data":"83be8cd5e86d54c917b6aff2cdd96a258a429c1fde801ef39ced5ce48bc23516"} Feb 03 07:02:15 crc kubenswrapper[4998]: I0203 07:02:15.870362 4998 generic.go:334] "Generic (PLEG): container finished" podID="ec8df89a-256d-4eb4-97dc-57d42b4f34f5" containerID="7d6f4c48965e9eb099f34f0bf5fcfd7bf13381d14331015247116e12c746d25d" exitCode=0 Feb 03 07:02:15 crc kubenswrapper[4998]: I0203 07:02:15.870405 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/805de4b7130efd4fbe2290d0c51d78e6295b83d77efc3ff2d1b014ffe7sstbd" event={"ID":"ec8df89a-256d-4eb4-97dc-57d42b4f34f5","Type":"ContainerDied","Data":"7d6f4c48965e9eb099f34f0bf5fcfd7bf13381d14331015247116e12c746d25d"} Feb 03 07:02:17 crc kubenswrapper[4998]: I0203 07:02:17.105974 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/805de4b7130efd4fbe2290d0c51d78e6295b83d77efc3ff2d1b014ffe7sstbd" Feb 03 07:02:17 crc kubenswrapper[4998]: I0203 07:02:17.241224 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ec8df89a-256d-4eb4-97dc-57d42b4f34f5-bundle\") pod \"ec8df89a-256d-4eb4-97dc-57d42b4f34f5\" (UID: \"ec8df89a-256d-4eb4-97dc-57d42b4f34f5\") " Feb 03 07:02:17 crc kubenswrapper[4998]: I0203 07:02:17.241958 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec8df89a-256d-4eb4-97dc-57d42b4f34f5-bundle" (OuterVolumeSpecName: "bundle") pod "ec8df89a-256d-4eb4-97dc-57d42b4f34f5" (UID: "ec8df89a-256d-4eb4-97dc-57d42b4f34f5"). InnerVolumeSpecName "bundle". 
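The three ContainerDied events (463b12..., 83be8c..., 7d6f4c...) with exitCode=0 are the bundle pod's containers finishing one after another; once all of them have terminated cleanly the pod is torn down. A sketch of the completion check a watcher could run against the pod's status, using the corev1 types:

package podspec

import corev1 "k8s.io/api/core/v1"

// unpacked reports whether every container in the pod has terminated with
// exit code 0, the condition the ContainerDied events above reach one by one.
func unpacked(pod *corev1.Pod) bool {
	if len(pod.Status.ContainerStatuses) == 0 {
		return false
	}
	for _, cs := range pod.Status.ContainerStatuses {
		t := cs.State.Terminated
		if t == nil || t.ExitCode != 0 {
			return false // still running, or failed
		}
	}
	return true
}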
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:02:17 crc kubenswrapper[4998]: I0203 07:02:17.242116 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ec8df89a-256d-4eb4-97dc-57d42b4f34f5-util\") pod \"ec8df89a-256d-4eb4-97dc-57d42b4f34f5\" (UID: \"ec8df89a-256d-4eb4-97dc-57d42b4f34f5\") " Feb 03 07:02:17 crc kubenswrapper[4998]: I0203 07:02:17.242280 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jdq69\" (UniqueName: \"kubernetes.io/projected/ec8df89a-256d-4eb4-97dc-57d42b4f34f5-kube-api-access-jdq69\") pod \"ec8df89a-256d-4eb4-97dc-57d42b4f34f5\" (UID: \"ec8df89a-256d-4eb4-97dc-57d42b4f34f5\") " Feb 03 07:02:17 crc kubenswrapper[4998]: I0203 07:02:17.242755 4998 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ec8df89a-256d-4eb4-97dc-57d42b4f34f5-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:02:17 crc kubenswrapper[4998]: I0203 07:02:17.248421 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec8df89a-256d-4eb4-97dc-57d42b4f34f5-kube-api-access-jdq69" (OuterVolumeSpecName: "kube-api-access-jdq69") pod "ec8df89a-256d-4eb4-97dc-57d42b4f34f5" (UID: "ec8df89a-256d-4eb4-97dc-57d42b4f34f5"). InnerVolumeSpecName "kube-api-access-jdq69". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:02:17 crc kubenswrapper[4998]: I0203 07:02:17.255439 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec8df89a-256d-4eb4-97dc-57d42b4f34f5-util" (OuterVolumeSpecName: "util") pod "ec8df89a-256d-4eb4-97dc-57d42b4f34f5" (UID: "ec8df89a-256d-4eb4-97dc-57d42b4f34f5"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:02:17 crc kubenswrapper[4998]: I0203 07:02:17.344306 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jdq69\" (UniqueName: \"kubernetes.io/projected/ec8df89a-256d-4eb4-97dc-57d42b4f34f5-kube-api-access-jdq69\") on node \"crc\" DevicePath \"\"" Feb 03 07:02:17 crc kubenswrapper[4998]: I0203 07:02:17.344344 4998 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ec8df89a-256d-4eb4-97dc-57d42b4f34f5-util\") on node \"crc\" DevicePath \"\"" Feb 03 07:02:17 crc kubenswrapper[4998]: I0203 07:02:17.882323 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/805de4b7130efd4fbe2290d0c51d78e6295b83d77efc3ff2d1b014ffe7sstbd" event={"ID":"ec8df89a-256d-4eb4-97dc-57d42b4f34f5","Type":"ContainerDied","Data":"f3addb1d5ad39b95394adf444519e72a7aa278e0d00a7780538eaf4e2d72573b"} Feb 03 07:02:17 crc kubenswrapper[4998]: I0203 07:02:17.882363 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f3addb1d5ad39b95394adf444519e72a7aa278e0d00a7780538eaf4e2d72573b" Feb 03 07:02:17 crc kubenswrapper[4998]: I0203 07:02:17.882611 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/805de4b7130efd4fbe2290d0c51d78e6295b83d77efc3ff2d1b014ffe7sstbd" Feb 03 07:02:20 crc kubenswrapper[4998]: I0203 07:02:20.470659 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-init-6bf6665fd-l22vm"] Feb 03 07:02:20 crc kubenswrapper[4998]: E0203 07:02:20.471604 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec8df89a-256d-4eb4-97dc-57d42b4f34f5" containerName="pull" Feb 03 07:02:20 crc kubenswrapper[4998]: I0203 07:02:20.471621 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec8df89a-256d-4eb4-97dc-57d42b4f34f5" containerName="pull" Feb 03 07:02:20 crc kubenswrapper[4998]: E0203 07:02:20.471633 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec8df89a-256d-4eb4-97dc-57d42b4f34f5" containerName="extract" Feb 03 07:02:20 crc kubenswrapper[4998]: I0203 07:02:20.471641 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec8df89a-256d-4eb4-97dc-57d42b4f34f5" containerName="extract" Feb 03 07:02:20 crc kubenswrapper[4998]: E0203 07:02:20.471665 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec8df89a-256d-4eb4-97dc-57d42b4f34f5" containerName="util" Feb 03 07:02:20 crc kubenswrapper[4998]: I0203 07:02:20.471671 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec8df89a-256d-4eb4-97dc-57d42b4f34f5" containerName="util" Feb 03 07:02:20 crc kubenswrapper[4998]: I0203 07:02:20.471838 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec8df89a-256d-4eb4-97dc-57d42b4f34f5" containerName="extract" Feb 03 07:02:20 crc kubenswrapper[4998]: I0203 07:02:20.472396 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-6bf6665fd-l22vm" Feb 03 07:02:20 crc kubenswrapper[4998]: I0203 07:02:20.475620 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-init-dockercfg-nrh2r" Feb 03 07:02:20 crc kubenswrapper[4998]: I0203 07:02:20.483258 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mrt76\" (UniqueName: \"kubernetes.io/projected/415bf1be-506f-4fc6-b7f5-abf9a0134900-kube-api-access-mrt76\") pod \"openstack-operator-controller-init-6bf6665fd-l22vm\" (UID: \"415bf1be-506f-4fc6-b7f5-abf9a0134900\") " pod="openstack-operators/openstack-operator-controller-init-6bf6665fd-l22vm" Feb 03 07:02:20 crc kubenswrapper[4998]: I0203 07:02:20.487313 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-6bf6665fd-l22vm"] Feb 03 07:02:20 crc kubenswrapper[4998]: I0203 07:02:20.586621 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mrt76\" (UniqueName: \"kubernetes.io/projected/415bf1be-506f-4fc6-b7f5-abf9a0134900-kube-api-access-mrt76\") pod \"openstack-operator-controller-init-6bf6665fd-l22vm\" (UID: \"415bf1be-506f-4fc6-b7f5-abf9a0134900\") " pod="openstack-operators/openstack-operator-controller-init-6bf6665fd-l22vm" Feb 03 07:02:20 crc kubenswrapper[4998]: I0203 07:02:20.619838 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mrt76\" (UniqueName: \"kubernetes.io/projected/415bf1be-506f-4fc6-b7f5-abf9a0134900-kube-api-access-mrt76\") pod \"openstack-operator-controller-init-6bf6665fd-l22vm\" (UID: \"415bf1be-506f-4fc6-b7f5-abf9a0134900\") 
" pod="openstack-operators/openstack-operator-controller-init-6bf6665fd-l22vm" Feb 03 07:02:20 crc kubenswrapper[4998]: I0203 07:02:20.790761 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-6bf6665fd-l22vm" Feb 03 07:02:21 crc kubenswrapper[4998]: I0203 07:02:21.223716 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-6bf6665fd-l22vm"] Feb 03 07:02:21 crc kubenswrapper[4998]: I0203 07:02:21.908258 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-6bf6665fd-l22vm" event={"ID":"415bf1be-506f-4fc6-b7f5-abf9a0134900","Type":"ContainerStarted","Data":"b7847a19cb3d478c3d7f9bbe1d356e9bdc3917931e14cba13023c72a8c6b26af"} Feb 03 07:02:25 crc kubenswrapper[4998]: I0203 07:02:25.932736 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-6bf6665fd-l22vm" event={"ID":"415bf1be-506f-4fc6-b7f5-abf9a0134900","Type":"ContainerStarted","Data":"ee5f9cc12a7a68ceb09cf2733a775f91ad0254232d22361deae68c9703585aed"} Feb 03 07:02:25 crc kubenswrapper[4998]: I0203 07:02:25.933379 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-init-6bf6665fd-l22vm" Feb 03 07:02:25 crc kubenswrapper[4998]: I0203 07:02:25.968965 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-init-6bf6665fd-l22vm" podStartSLOduration=1.890815862 podStartE2EDuration="5.968942854s" podCreationTimestamp="2026-02-03 07:02:20 +0000 UTC" firstStartedPulling="2026-02-03 07:02:21.228677881 +0000 UTC m=+979.515371687" lastFinishedPulling="2026-02-03 07:02:25.306804863 +0000 UTC m=+983.593498679" observedRunningTime="2026-02-03 07:02:25.96671349 +0000 UTC m=+984.253407306" watchObservedRunningTime="2026-02-03 07:02:25.968942854 +0000 UTC m=+984.255636670" Feb 03 07:02:30 crc kubenswrapper[4998]: I0203 07:02:30.793203 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-init-6bf6665fd-l22vm" Feb 03 07:02:42 crc kubenswrapper[4998]: I0203 07:02:42.754633 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:02:42 crc kubenswrapper[4998]: I0203 07:02:42.755259 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:02:42 crc kubenswrapper[4998]: I0203 07:02:42.755315 4998 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" Feb 03 07:02:42 crc kubenswrapper[4998]: I0203 07:02:42.755982 4998 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"bb96b94f015f59d0a44787640ad68f2b0d9538506248aed8ff45ab4238aa5576"} 
pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 03 07:02:42 crc kubenswrapper[4998]: I0203 07:02:42.756048 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" containerID="cri-o://bb96b94f015f59d0a44787640ad68f2b0d9538506248aed8ff45ab4238aa5576" gracePeriod=600 Feb 03 07:02:43 crc kubenswrapper[4998]: I0203 07:02:43.041841 4998 generic.go:334] "Generic (PLEG): container finished" podID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerID="bb96b94f015f59d0a44787640ad68f2b0d9538506248aed8ff45ab4238aa5576" exitCode=0 Feb 03 07:02:43 crc kubenswrapper[4998]: I0203 07:02:43.041886 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerDied","Data":"bb96b94f015f59d0a44787640ad68f2b0d9538506248aed8ff45ab4238aa5576"} Feb 03 07:02:43 crc kubenswrapper[4998]: I0203 07:02:43.042293 4998 scope.go:117] "RemoveContainer" containerID="a715304a8e506b324a18b7e44d744d3e43f59a2529eede37e78f5f26ef938332" Feb 03 07:02:44 crc kubenswrapper[4998]: I0203 07:02:44.052968 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerStarted","Data":"03cdfa2638a496a4e32ad344d4242ab7d52b707f81b68db6d7febb228a19986d"} Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.729645 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-jqjg8"] Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.731160 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-jqjg8" Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.733246 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-zv5cg" Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.744761 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-8d874c8fc-cqsb2"] Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.745485 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-cqsb2" Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.747203 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-7fxh9" Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.760302 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-6d9697b7f4-r7xwj"] Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.761340 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-r7xwj" Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.762811 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-bkbrn" Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.769393 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-jqjg8"] Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.776667 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-8d874c8fc-cqsb2"] Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.785399 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-8886f4c47-gztww"] Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.786369 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-gztww" Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.788820 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-ffzvh" Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.791427 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-6d9697b7f4-r7xwj"] Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.800590 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-69d6db494d-5jdj9"] Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.801398 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-5jdj9" Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.804835 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-rvl85" Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.812964 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-8886f4c47-gztww"] Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.824337 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-69d6db494d-5jdj9"] Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.847617 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5fb775575f-6jgrc"] Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.848375 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-6jgrc" Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.850381 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-nmzsx" Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.870999 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-79955696d6-7fllq"] Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.871977 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-79955696d6-7fllq" Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.874410 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.877237 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-cdk8f" Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.884273 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cxlt6\" (UniqueName: \"kubernetes.io/projected/5254fd85-6147-4f7f-9ed7-d5491795590e-kube-api-access-cxlt6\") pod \"barbican-operator-controller-manager-7b6c4d8c5f-jqjg8\" (UID: \"5254fd85-6147-4f7f-9ed7-d5491795590e\") " pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-jqjg8" Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.884316 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vwml\" (UniqueName: \"kubernetes.io/projected/555310a7-1022-4224-8329-56cf0b598983-kube-api-access-4vwml\") pod \"cinder-operator-controller-manager-8d874c8fc-cqsb2\" (UID: \"555310a7-1022-4224-8329-56cf0b598983\") " pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-cqsb2" Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.884726 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7gbtd\" (UniqueName: \"kubernetes.io/projected/15e1bb02-71fd-439f-b8b0-769aebffd30e-kube-api-access-7gbtd\") pod \"designate-operator-controller-manager-6d9697b7f4-r7xwj\" (UID: \"15e1bb02-71fd-439f-b8b0-769aebffd30e\") " pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-r7xwj" Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.891500 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5fb775575f-6jgrc"] Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.906048 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-t2v45"] Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.906883 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-t2v45" Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.911014 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-mjvrk" Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.913367 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-79955696d6-7fllq"] Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.933754 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-t2v45"] Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.964117 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-84f48565d4-vxnv2"] Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.964987 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-84f48565d4-vxnv2"] Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.965076 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-vxnv2" Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.967119 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-jn5wx" Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.985609 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7gbtd\" (UniqueName: \"kubernetes.io/projected/15e1bb02-71fd-439f-b8b0-769aebffd30e-kube-api-access-7gbtd\") pod \"designate-operator-controller-manager-6d9697b7f4-r7xwj\" (UID: \"15e1bb02-71fd-439f-b8b0-769aebffd30e\") " pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-r7xwj" Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.985720 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9fdt5\" (UniqueName: \"kubernetes.io/projected/e6885e8a-0fe6-44be-93e6-b5c663958e1f-kube-api-access-9fdt5\") pod \"infra-operator-controller-manager-79955696d6-7fllq\" (UID: \"e6885e8a-0fe6-44be-93e6-b5c663958e1f\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-7fllq" Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.985748 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4zcp\" (UniqueName: \"kubernetes.io/projected/c3e9afd3-207f-4a98-ab9a-1abb166da517-kube-api-access-s4zcp\") pod \"horizon-operator-controller-manager-5fb775575f-6jgrc\" (UID: \"c3e9afd3-207f-4a98-ab9a-1abb166da517\") " pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-6jgrc" Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.985800 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dkxfb\" (UniqueName: \"kubernetes.io/projected/00c60d3a-58c3-4ad9-a015-1dacdebef5dc-kube-api-access-dkxfb\") pod \"glance-operator-controller-manager-8886f4c47-gztww\" (UID: \"00c60d3a-58c3-4ad9-a015-1dacdebef5dc\") " pod="openstack-operators/glance-operator-controller-manager-8886f4c47-gztww" Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.985827 4998 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-cxlt6\" (UniqueName: \"kubernetes.io/projected/5254fd85-6147-4f7f-9ed7-d5491795590e-kube-api-access-cxlt6\") pod \"barbican-operator-controller-manager-7b6c4d8c5f-jqjg8\" (UID: \"5254fd85-6147-4f7f-9ed7-d5491795590e\") " pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-jqjg8" Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.985852 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e6885e8a-0fe6-44be-93e6-b5c663958e1f-cert\") pod \"infra-operator-controller-manager-79955696d6-7fllq\" (UID: \"e6885e8a-0fe6-44be-93e6-b5c663958e1f\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-7fllq" Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.985891 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vwml\" (UniqueName: \"kubernetes.io/projected/555310a7-1022-4224-8329-56cf0b598983-kube-api-access-4vwml\") pod \"cinder-operator-controller-manager-8d874c8fc-cqsb2\" (UID: \"555310a7-1022-4224-8329-56cf0b598983\") " pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-cqsb2" Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.985928 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rdr2l\" (UniqueName: \"kubernetes.io/projected/44f95b1e-8d2d-4db0-8434-b4ae01d46f98-kube-api-access-rdr2l\") pod \"heat-operator-controller-manager-69d6db494d-5jdj9\" (UID: \"44f95b1e-8d2d-4db0-8434-b4ae01d46f98\") " pod="openstack-operators/heat-operator-controller-manager-69d6db494d-5jdj9" Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.987948 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67bf948998-rrghs"] Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.988727 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-rrghs" Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.994419 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-849s7" Feb 03 07:02:49 crc kubenswrapper[4998]: I0203 07:02:49.998865 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-7dd968899f-qrtp4"] Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:49.999750 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-qrtp4" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.004482 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-sgmnx" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.044693 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vwml\" (UniqueName: \"kubernetes.io/projected/555310a7-1022-4224-8329-56cf0b598983-kube-api-access-4vwml\") pod \"cinder-operator-controller-manager-8d874c8fc-cqsb2\" (UID: \"555310a7-1022-4224-8329-56cf0b598983\") " pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-cqsb2" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.045537 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cxlt6\" (UniqueName: \"kubernetes.io/projected/5254fd85-6147-4f7f-9ed7-d5491795590e-kube-api-access-cxlt6\") pod \"barbican-operator-controller-manager-7b6c4d8c5f-jqjg8\" (UID: \"5254fd85-6147-4f7f-9ed7-d5491795590e\") " pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-jqjg8" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.046599 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7dd968899f-qrtp4"] Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.055726 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7gbtd\" (UniqueName: \"kubernetes.io/projected/15e1bb02-71fd-439f-b8b0-769aebffd30e-kube-api-access-7gbtd\") pod \"designate-operator-controller-manager-6d9697b7f4-r7xwj\" (UID: \"15e1bb02-71fd-439f-b8b0-769aebffd30e\") " pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-r7xwj" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.056835 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-jqjg8" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.082675 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-cqsb2" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.088052 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67bf948998-rrghs"] Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.090128 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9fdt5\" (UniqueName: \"kubernetes.io/projected/e6885e8a-0fe6-44be-93e6-b5c663958e1f-kube-api-access-9fdt5\") pod \"infra-operator-controller-manager-79955696d6-7fllq\" (UID: \"e6885e8a-0fe6-44be-93e6-b5c663958e1f\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-7fllq" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.090175 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4zcp\" (UniqueName: \"kubernetes.io/projected/c3e9afd3-207f-4a98-ab9a-1abb166da517-kube-api-access-s4zcp\") pod \"horizon-operator-controller-manager-5fb775575f-6jgrc\" (UID: \"c3e9afd3-207f-4a98-ab9a-1abb166da517\") " pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-6jgrc" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.090210 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dkxfb\" (UniqueName: \"kubernetes.io/projected/00c60d3a-58c3-4ad9-a015-1dacdebef5dc-kube-api-access-dkxfb\") pod \"glance-operator-controller-manager-8886f4c47-gztww\" (UID: \"00c60d3a-58c3-4ad9-a015-1dacdebef5dc\") " pod="openstack-operators/glance-operator-controller-manager-8886f4c47-gztww" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.090233 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e6885e8a-0fe6-44be-93e6-b5c663958e1f-cert\") pod \"infra-operator-controller-manager-79955696d6-7fllq\" (UID: \"e6885e8a-0fe6-44be-93e6-b5c663958e1f\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-7fllq" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.090276 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nspdg\" (UniqueName: \"kubernetes.io/projected/6966131b-ab1c-4de2-9a32-8bcbd1d26c4a-kube-api-access-nspdg\") pod \"ironic-operator-controller-manager-5f4b8bd54d-t2v45\" (UID: \"6966131b-ab1c-4de2-9a32-8bcbd1d26c4a\") " pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-t2v45" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.090314 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdr2l\" (UniqueName: \"kubernetes.io/projected/44f95b1e-8d2d-4db0-8434-b4ae01d46f98-kube-api-access-rdr2l\") pod \"heat-operator-controller-manager-69d6db494d-5jdj9\" (UID: \"44f95b1e-8d2d-4db0-8434-b4ae01d46f98\") " pod="openstack-operators/heat-operator-controller-manager-69d6db494d-5jdj9" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.090345 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66x44\" (UniqueName: \"kubernetes.io/projected/c96b6def-54b9-4b76-870c-7e504e58cca9-kube-api-access-66x44\") pod \"keystone-operator-controller-manager-84f48565d4-vxnv2\" (UID: \"c96b6def-54b9-4b76-870c-7e504e58cca9\") " pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-vxnv2" Feb 03 07:02:50 crc kubenswrapper[4998]: 
E0203 07:02:50.090554 4998 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Feb 03 07:02:50 crc kubenswrapper[4998]: E0203 07:02:50.090634 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e6885e8a-0fe6-44be-93e6-b5c663958e1f-cert podName:e6885e8a-0fe6-44be-93e6-b5c663958e1f nodeName:}" failed. No retries permitted until 2026-02-03 07:02:50.59061139 +0000 UTC m=+1008.877305196 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e6885e8a-0fe6-44be-93e6-b5c663958e1f-cert") pod "infra-operator-controller-manager-79955696d6-7fllq" (UID: "e6885e8a-0fe6-44be-93e6-b5c663958e1f") : secret "infra-operator-webhook-server-cert" not found Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.097589 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-r7xwj" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.134640 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdr2l\" (UniqueName: \"kubernetes.io/projected/44f95b1e-8d2d-4db0-8434-b4ae01d46f98-kube-api-access-rdr2l\") pod \"heat-operator-controller-manager-69d6db494d-5jdj9\" (UID: \"44f95b1e-8d2d-4db0-8434-b4ae01d46f98\") " pod="openstack-operators/heat-operator-controller-manager-69d6db494d-5jdj9" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.136833 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-5jdj9" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.137752 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9fdt5\" (UniqueName: \"kubernetes.io/projected/e6885e8a-0fe6-44be-93e6-b5c663958e1f-kube-api-access-9fdt5\") pod \"infra-operator-controller-manager-79955696d6-7fllq\" (UID: \"e6885e8a-0fe6-44be-93e6-b5c663958e1f\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-7fllq" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.139852 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-585dbc889-xgcxv"] Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.143595 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4zcp\" (UniqueName: \"kubernetes.io/projected/c3e9afd3-207f-4a98-ab9a-1abb166da517-kube-api-access-s4zcp\") pod \"horizon-operator-controller-manager-5fb775575f-6jgrc\" (UID: \"c3e9afd3-207f-4a98-ab9a-1abb166da517\") " pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-6jgrc" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.143714 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dkxfb\" (UniqueName: \"kubernetes.io/projected/00c60d3a-58c3-4ad9-a015-1dacdebef5dc-kube-api-access-dkxfb\") pod \"glance-operator-controller-manager-8886f4c47-gztww\" (UID: \"00c60d3a-58c3-4ad9-a015-1dacdebef5dc\") " pod="openstack-operators/glance-operator-controller-manager-8886f4c47-gztww" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.147357 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-xgcxv" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.154359 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-jhf45" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.155284 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-55bff696bd-c8rqm"] Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.156597 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-c8rqm" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.160310 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-mqxmr" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.168767 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-6jgrc" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.188598 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-585dbc889-xgcxv"] Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.191937 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nspdg\" (UniqueName: \"kubernetes.io/projected/6966131b-ab1c-4de2-9a32-8bcbd1d26c4a-kube-api-access-nspdg\") pod \"ironic-operator-controller-manager-5f4b8bd54d-t2v45\" (UID: \"6966131b-ab1c-4de2-9a32-8bcbd1d26c4a\") " pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-t2v45" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.192075 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66x44\" (UniqueName: \"kubernetes.io/projected/c96b6def-54b9-4b76-870c-7e504e58cca9-kube-api-access-66x44\") pod \"keystone-operator-controller-manager-84f48565d4-vxnv2\" (UID: \"c96b6def-54b9-4b76-870c-7e504e58cca9\") " pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-vxnv2" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.192161 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5jqz\" (UniqueName: \"kubernetes.io/projected/02d790b4-9b97-45e9-8efa-4cb81384bfae-kube-api-access-s5jqz\") pod \"manila-operator-controller-manager-7dd968899f-qrtp4\" (UID: \"02d790b4-9b97-45e9-8efa-4cb81384bfae\") " pod="openstack-operators/manila-operator-controller-manager-7dd968899f-qrtp4" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.192248 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxrln\" (UniqueName: \"kubernetes.io/projected/7c0e11e2-32c3-4d5b-889e-b5d55817c85c-kube-api-access-qxrln\") pod \"mariadb-operator-controller-manager-67bf948998-rrghs\" (UID: \"7c0e11e2-32c3-4d5b-889e-b5d55817c85c\") " pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-rrghs" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.209515 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-55bff696bd-c8rqm"] Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.220436 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-66x44\" (UniqueName: \"kubernetes.io/projected/c96b6def-54b9-4b76-870c-7e504e58cca9-kube-api-access-66x44\") pod \"keystone-operator-controller-manager-84f48565d4-vxnv2\" (UID: \"c96b6def-54b9-4b76-870c-7e504e58cca9\") " pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-vxnv2" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.227093 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6687f8d877-5dbkk"] Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.227971 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-5dbkk" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.232895 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-cblmn" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.236429 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nspdg\" (UniqueName: \"kubernetes.io/projected/6966131b-ab1c-4de2-9a32-8bcbd1d26c4a-kube-api-access-nspdg\") pod \"ironic-operator-controller-manager-5f4b8bd54d-t2v45\" (UID: \"6966131b-ab1c-4de2-9a32-8bcbd1d26c4a\") " pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-t2v45" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.258213 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6687f8d877-5dbkk"] Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.276717 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p"] Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.280459 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.282922 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.283577 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-fk75v" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.287248 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-vxnv2" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.292087 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-788c46999f-4sdk7"] Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.292929 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-4sdk7" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.294594 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-c7cpj" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.296715 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxrln\" (UniqueName: \"kubernetes.io/projected/7c0e11e2-32c3-4d5b-889e-b5d55817c85c-kube-api-access-qxrln\") pod \"mariadb-operator-controller-manager-67bf948998-rrghs\" (UID: \"7c0e11e2-32c3-4d5b-889e-b5d55817c85c\") " pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-rrghs" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.296831 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvj5r\" (UniqueName: \"kubernetes.io/projected/d838894e-3a4d-401c-b4d1-b4464d006b88-kube-api-access-qvj5r\") pod \"neutron-operator-controller-manager-585dbc889-xgcxv\" (UID: \"d838894e-3a4d-401c-b4d1-b4464d006b88\") " pod="openstack-operators/neutron-operator-controller-manager-585dbc889-xgcxv" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.296879 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6gtr\" (UniqueName: \"kubernetes.io/projected/447cd897-c504-49fa-82b8-5c205e002cfe-kube-api-access-c6gtr\") pod \"nova-operator-controller-manager-55bff696bd-c8rqm\" (UID: \"447cd897-c504-49fa-82b8-5c205e002cfe\") " pod="openstack-operators/nova-operator-controller-manager-55bff696bd-c8rqm" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.296898 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5jqz\" (UniqueName: \"kubernetes.io/projected/02d790b4-9b97-45e9-8efa-4cb81384bfae-kube-api-access-s5jqz\") pod \"manila-operator-controller-manager-7dd968899f-qrtp4\" (UID: \"02d790b4-9b97-45e9-8efa-4cb81384bfae\") " pod="openstack-operators/manila-operator-controller-manager-7dd968899f-qrtp4" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.311333 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-5b964cf4cd-98dqh"] Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.313226 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-98dqh" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.317796 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-788c46999f-4sdk7"] Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.337052 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qxrln\" (UniqueName: \"kubernetes.io/projected/7c0e11e2-32c3-4d5b-889e-b5d55817c85c-kube-api-access-qxrln\") pod \"mariadb-operator-controller-manager-67bf948998-rrghs\" (UID: \"7c0e11e2-32c3-4d5b-889e-b5d55817c85c\") " pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-rrghs" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.341922 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-qx9lv" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.347434 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5jqz\" (UniqueName: \"kubernetes.io/projected/02d790b4-9b97-45e9-8efa-4cb81384bfae-kube-api-access-s5jqz\") pod \"manila-operator-controller-manager-7dd968899f-qrtp4\" (UID: \"02d790b4-9b97-45e9-8efa-4cb81384bfae\") " pod="openstack-operators/manila-operator-controller-manager-7dd968899f-qrtp4" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.347820 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-rrghs" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.363687 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-5b964cf4cd-98dqh"] Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.369684 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p"] Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.398598 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7k88\" (UniqueName: \"kubernetes.io/projected/a954bf90-cac3-4896-bbaf-8ad98f3876d9-kube-api-access-r7k88\") pod \"openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p\" (UID: \"a954bf90-cac3-4896-bbaf-8ad98f3876d9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.399168 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gfc5z\" (UniqueName: \"kubernetes.io/projected/73dcbbad-3aa0-48d6-ac55-ad4443f781d3-kube-api-access-gfc5z\") pod \"ovn-operator-controller-manager-788c46999f-4sdk7\" (UID: \"73dcbbad-3aa0-48d6-ac55-ad4443f781d3\") " pod="openstack-operators/ovn-operator-controller-manager-788c46999f-4sdk7" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.399238 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8l92k\" (UniqueName: \"kubernetes.io/projected/154195a7-15d8-454e-8e95-2e5f3935d2da-kube-api-access-8l92k\") pod \"octavia-operator-controller-manager-6687f8d877-5dbkk\" (UID: \"154195a7-15d8-454e-8e95-2e5f3935d2da\") " pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-5dbkk" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 
07:02:50.399266 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvj5r\" (UniqueName: \"kubernetes.io/projected/d838894e-3a4d-401c-b4d1-b4464d006b88-kube-api-access-qvj5r\") pod \"neutron-operator-controller-manager-585dbc889-xgcxv\" (UID: \"d838894e-3a4d-401c-b4d1-b4464d006b88\") " pod="openstack-operators/neutron-operator-controller-manager-585dbc889-xgcxv" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.399342 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6gtr\" (UniqueName: \"kubernetes.io/projected/447cd897-c504-49fa-82b8-5c205e002cfe-kube-api-access-c6gtr\") pod \"nova-operator-controller-manager-55bff696bd-c8rqm\" (UID: \"447cd897-c504-49fa-82b8-5c205e002cfe\") " pod="openstack-operators/nova-operator-controller-manager-55bff696bd-c8rqm" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.399391 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a954bf90-cac3-4896-bbaf-8ad98f3876d9-cert\") pod \"openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p\" (UID: \"a954bf90-cac3-4896-bbaf-8ad98f3876d9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.409934 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-68fc8c869-46ddp"] Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.417059 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-46ddp" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.418225 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-gztww" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.418332 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6gtr\" (UniqueName: \"kubernetes.io/projected/447cd897-c504-49fa-82b8-5c205e002cfe-kube-api-access-c6gtr\") pod \"nova-operator-controller-manager-55bff696bd-c8rqm\" (UID: \"447cd897-c504-49fa-82b8-5c205e002cfe\") " pod="openstack-operators/nova-operator-controller-manager-55bff696bd-c8rqm" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.421864 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-68fc8c869-46ddp"] Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.423881 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-9x4dn" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.429662 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvj5r\" (UniqueName: \"kubernetes.io/projected/d838894e-3a4d-401c-b4d1-b4464d006b88-kube-api-access-qvj5r\") pod \"neutron-operator-controller-manager-585dbc889-xgcxv\" (UID: \"d838894e-3a4d-401c-b4d1-b4464d006b88\") " pod="openstack-operators/neutron-operator-controller-manager-585dbc889-xgcxv" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.463086 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-qrtp4" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.465906 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-64b5b76f97-4dj25"] Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.471383 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-4dj25" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.475481 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-9k5lb" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.482844 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-xgcxv" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.490339 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-64b5b76f97-4dj25"] Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.500492 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-c8rqm" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.500943 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a954bf90-cac3-4896-bbaf-8ad98f3876d9-cert\") pod \"openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p\" (UID: \"a954bf90-cac3-4896-bbaf-8ad98f3876d9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.500986 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7k88\" (UniqueName: \"kubernetes.io/projected/a954bf90-cac3-4896-bbaf-8ad98f3876d9-kube-api-access-r7k88\") pod \"openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p\" (UID: \"a954bf90-cac3-4896-bbaf-8ad98f3876d9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.501025 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gfc5z\" (UniqueName: \"kubernetes.io/projected/73dcbbad-3aa0-48d6-ac55-ad4443f781d3-kube-api-access-gfc5z\") pod \"ovn-operator-controller-manager-788c46999f-4sdk7\" (UID: \"73dcbbad-3aa0-48d6-ac55-ad4443f781d3\") " pod="openstack-operators/ovn-operator-controller-manager-788c46999f-4sdk7" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.501056 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8l92k\" (UniqueName: \"kubernetes.io/projected/154195a7-15d8-454e-8e95-2e5f3935d2da-kube-api-access-8l92k\") pod \"octavia-operator-controller-manager-6687f8d877-5dbkk\" (UID: \"154195a7-15d8-454e-8e95-2e5f3935d2da\") " pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-5dbkk" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.501078 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-777tk\" (UniqueName: \"kubernetes.io/projected/d627e8f0-589b-44a6-bf5c-9049ac454363-kube-api-access-777tk\") pod 
\"placement-operator-controller-manager-5b964cf4cd-98dqh\" (UID: \"d627e8f0-589b-44a6-bf5c-9049ac454363\") " pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-98dqh" Feb 03 07:02:50 crc kubenswrapper[4998]: E0203 07:02:50.501621 4998 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 03 07:02:50 crc kubenswrapper[4998]: E0203 07:02:50.501697 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a954bf90-cac3-4896-bbaf-8ad98f3876d9-cert podName:a954bf90-cac3-4896-bbaf-8ad98f3876d9 nodeName:}" failed. No retries permitted until 2026-02-03 07:02:51.001677055 +0000 UTC m=+1009.288370861 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a954bf90-cac3-4896-bbaf-8ad98f3876d9-cert") pod "openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p" (UID: "a954bf90-cac3-4896-bbaf-8ad98f3876d9") : secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 03 07:02:50 crc kubenswrapper[4998]: W0203 07:02:50.521436 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5254fd85_6147_4f7f_9ed7_d5491795590e.slice/crio-802805432e78d4c0629c9c50a7e1096657ebe53b12e3c426a33f96119f63ae94 WatchSource:0}: Error finding container 802805432e78d4c0629c9c50a7e1096657ebe53b12e3c426a33f96119f63ae94: Status 404 returned error can't find the container with id 802805432e78d4c0629c9c50a7e1096657ebe53b12e3c426a33f96119f63ae94 Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.525980 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gfc5z\" (UniqueName: \"kubernetes.io/projected/73dcbbad-3aa0-48d6-ac55-ad4443f781d3-kube-api-access-gfc5z\") pod \"ovn-operator-controller-manager-788c46999f-4sdk7\" (UID: \"73dcbbad-3aa0-48d6-ac55-ad4443f781d3\") " pod="openstack-operators/ovn-operator-controller-manager-788c46999f-4sdk7" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.527447 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-t2v45" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.547870 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-56f8bfcd9f-5fwsr"] Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.548950 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-5fwsr" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.552556 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8l92k\" (UniqueName: \"kubernetes.io/projected/154195a7-15d8-454e-8e95-2e5f3935d2da-kube-api-access-8l92k\") pod \"octavia-operator-controller-manager-6687f8d877-5dbkk\" (UID: \"154195a7-15d8-454e-8e95-2e5f3935d2da\") " pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-5dbkk" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.556763 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-5dbkk" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.557994 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-56f8bfcd9f-5fwsr"] Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.559585 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-phxj5" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.602888 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7pxn\" (UniqueName: \"kubernetes.io/projected/c24717b4-268b-49c0-82d1-b63ebcc16bf7-kube-api-access-g7pxn\") pod \"telemetry-operator-controller-manager-64b5b76f97-4dj25\" (UID: \"c24717b4-268b-49c0-82d1-b63ebcc16bf7\") " pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-4dj25" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.602972 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sh4sc\" (UniqueName: \"kubernetes.io/projected/68fc9816-016d-4444-8ebc-fb099a3e0d3c-kube-api-access-sh4sc\") pod \"swift-operator-controller-manager-68fc8c869-46ddp\" (UID: \"68fc9816-016d-4444-8ebc-fb099a3e0d3c\") " pod="openstack-operators/swift-operator-controller-manager-68fc8c869-46ddp" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.603067 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e6885e8a-0fe6-44be-93e6-b5c663958e1f-cert\") pod \"infra-operator-controller-manager-79955696d6-7fllq\" (UID: \"e6885e8a-0fe6-44be-93e6-b5c663958e1f\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-7fllq" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.603095 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-777tk\" (UniqueName: \"kubernetes.io/projected/d627e8f0-589b-44a6-bf5c-9049ac454363-kube-api-access-777tk\") pod \"placement-operator-controller-manager-5b964cf4cd-98dqh\" (UID: \"d627e8f0-589b-44a6-bf5c-9049ac454363\") " pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-98dqh" Feb 03 07:02:50 crc kubenswrapper[4998]: E0203 07:02:50.610011 4998 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Feb 03 07:02:50 crc kubenswrapper[4998]: E0203 07:02:50.610087 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e6885e8a-0fe6-44be-93e6-b5c663958e1f-cert podName:e6885e8a-0fe6-44be-93e6-b5c663958e1f nodeName:}" failed. No retries permitted until 2026-02-03 07:02:51.610068539 +0000 UTC m=+1009.896762345 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e6885e8a-0fe6-44be-93e6-b5c663958e1f-cert") pod "infra-operator-controller-manager-79955696d6-7fllq" (UID: "e6885e8a-0fe6-44be-93e6-b5c663958e1f") : secret "infra-operator-webhook-server-cert" not found Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.632613 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7k88\" (UniqueName: \"kubernetes.io/projected/a954bf90-cac3-4896-bbaf-8ad98f3876d9-kube-api-access-r7k88\") pod \"openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p\" (UID: \"a954bf90-cac3-4896-bbaf-8ad98f3876d9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.638489 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-564965969-j4xmh"] Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.672316 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-564965969-j4xmh" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.675007 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-xw9v6" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.692561 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-564965969-j4xmh"] Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.698872 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-4sdk7" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.715661 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7ktq\" (UniqueName: \"kubernetes.io/projected/d36616fb-0b5a-453d-b281-8df36af93238-kube-api-access-h7ktq\") pod \"test-operator-controller-manager-56f8bfcd9f-5fwsr\" (UID: \"d36616fb-0b5a-453d-b281-8df36af93238\") " pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-5fwsr" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.715844 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7pxn\" (UniqueName: \"kubernetes.io/projected/c24717b4-268b-49c0-82d1-b63ebcc16bf7-kube-api-access-g7pxn\") pod \"telemetry-operator-controller-manager-64b5b76f97-4dj25\" (UID: \"c24717b4-268b-49c0-82d1-b63ebcc16bf7\") " pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-4dj25" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.716342 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sh4sc\" (UniqueName: \"kubernetes.io/projected/68fc9816-016d-4444-8ebc-fb099a3e0d3c-kube-api-access-sh4sc\") pod \"swift-operator-controller-manager-68fc8c869-46ddp\" (UID: \"68fc9816-016d-4444-8ebc-fb099a3e0d3c\") " pod="openstack-operators/swift-operator-controller-manager-68fc8c869-46ddp" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.729584 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-777tk\" (UniqueName: \"kubernetes.io/projected/d627e8f0-589b-44a6-bf5c-9049ac454363-kube-api-access-777tk\") pod \"placement-operator-controller-manager-5b964cf4cd-98dqh\" (UID: 
\"d627e8f0-589b-44a6-bf5c-9049ac454363\") " pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-98dqh" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.749398 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-646f757d77-gxl8w"] Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.750412 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-646f757d77-gxl8w" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.758200 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-646f757d77-gxl8w"] Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.758940 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.759523 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.760637 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-7jp46" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.765528 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-jqjg8"] Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.769708 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sh4sc\" (UniqueName: \"kubernetes.io/projected/68fc9816-016d-4444-8ebc-fb099a3e0d3c-kube-api-access-sh4sc\") pod \"swift-operator-controller-manager-68fc8c869-46ddp\" (UID: \"68fc9816-016d-4444-8ebc-fb099a3e0d3c\") " pod="openstack-operators/swift-operator-controller-manager-68fc8c869-46ddp" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.779586 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-h2s2c"] Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.779648 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7pxn\" (UniqueName: \"kubernetes.io/projected/c24717b4-268b-49c0-82d1-b63ebcc16bf7-kube-api-access-g7pxn\") pod \"telemetry-operator-controller-manager-64b5b76f97-4dj25\" (UID: \"c24717b4-268b-49c0-82d1-b63ebcc16bf7\") " pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-4dj25" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.780941 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-h2s2c" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.786376 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-gtknf" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.809691 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-h2s2c"] Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.818340 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7ktq\" (UniqueName: \"kubernetes.io/projected/d36616fb-0b5a-453d-b281-8df36af93238-kube-api-access-h7ktq\") pod \"test-operator-controller-manager-56f8bfcd9f-5fwsr\" (UID: \"d36616fb-0b5a-453d-b281-8df36af93238\") " pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-5fwsr" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.818533 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7hkt\" (UniqueName: \"kubernetes.io/projected/30f4bbbd-5f3f-4f45-96cf-33fc6c63f458-kube-api-access-q7hkt\") pod \"watcher-operator-controller-manager-564965969-j4xmh\" (UID: \"30f4bbbd-5f3f-4f45-96cf-33fc6c63f458\") " pod="openstack-operators/watcher-operator-controller-manager-564965969-j4xmh" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.818749 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-4dj25" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.835591 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7ktq\" (UniqueName: \"kubernetes.io/projected/d36616fb-0b5a-453d-b281-8df36af93238-kube-api-access-h7ktq\") pod \"test-operator-controller-manager-56f8bfcd9f-5fwsr\" (UID: \"d36616fb-0b5a-453d-b281-8df36af93238\") " pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-5fwsr" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.853607 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-5fwsr" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.919419 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgdpc\" (UniqueName: \"kubernetes.io/projected/b4eb1b29-8a96-435e-ac43-e4ee5d349047-kube-api-access-rgdpc\") pod \"openstack-operator-controller-manager-646f757d77-gxl8w\" (UID: \"b4eb1b29-8a96-435e-ac43-e4ee5d349047\") " pod="openstack-operators/openstack-operator-controller-manager-646f757d77-gxl8w" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.919519 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-metrics-certs\") pod \"openstack-operator-controller-manager-646f757d77-gxl8w\" (UID: \"b4eb1b29-8a96-435e-ac43-e4ee5d349047\") " pod="openstack-operators/openstack-operator-controller-manager-646f757d77-gxl8w" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.919540 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7hkt\" (UniqueName: \"kubernetes.io/projected/30f4bbbd-5f3f-4f45-96cf-33fc6c63f458-kube-api-access-q7hkt\") pod \"watcher-operator-controller-manager-564965969-j4xmh\" (UID: \"30f4bbbd-5f3f-4f45-96cf-33fc6c63f458\") " pod="openstack-operators/watcher-operator-controller-manager-564965969-j4xmh" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.919584 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-webhook-certs\") pod \"openstack-operator-controller-manager-646f757d77-gxl8w\" (UID: \"b4eb1b29-8a96-435e-ac43-e4ee5d349047\") " pod="openstack-operators/openstack-operator-controller-manager-646f757d77-gxl8w" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.919608 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqjjc\" (UniqueName: \"kubernetes.io/projected/0ab700e8-fa08-4fb9-9ef8-4053055f99ee-kube-api-access-bqjjc\") pod \"rabbitmq-cluster-operator-manager-668c99d594-h2s2c\" (UID: \"0ab700e8-fa08-4fb9-9ef8-4053055f99ee\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-h2s2c" Feb 03 07:02:50 crc kubenswrapper[4998]: I0203 07:02:50.937014 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7hkt\" (UniqueName: \"kubernetes.io/projected/30f4bbbd-5f3f-4f45-96cf-33fc6c63f458-kube-api-access-q7hkt\") pod \"watcher-operator-controller-manager-564965969-j4xmh\" (UID: \"30f4bbbd-5f3f-4f45-96cf-33fc6c63f458\") " pod="openstack-operators/watcher-operator-controller-manager-564965969-j4xmh" Feb 03 07:02:51 crc kubenswrapper[4998]: I0203 07:02:51.020756 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-metrics-certs\") pod \"openstack-operator-controller-manager-646f757d77-gxl8w\" (UID: \"b4eb1b29-8a96-435e-ac43-e4ee5d349047\") " pod="openstack-operators/openstack-operator-controller-manager-646f757d77-gxl8w" Feb 03 07:02:51 crc kubenswrapper[4998]: I0203 07:02:51.020862 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: 
\"kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-webhook-certs\") pod \"openstack-operator-controller-manager-646f757d77-gxl8w\" (UID: \"b4eb1b29-8a96-435e-ac43-e4ee5d349047\") " pod="openstack-operators/openstack-operator-controller-manager-646f757d77-gxl8w" Feb 03 07:02:51 crc kubenswrapper[4998]: I0203 07:02:51.020899 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqjjc\" (UniqueName: \"kubernetes.io/projected/0ab700e8-fa08-4fb9-9ef8-4053055f99ee-kube-api-access-bqjjc\") pod \"rabbitmq-cluster-operator-manager-668c99d594-h2s2c\" (UID: \"0ab700e8-fa08-4fb9-9ef8-4053055f99ee\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-h2s2c" Feb 03 07:02:51 crc kubenswrapper[4998]: I0203 07:02:51.020958 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgdpc\" (UniqueName: \"kubernetes.io/projected/b4eb1b29-8a96-435e-ac43-e4ee5d349047-kube-api-access-rgdpc\") pod \"openstack-operator-controller-manager-646f757d77-gxl8w\" (UID: \"b4eb1b29-8a96-435e-ac43-e4ee5d349047\") " pod="openstack-operators/openstack-operator-controller-manager-646f757d77-gxl8w" Feb 03 07:02:51 crc kubenswrapper[4998]: E0203 07:02:51.020960 4998 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 03 07:02:51 crc kubenswrapper[4998]: I0203 07:02:51.020978 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a954bf90-cac3-4896-bbaf-8ad98f3876d9-cert\") pod \"openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p\" (UID: \"a954bf90-cac3-4896-bbaf-8ad98f3876d9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p" Feb 03 07:02:51 crc kubenswrapper[4998]: E0203 07:02:51.021030 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-metrics-certs podName:b4eb1b29-8a96-435e-ac43-e4ee5d349047 nodeName:}" failed. No retries permitted until 2026-02-03 07:02:51.521008598 +0000 UTC m=+1009.807702514 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-metrics-certs") pod "openstack-operator-controller-manager-646f757d77-gxl8w" (UID: "b4eb1b29-8a96-435e-ac43-e4ee5d349047") : secret "metrics-server-cert" not found Feb 03 07:02:51 crc kubenswrapper[4998]: E0203 07:02:51.021118 4998 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 03 07:02:51 crc kubenswrapper[4998]: E0203 07:02:51.021164 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a954bf90-cac3-4896-bbaf-8ad98f3876d9-cert podName:a954bf90-cac3-4896-bbaf-8ad98f3876d9 nodeName:}" failed. No retries permitted until 2026-02-03 07:02:52.021150362 +0000 UTC m=+1010.307844168 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a954bf90-cac3-4896-bbaf-8ad98f3876d9-cert") pod "openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p" (UID: "a954bf90-cac3-4896-bbaf-8ad98f3876d9") : secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 03 07:02:51 crc kubenswrapper[4998]: E0203 07:02:51.021204 4998 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 03 07:02:51 crc kubenswrapper[4998]: E0203 07:02:51.021227 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-webhook-certs podName:b4eb1b29-8a96-435e-ac43-e4ee5d349047 nodeName:}" failed. No retries permitted until 2026-02-03 07:02:51.521220374 +0000 UTC m=+1009.807914180 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-webhook-certs") pod "openstack-operator-controller-manager-646f757d77-gxl8w" (UID: "b4eb1b29-8a96-435e-ac43-e4ee5d349047") : secret "webhook-server-cert" not found Feb 03 07:02:51 crc kubenswrapper[4998]: I0203 07:02:51.022342 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-98dqh" Feb 03 07:02:51 crc kubenswrapper[4998]: I0203 07:02:51.034849 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-6d9697b7f4-r7xwj"] Feb 03 07:02:51 crc kubenswrapper[4998]: I0203 07:02:51.046070 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgdpc\" (UniqueName: \"kubernetes.io/projected/b4eb1b29-8a96-435e-ac43-e4ee5d349047-kube-api-access-rgdpc\") pod \"openstack-operator-controller-manager-646f757d77-gxl8w\" (UID: \"b4eb1b29-8a96-435e-ac43-e4ee5d349047\") " pod="openstack-operators/openstack-operator-controller-manager-646f757d77-gxl8w" Feb 03 07:02:51 crc kubenswrapper[4998]: I0203 07:02:51.054265 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-8d874c8fc-cqsb2"] Feb 03 07:02:51 crc kubenswrapper[4998]: I0203 07:02:51.056419 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqjjc\" (UniqueName: \"kubernetes.io/projected/0ab700e8-fa08-4fb9-9ef8-4053055f99ee-kube-api-access-bqjjc\") pod \"rabbitmq-cluster-operator-manager-668c99d594-h2s2c\" (UID: \"0ab700e8-fa08-4fb9-9ef8-4053055f99ee\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-h2s2c" Feb 03 07:02:51 crc kubenswrapper[4998]: I0203 07:02:51.062958 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-46ddp" Feb 03 07:02:51 crc kubenswrapper[4998]: I0203 07:02:51.144294 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-jqjg8" event={"ID":"5254fd85-6147-4f7f-9ed7-d5491795590e","Type":"ContainerStarted","Data":"802805432e78d4c0629c9c50a7e1096657ebe53b12e3c426a33f96119f63ae94"} Feb 03 07:02:51 crc kubenswrapper[4998]: I0203 07:02:51.145952 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-cqsb2" event={"ID":"555310a7-1022-4224-8329-56cf0b598983","Type":"ContainerStarted","Data":"9dc530cf6556415d1b6e8f92639ac1c532259fb58c4b189c8d20a098facf54f0"} Feb 03 07:02:51 crc kubenswrapper[4998]: I0203 07:02:51.172613 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-564965969-j4xmh" Feb 03 07:02:51 crc kubenswrapper[4998]: I0203 07:02:51.200393 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-h2s2c" Feb 03 07:02:51 crc kubenswrapper[4998]: I0203 07:02:51.260770 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-69d6db494d-5jdj9"] Feb 03 07:02:51 crc kubenswrapper[4998]: I0203 07:02:51.527934 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-metrics-certs\") pod \"openstack-operator-controller-manager-646f757d77-gxl8w\" (UID: \"b4eb1b29-8a96-435e-ac43-e4ee5d349047\") " pod="openstack-operators/openstack-operator-controller-manager-646f757d77-gxl8w" Feb 03 07:02:51 crc kubenswrapper[4998]: I0203 07:02:51.528026 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-webhook-certs\") pod \"openstack-operator-controller-manager-646f757d77-gxl8w\" (UID: \"b4eb1b29-8a96-435e-ac43-e4ee5d349047\") " pod="openstack-operators/openstack-operator-controller-manager-646f757d77-gxl8w" Feb 03 07:02:51 crc kubenswrapper[4998]: E0203 07:02:51.528208 4998 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 03 07:02:51 crc kubenswrapper[4998]: E0203 07:02:51.528270 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-webhook-certs podName:b4eb1b29-8a96-435e-ac43-e4ee5d349047 nodeName:}" failed. No retries permitted until 2026-02-03 07:02:52.528250698 +0000 UTC m=+1010.814944494 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-webhook-certs") pod "openstack-operator-controller-manager-646f757d77-gxl8w" (UID: "b4eb1b29-8a96-435e-ac43-e4ee5d349047") : secret "webhook-server-cert" not found Feb 03 07:02:51 crc kubenswrapper[4998]: E0203 07:02:51.528880 4998 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 03 07:02:51 crc kubenswrapper[4998]: E0203 07:02:51.528942 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-metrics-certs podName:b4eb1b29-8a96-435e-ac43-e4ee5d349047 nodeName:}" failed. No retries permitted until 2026-02-03 07:02:52.528924877 +0000 UTC m=+1010.815618683 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-metrics-certs") pod "openstack-operator-controller-manager-646f757d77-gxl8w" (UID: "b4eb1b29-8a96-435e-ac43-e4ee5d349047") : secret "metrics-server-cert" not found Feb 03 07:02:51 crc kubenswrapper[4998]: I0203 07:02:51.543003 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67bf948998-rrghs"] Feb 03 07:02:51 crc kubenswrapper[4998]: I0203 07:02:51.548582 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7dd968899f-qrtp4"] Feb 03 07:02:51 crc kubenswrapper[4998]: I0203 07:02:51.556679 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-84f48565d4-vxnv2"] Feb 03 07:02:51 crc kubenswrapper[4998]: W0203 07:02:51.558721 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7c0e11e2_32c3_4d5b_889e_b5d55817c85c.slice/crio-ec173f29c6703c2d16fb75aabc007faa6fffa6e9971c6245bfd4de7d77224acb WatchSource:0}: Error finding container ec173f29c6703c2d16fb75aabc007faa6fffa6e9971c6245bfd4de7d77224acb: Status 404 returned error can't find the container with id ec173f29c6703c2d16fb75aabc007faa6fffa6e9971c6245bfd4de7d77224acb Feb 03 07:02:51 crc kubenswrapper[4998]: I0203 07:02:51.564182 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5fb775575f-6jgrc"] Feb 03 07:02:51 crc kubenswrapper[4998]: I0203 07:02:51.628976 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e6885e8a-0fe6-44be-93e6-b5c663958e1f-cert\") pod \"infra-operator-controller-manager-79955696d6-7fllq\" (UID: \"e6885e8a-0fe6-44be-93e6-b5c663958e1f\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-7fllq" Feb 03 07:02:51 crc kubenswrapper[4998]: E0203 07:02:51.629206 4998 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Feb 03 07:02:51 crc kubenswrapper[4998]: E0203 07:02:51.629261 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e6885e8a-0fe6-44be-93e6-b5c663958e1f-cert podName:e6885e8a-0fe6-44be-93e6-b5c663958e1f nodeName:}" failed. No retries permitted until 2026-02-03 07:02:53.629247451 +0000 UTC m=+1011.915941257 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e6885e8a-0fe6-44be-93e6-b5c663958e1f-cert") pod "infra-operator-controller-manager-79955696d6-7fllq" (UID: "e6885e8a-0fe6-44be-93e6-b5c663958e1f") : secret "infra-operator-webhook-server-cert" not found Feb 03 07:02:51 crc kubenswrapper[4998]: I0203 07:02:51.733537 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-55bff696bd-c8rqm"] Feb 03 07:02:51 crc kubenswrapper[4998]: I0203 07:02:51.772366 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-8886f4c47-gztww"] Feb 03 07:02:51 crc kubenswrapper[4998]: I0203 07:02:51.951218 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-t2v45"] Feb 03 07:02:51 crc kubenswrapper[4998]: I0203 07:02:51.956600 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-788c46999f-4sdk7"] Feb 03 07:02:52 crc kubenswrapper[4998]: I0203 07:02:52.034618 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a954bf90-cac3-4896-bbaf-8ad98f3876d9-cert\") pod \"openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p\" (UID: \"a954bf90-cac3-4896-bbaf-8ad98f3876d9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p" Feb 03 07:02:52 crc kubenswrapper[4998]: E0203 07:02:52.034899 4998 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 03 07:02:52 crc kubenswrapper[4998]: E0203 07:02:52.034956 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a954bf90-cac3-4896-bbaf-8ad98f3876d9-cert podName:a954bf90-cac3-4896-bbaf-8ad98f3876d9 nodeName:}" failed. No retries permitted until 2026-02-03 07:02:54.034941102 +0000 UTC m=+1012.321634908 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a954bf90-cac3-4896-bbaf-8ad98f3876d9-cert") pod "openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p" (UID: "a954bf90-cac3-4896-bbaf-8ad98f3876d9") : secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 03 07:02:52 crc kubenswrapper[4998]: I0203 07:02:52.045308 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-564965969-j4xmh"] Feb 03 07:02:52 crc kubenswrapper[4998]: I0203 07:02:52.065640 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-h2s2c"] Feb 03 07:02:52 crc kubenswrapper[4998]: E0203 07:02:52.077867 4998 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:42ad717de1b82267d244b016e5491a5b66a5c3deb6b8c2906a379e1296a2c382,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-sh4sc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-68fc8c869-46ddp_openstack-operators(68fc9816-016d-4444-8ebc-fb099a3e0d3c): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Feb 03 07:02:52 crc kubenswrapper[4998]: I0203 07:02:52.085265 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-64b5b76f97-4dj25"] Feb 03 07:02:52 crc kubenswrapper[4998]: E0203 07:02:52.086636 4998 pod_workers.go:1301] "Error 
syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-46ddp" podUID="68fc9816-016d-4444-8ebc-fb099a3e0d3c" Feb 03 07:02:52 crc kubenswrapper[4998]: E0203 07:02:52.093961 4998 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:e6f2f361f1dcbb321407a5884951e16ff96e7b88942b10b548f27ad4de14a0be,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-8l92k,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-6687f8d877-5dbkk_openstack-operators(154195a7-15d8-454e-8e95-2e5f3935d2da): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Feb 03 07:02:52 crc kubenswrapper[4998]: E0203 07:02:52.095472 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-5dbkk" podUID="154195a7-15d8-454e-8e95-2e5f3935d2da" Feb 03 07:02:52 crc kubenswrapper[4998]: E0203 07:02:52.095971 4998 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:7869203f6f97de780368d507636031090fed3b658d2f7771acbd4481bdfc870b,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-q7hkt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-564965969-j4xmh_openstack-operators(30f4bbbd-5f3f-4f45-96cf-33fc6c63f458): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Feb 03 07:02:52 crc kubenswrapper[4998]: I0203 07:02:52.096652 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-5b964cf4cd-98dqh"] Feb 03 07:02:52 crc kubenswrapper[4998]: E0203 07:02:52.097120 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-564965969-j4xmh" podUID="30f4bbbd-5f3f-4f45-96cf-33fc6c63f458" Feb 03 07:02:52 crc kubenswrapper[4998]: I0203 07:02:52.110984 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-585dbc889-xgcxv"] Feb 03 07:02:52 crc kubenswrapper[4998]: I0203 07:02:52.123372 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-56f8bfcd9f-5fwsr"] Feb 03 07:02:52 crc kubenswrapper[4998]: W0203 07:02:52.127411 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd36616fb_0b5a_453d_b281_8df36af93238.slice/crio-39ff04698097fc39de96e61c7b8b8c1ee07bfa8022a4be730e11e72b87d986c4 WatchSource:0}: Error finding container 39ff04698097fc39de96e61c7b8b8c1ee07bfa8022a4be730e11e72b87d986c4: Status 404 returned 
error can't find the container with id 39ff04698097fc39de96e61c7b8b8c1ee07bfa8022a4be730e11e72b87d986c4 Feb 03 07:02:52 crc kubenswrapper[4998]: E0203 07:02:52.133397 4998 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:e0824d5d461ada59715eb3048ed9394c80abba09c45503f8f90ee3b34e525488,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-777tk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-5b964cf4cd-98dqh_openstack-operators(d627e8f0-589b-44a6-bf5c-9049ac454363): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Feb 03 07:02:52 crc kubenswrapper[4998]: E0203 07:02:52.134945 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-98dqh" podUID="d627e8f0-589b-44a6-bf5c-9049ac454363" Feb 03 07:02:52 crc kubenswrapper[4998]: E0203 07:02:52.137099 4998 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:3e01e99d3ca1b6c20b1bb015b00cfcbffc584f22a93dc6fe4019d63b813c0241,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-h7ktq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-56f8bfcd9f-5fwsr_openstack-operators(d36616fb-0b5a-453d-b281-8df36af93238): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Feb 03 07:02:52 crc kubenswrapper[4998]: E0203 07:02:52.138526 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-5fwsr" podUID="d36616fb-0b5a-453d-b281-8df36af93238" Feb 03 07:02:52 crc kubenswrapper[4998]: I0203 07:02:52.145178 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-68fc8c869-46ddp"] Feb 03 07:02:52 crc kubenswrapper[4998]: I0203 07:02:52.155326 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-t2v45" event={"ID":"6966131b-ab1c-4de2-9a32-8bcbd1d26c4a","Type":"ContainerStarted","Data":"ab605727cb45209ed04eb1d494cf7fc2ba8ac5cc15da63afd0d3c9c27b1fb6ea"} Feb 03 07:02:52 crc kubenswrapper[4998]: I0203 07:02:52.155590 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6687f8d877-5dbkk"] Feb 03 07:02:52 crc kubenswrapper[4998]: I0203 07:02:52.157033 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-r7xwj" 
event={"ID":"15e1bb02-71fd-439f-b8b0-769aebffd30e","Type":"ContainerStarted","Data":"95287166ca6ef87d23a0aff6b6e7b7dba0ed95a6b151937d640c19e0dc568dfc"} Feb 03 07:02:52 crc kubenswrapper[4998]: I0203 07:02:52.157820 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-6jgrc" event={"ID":"c3e9afd3-207f-4a98-ab9a-1abb166da517","Type":"ContainerStarted","Data":"983e6ad40b91d91eb4a60874e98749b9c391b3a5cb8501b9cb2873f1f32c0979"} Feb 03 07:02:52 crc kubenswrapper[4998]: I0203 07:02:52.158670 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-c8rqm" event={"ID":"447cd897-c504-49fa-82b8-5c205e002cfe","Type":"ContainerStarted","Data":"3a5cab540e7526ef51acb4c9de674130c5b4e87b1c9abf025743aaa73428ab6b"} Feb 03 07:02:52 crc kubenswrapper[4998]: I0203 07:02:52.159637 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-4dj25" event={"ID":"c24717b4-268b-49c0-82d1-b63ebcc16bf7","Type":"ContainerStarted","Data":"5782c031e9f1913e392ab781e6e2ce44f9d5e61acc73572049415b0b3e4c170a"} Feb 03 07:02:52 crc kubenswrapper[4998]: I0203 07:02:52.161593 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-xgcxv" event={"ID":"d838894e-3a4d-401c-b4d1-b4464d006b88","Type":"ContainerStarted","Data":"a0c22523cd4b2f086de09c6b472ee299d286c9a5c11a68518fed133cdfb36971"} Feb 03 07:02:52 crc kubenswrapper[4998]: I0203 07:02:52.162751 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-rrghs" event={"ID":"7c0e11e2-32c3-4d5b-889e-b5d55817c85c","Type":"ContainerStarted","Data":"ec173f29c6703c2d16fb75aabc007faa6fffa6e9971c6245bfd4de7d77224acb"} Feb 03 07:02:52 crc kubenswrapper[4998]: I0203 07:02:52.163791 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-46ddp" event={"ID":"68fc9816-016d-4444-8ebc-fb099a3e0d3c","Type":"ContainerStarted","Data":"b156a8dd9c883abfe095b02fbc69dd78fecd6e24308ca18ea76673f28a100b6a"} Feb 03 07:02:52 crc kubenswrapper[4998]: E0203 07:02:52.165344 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:42ad717de1b82267d244b016e5491a5b66a5c3deb6b8c2906a379e1296a2c382\\\"\"" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-46ddp" podUID="68fc9816-016d-4444-8ebc-fb099a3e0d3c" Feb 03 07:02:52 crc kubenswrapper[4998]: I0203 07:02:52.165935 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-qrtp4" event={"ID":"02d790b4-9b97-45e9-8efa-4cb81384bfae","Type":"ContainerStarted","Data":"04c723529d8831027912e2b0fb9f362438ed636eceaf6ac9517d15c03bab4ebd"} Feb 03 07:02:52 crc kubenswrapper[4998]: I0203 07:02:52.167373 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-vxnv2" event={"ID":"c96b6def-54b9-4b76-870c-7e504e58cca9","Type":"ContainerStarted","Data":"b078c9ed9eb03c05d0017b4b966aa1e8541ec400ad808f32dd10335be50af27d"} Feb 03 07:02:52 crc kubenswrapper[4998]: I0203 07:02:52.168998 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/heat-operator-controller-manager-69d6db494d-5jdj9" event={"ID":"44f95b1e-8d2d-4db0-8434-b4ae01d46f98","Type":"ContainerStarted","Data":"646f4a1843369cb8a0a4190bd42c4ef7e9a5f7154df534e3de34293fa157a9b7"} Feb 03 07:02:52 crc kubenswrapper[4998]: I0203 07:02:52.170477 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-98dqh" event={"ID":"d627e8f0-589b-44a6-bf5c-9049ac454363","Type":"ContainerStarted","Data":"be95c67669a5272750f8f5d6d1558f30f2e6dc521cab70236ce7856eedc4e4a4"} Feb 03 07:02:52 crc kubenswrapper[4998]: E0203 07:02:52.171546 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:e0824d5d461ada59715eb3048ed9394c80abba09c45503f8f90ee3b34e525488\\\"\"" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-98dqh" podUID="d627e8f0-589b-44a6-bf5c-9049ac454363" Feb 03 07:02:52 crc kubenswrapper[4998]: I0203 07:02:52.171684 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-5fwsr" event={"ID":"d36616fb-0b5a-453d-b281-8df36af93238","Type":"ContainerStarted","Data":"39ff04698097fc39de96e61c7b8b8c1ee07bfa8022a4be730e11e72b87d986c4"} Feb 03 07:02:52 crc kubenswrapper[4998]: I0203 07:02:52.175260 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-5dbkk" event={"ID":"154195a7-15d8-454e-8e95-2e5f3935d2da","Type":"ContainerStarted","Data":"9683b78e80fe599c6c978044c627094408953e8113f8f84a2df9857ff5432975"} Feb 03 07:02:52 crc kubenswrapper[4998]: E0203 07:02:52.185414 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:3e01e99d3ca1b6c20b1bb015b00cfcbffc584f22a93dc6fe4019d63b813c0241\\\"\"" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-5fwsr" podUID="d36616fb-0b5a-453d-b281-8df36af93238" Feb 03 07:02:52 crc kubenswrapper[4998]: E0203 07:02:52.185485 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:e6f2f361f1dcbb321407a5884951e16ff96e7b88942b10b548f27ad4de14a0be\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-5dbkk" podUID="154195a7-15d8-454e-8e95-2e5f3935d2da" Feb 03 07:02:52 crc kubenswrapper[4998]: I0203 07:02:52.186075 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-4sdk7" event={"ID":"73dcbbad-3aa0-48d6-ac55-ad4443f781d3","Type":"ContainerStarted","Data":"ce0ca7bd3791da33cf21d2b741911f0919ed7aa532043b802289436625f97b24"} Feb 03 07:02:52 crc kubenswrapper[4998]: I0203 07:02:52.193092 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-564965969-j4xmh" event={"ID":"30f4bbbd-5f3f-4f45-96cf-33fc6c63f458","Type":"ContainerStarted","Data":"b5bffb60d5df7b502e6e126967f2483711afb7ad4555edff1dea16f71ce13469"} Feb 03 07:02:52 crc kubenswrapper[4998]: I0203 07:02:52.196211 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/glance-operator-controller-manager-8886f4c47-gztww" event={"ID":"00c60d3a-58c3-4ad9-a015-1dacdebef5dc","Type":"ContainerStarted","Data":"c31f845156fc92a4d940bfc2b59c9770cf651c325739e2a1c6a685c78e5c9b01"} Feb 03 07:02:52 crc kubenswrapper[4998]: I0203 07:02:52.197885 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-h2s2c" event={"ID":"0ab700e8-fa08-4fb9-9ef8-4053055f99ee","Type":"ContainerStarted","Data":"31805f2ee421e5febbf0ad209f5633fd2a13371110803c7936e5e982fe86c93c"} Feb 03 07:02:52 crc kubenswrapper[4998]: E0203 07:02:52.202890 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:7869203f6f97de780368d507636031090fed3b658d2f7771acbd4481bdfc870b\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-564965969-j4xmh" podUID="30f4bbbd-5f3f-4f45-96cf-33fc6c63f458" Feb 03 07:02:52 crc kubenswrapper[4998]: I0203 07:02:52.540765 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-metrics-certs\") pod \"openstack-operator-controller-manager-646f757d77-gxl8w\" (UID: \"b4eb1b29-8a96-435e-ac43-e4ee5d349047\") " pod="openstack-operators/openstack-operator-controller-manager-646f757d77-gxl8w" Feb 03 07:02:52 crc kubenswrapper[4998]: I0203 07:02:52.540866 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-webhook-certs\") pod \"openstack-operator-controller-manager-646f757d77-gxl8w\" (UID: \"b4eb1b29-8a96-435e-ac43-e4ee5d349047\") " pod="openstack-operators/openstack-operator-controller-manager-646f757d77-gxl8w" Feb 03 07:02:52 crc kubenswrapper[4998]: E0203 07:02:52.541049 4998 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 03 07:02:52 crc kubenswrapper[4998]: E0203 07:02:52.542002 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-metrics-certs podName:b4eb1b29-8a96-435e-ac43-e4ee5d349047 nodeName:}" failed. No retries permitted until 2026-02-03 07:02:54.541983896 +0000 UTC m=+1012.828677702 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-metrics-certs") pod "openstack-operator-controller-manager-646f757d77-gxl8w" (UID: "b4eb1b29-8a96-435e-ac43-e4ee5d349047") : secret "metrics-server-cert" not found Feb 03 07:02:52 crc kubenswrapper[4998]: E0203 07:02:52.542421 4998 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 03 07:02:52 crc kubenswrapper[4998]: E0203 07:02:52.542515 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-webhook-certs podName:b4eb1b29-8a96-435e-ac43-e4ee5d349047 nodeName:}" failed. No retries permitted until 2026-02-03 07:02:54.542495791 +0000 UTC m=+1012.829189597 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-webhook-certs") pod "openstack-operator-controller-manager-646f757d77-gxl8w" (UID: "b4eb1b29-8a96-435e-ac43-e4ee5d349047") : secret "webhook-server-cert" not found Feb 03 07:02:53 crc kubenswrapper[4998]: E0203 07:02:53.208575 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:3e01e99d3ca1b6c20b1bb015b00cfcbffc584f22a93dc6fe4019d63b813c0241\\\"\"" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-5fwsr" podUID="d36616fb-0b5a-453d-b281-8df36af93238" Feb 03 07:02:53 crc kubenswrapper[4998]: E0203 07:02:53.208709 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:42ad717de1b82267d244b016e5491a5b66a5c3deb6b8c2906a379e1296a2c382\\\"\"" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-46ddp" podUID="68fc9816-016d-4444-8ebc-fb099a3e0d3c" Feb 03 07:02:53 crc kubenswrapper[4998]: E0203 07:02:53.209023 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:7869203f6f97de780368d507636031090fed3b658d2f7771acbd4481bdfc870b\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-564965969-j4xmh" podUID="30f4bbbd-5f3f-4f45-96cf-33fc6c63f458" Feb 03 07:02:53 crc kubenswrapper[4998]: E0203 07:02:53.209240 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:e6f2f361f1dcbb321407a5884951e16ff96e7b88942b10b548f27ad4de14a0be\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-5dbkk" podUID="154195a7-15d8-454e-8e95-2e5f3935d2da" Feb 03 07:02:53 crc kubenswrapper[4998]: E0203 07:02:53.211130 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:e0824d5d461ada59715eb3048ed9394c80abba09c45503f8f90ee3b34e525488\\\"\"" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-98dqh" podUID="d627e8f0-589b-44a6-bf5c-9049ac454363" Feb 03 07:02:53 crc kubenswrapper[4998]: I0203 07:02:53.663449 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e6885e8a-0fe6-44be-93e6-b5c663958e1f-cert\") pod \"infra-operator-controller-manager-79955696d6-7fllq\" (UID: \"e6885e8a-0fe6-44be-93e6-b5c663958e1f\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-7fllq" Feb 03 07:02:53 crc kubenswrapper[4998]: E0203 07:02:53.663798 4998 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Feb 03 07:02:53 crc kubenswrapper[4998]: E0203 07:02:53.663842 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e6885e8a-0fe6-44be-93e6-b5c663958e1f-cert podName:e6885e8a-0fe6-44be-93e6-b5c663958e1f nodeName:}" failed. 
No retries permitted until 2026-02-03 07:02:57.66382828 +0000 UTC m=+1015.950522086 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e6885e8a-0fe6-44be-93e6-b5c663958e1f-cert") pod "infra-operator-controller-manager-79955696d6-7fllq" (UID: "e6885e8a-0fe6-44be-93e6-b5c663958e1f") : secret "infra-operator-webhook-server-cert" not found Feb 03 07:02:54 crc kubenswrapper[4998]: I0203 07:02:54.070270 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a954bf90-cac3-4896-bbaf-8ad98f3876d9-cert\") pod \"openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p\" (UID: \"a954bf90-cac3-4896-bbaf-8ad98f3876d9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p" Feb 03 07:02:54 crc kubenswrapper[4998]: E0203 07:02:54.070456 4998 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 03 07:02:54 crc kubenswrapper[4998]: E0203 07:02:54.070538 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a954bf90-cac3-4896-bbaf-8ad98f3876d9-cert podName:a954bf90-cac3-4896-bbaf-8ad98f3876d9 nodeName:}" failed. No retries permitted until 2026-02-03 07:02:58.070520109 +0000 UTC m=+1016.357213915 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a954bf90-cac3-4896-bbaf-8ad98f3876d9-cert") pod "openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p" (UID: "a954bf90-cac3-4896-bbaf-8ad98f3876d9") : secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 03 07:02:54 crc kubenswrapper[4998]: I0203 07:02:54.576709 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-metrics-certs\") pod \"openstack-operator-controller-manager-646f757d77-gxl8w\" (UID: \"b4eb1b29-8a96-435e-ac43-e4ee5d349047\") " pod="openstack-operators/openstack-operator-controller-manager-646f757d77-gxl8w" Feb 03 07:02:54 crc kubenswrapper[4998]: E0203 07:02:54.576883 4998 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 03 07:02:54 crc kubenswrapper[4998]: I0203 07:02:54.577426 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-webhook-certs\") pod \"openstack-operator-controller-manager-646f757d77-gxl8w\" (UID: \"b4eb1b29-8a96-435e-ac43-e4ee5d349047\") " pod="openstack-operators/openstack-operator-controller-manager-646f757d77-gxl8w" Feb 03 07:02:54 crc kubenswrapper[4998]: E0203 07:02:54.577554 4998 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 03 07:02:54 crc kubenswrapper[4998]: E0203 07:02:54.577608 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-metrics-certs podName:b4eb1b29-8a96-435e-ac43-e4ee5d349047 nodeName:}" failed. No retries permitted until 2026-02-03 07:02:58.577579733 +0000 UTC m=+1016.864273539 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-metrics-certs") pod "openstack-operator-controller-manager-646f757d77-gxl8w" (UID: "b4eb1b29-8a96-435e-ac43-e4ee5d349047") : secret "metrics-server-cert" not found Feb 03 07:02:54 crc kubenswrapper[4998]: E0203 07:02:54.577643 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-webhook-certs podName:b4eb1b29-8a96-435e-ac43-e4ee5d349047 nodeName:}" failed. No retries permitted until 2026-02-03 07:02:58.577634444 +0000 UTC m=+1016.864328250 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-webhook-certs") pod "openstack-operator-controller-manager-646f757d77-gxl8w" (UID: "b4eb1b29-8a96-435e-ac43-e4ee5d349047") : secret "webhook-server-cert" not found Feb 03 07:02:57 crc kubenswrapper[4998]: I0203 07:02:57.727629 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e6885e8a-0fe6-44be-93e6-b5c663958e1f-cert\") pod \"infra-operator-controller-manager-79955696d6-7fllq\" (UID: \"e6885e8a-0fe6-44be-93e6-b5c663958e1f\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-7fllq" Feb 03 07:02:57 crc kubenswrapper[4998]: E0203 07:02:57.727838 4998 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Feb 03 07:02:57 crc kubenswrapper[4998]: E0203 07:02:57.728290 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e6885e8a-0fe6-44be-93e6-b5c663958e1f-cert podName:e6885e8a-0fe6-44be-93e6-b5c663958e1f nodeName:}" failed. No retries permitted until 2026-02-03 07:03:05.728266852 +0000 UTC m=+1024.014960708 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e6885e8a-0fe6-44be-93e6-b5c663958e1f-cert") pod "infra-operator-controller-manager-79955696d6-7fllq" (UID: "e6885e8a-0fe6-44be-93e6-b5c663958e1f") : secret "infra-operator-webhook-server-cert" not found Feb 03 07:02:58 crc kubenswrapper[4998]: I0203 07:02:58.134168 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a954bf90-cac3-4896-bbaf-8ad98f3876d9-cert\") pod \"openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p\" (UID: \"a954bf90-cac3-4896-bbaf-8ad98f3876d9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p" Feb 03 07:02:58 crc kubenswrapper[4998]: E0203 07:02:58.134380 4998 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 03 07:02:58 crc kubenswrapper[4998]: E0203 07:02:58.134433 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a954bf90-cac3-4896-bbaf-8ad98f3876d9-cert podName:a954bf90-cac3-4896-bbaf-8ad98f3876d9 nodeName:}" failed. No retries permitted until 2026-02-03 07:03:06.134418265 +0000 UTC m=+1024.421112071 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a954bf90-cac3-4896-bbaf-8ad98f3876d9-cert") pod "openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p" (UID: "a954bf90-cac3-4896-bbaf-8ad98f3876d9") : secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 03 07:02:58 crc kubenswrapper[4998]: I0203 07:02:58.643122 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-metrics-certs\") pod \"openstack-operator-controller-manager-646f757d77-gxl8w\" (UID: \"b4eb1b29-8a96-435e-ac43-e4ee5d349047\") " pod="openstack-operators/openstack-operator-controller-manager-646f757d77-gxl8w" Feb 03 07:02:58 crc kubenswrapper[4998]: I0203 07:02:58.643686 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-webhook-certs\") pod \"openstack-operator-controller-manager-646f757d77-gxl8w\" (UID: \"b4eb1b29-8a96-435e-ac43-e4ee5d349047\") " pod="openstack-operators/openstack-operator-controller-manager-646f757d77-gxl8w" Feb 03 07:02:58 crc kubenswrapper[4998]: E0203 07:02:58.643958 4998 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 03 07:02:58 crc kubenswrapper[4998]: E0203 07:02:58.644084 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-webhook-certs podName:b4eb1b29-8a96-435e-ac43-e4ee5d349047 nodeName:}" failed. No retries permitted until 2026-02-03 07:03:06.644063013 +0000 UTC m=+1024.930756819 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-webhook-certs") pod "openstack-operator-controller-manager-646f757d77-gxl8w" (UID: "b4eb1b29-8a96-435e-ac43-e4ee5d349047") : secret "webhook-server-cert" not found Feb 03 07:02:58 crc kubenswrapper[4998]: E0203 07:02:58.644668 4998 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 03 07:02:58 crc kubenswrapper[4998]: E0203 07:02:58.644767 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-metrics-certs podName:b4eb1b29-8a96-435e-ac43-e4ee5d349047 nodeName:}" failed. No retries permitted until 2026-02-03 07:03:06.644756673 +0000 UTC m=+1024.931450479 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-metrics-certs") pod "openstack-operator-controller-manager-646f757d77-gxl8w" (UID: "b4eb1b29-8a96-435e-ac43-e4ee5d349047") : secret "metrics-server-cert" not found Feb 03 07:03:05 crc kubenswrapper[4998]: I0203 07:03:05.744510 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-6jgrc" event={"ID":"c3e9afd3-207f-4a98-ab9a-1abb166da517","Type":"ContainerStarted","Data":"9c63fa7491cc2106fca7de1020c85558a27e93bf8b84d2a086e0695d55798767"} Feb 03 07:03:05 crc kubenswrapper[4998]: I0203 07:03:05.745457 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-6jgrc" Feb 03 07:03:05 crc kubenswrapper[4998]: I0203 07:03:05.783359 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-6jgrc" podStartSLOduration=2.998860102 podStartE2EDuration="16.783333438s" podCreationTimestamp="2026-02-03 07:02:49 +0000 UTC" firstStartedPulling="2026-02-03 07:02:51.568164737 +0000 UTC m=+1009.854858543" lastFinishedPulling="2026-02-03 07:03:05.352638063 +0000 UTC m=+1023.639331879" observedRunningTime="2026-02-03 07:03:05.759365213 +0000 UTC m=+1024.046059019" watchObservedRunningTime="2026-02-03 07:03:05.783333438 +0000 UTC m=+1024.070027244" Feb 03 07:03:05 crc kubenswrapper[4998]: I0203 07:03:05.790594 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e6885e8a-0fe6-44be-93e6-b5c663958e1f-cert\") pod \"infra-operator-controller-manager-79955696d6-7fllq\" (UID: \"e6885e8a-0fe6-44be-93e6-b5c663958e1f\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-7fllq" Feb 03 07:03:05 crc kubenswrapper[4998]: E0203 07:03:05.790804 4998 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Feb 03 07:03:05 crc kubenswrapper[4998]: E0203 07:03:05.790861 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e6885e8a-0fe6-44be-93e6-b5c663958e1f-cert podName:e6885e8a-0fe6-44be-93e6-b5c663958e1f nodeName:}" failed. No retries permitted until 2026-02-03 07:03:21.790840442 +0000 UTC m=+1040.077534248 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e6885e8a-0fe6-44be-93e6-b5c663958e1f-cert") pod "infra-operator-controller-manager-79955696d6-7fllq" (UID: "e6885e8a-0fe6-44be-93e6-b5c663958e1f") : secret "infra-operator-webhook-server-cert" not found Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.195636 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a954bf90-cac3-4896-bbaf-8ad98f3876d9-cert\") pod \"openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p\" (UID: \"a954bf90-cac3-4896-bbaf-8ad98f3876d9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p" Feb 03 07:03:06 crc kubenswrapper[4998]: E0203 07:03:06.195845 4998 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 03 07:03:06 crc kubenswrapper[4998]: E0203 07:03:06.195912 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a954bf90-cac3-4896-bbaf-8ad98f3876d9-cert podName:a954bf90-cac3-4896-bbaf-8ad98f3876d9 nodeName:}" failed. No retries permitted until 2026-02-03 07:03:22.195894394 +0000 UTC m=+1040.482588200 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a954bf90-cac3-4896-bbaf-8ad98f3876d9-cert") pod "openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p" (UID: "a954bf90-cac3-4896-bbaf-8ad98f3876d9") : secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.728930 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-webhook-certs\") pod \"openstack-operator-controller-manager-646f757d77-gxl8w\" (UID: \"b4eb1b29-8a96-435e-ac43-e4ee5d349047\") " pod="openstack-operators/openstack-operator-controller-manager-646f757d77-gxl8w" Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.729085 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-metrics-certs\") pod \"openstack-operator-controller-manager-646f757d77-gxl8w\" (UID: \"b4eb1b29-8a96-435e-ac43-e4ee5d349047\") " pod="openstack-operators/openstack-operator-controller-manager-646f757d77-gxl8w" Feb 03 07:03:06 crc kubenswrapper[4998]: E0203 07:03:06.729169 4998 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 03 07:03:06 crc kubenswrapper[4998]: E0203 07:03:06.729207 4998 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 03 07:03:06 crc kubenswrapper[4998]: E0203 07:03:06.729246 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-webhook-certs podName:b4eb1b29-8a96-435e-ac43-e4ee5d349047 nodeName:}" failed. No retries permitted until 2026-02-03 07:03:22.729227589 +0000 UTC m=+1041.015921395 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-webhook-certs") pod "openstack-operator-controller-manager-646f757d77-gxl8w" (UID: "b4eb1b29-8a96-435e-ac43-e4ee5d349047") : secret "webhook-server-cert" not found Feb 03 07:03:06 crc kubenswrapper[4998]: E0203 07:03:06.729263 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-metrics-certs podName:b4eb1b29-8a96-435e-ac43-e4ee5d349047 nodeName:}" failed. No retries permitted until 2026-02-03 07:03:22.72925682 +0000 UTC m=+1041.015950626 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-metrics-certs") pod "openstack-operator-controller-manager-646f757d77-gxl8w" (UID: "b4eb1b29-8a96-435e-ac43-e4ee5d349047") : secret "metrics-server-cert" not found Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.751689 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-5jdj9" event={"ID":"44f95b1e-8d2d-4db0-8434-b4ae01d46f98","Type":"ContainerStarted","Data":"05eb45ec10b7aef4b2079629c5a1331b6071b2363085c283035dae3d87901e07"} Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.752607 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-5jdj9" Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.754772 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-jqjg8" event={"ID":"5254fd85-6147-4f7f-9ed7-d5491795590e","Type":"ContainerStarted","Data":"9a5bb1840a000015b15327ac345b6628bae60e7d960d340da4d6e4f4cf69750f"} Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.754901 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-jqjg8" Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.759278 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-rrghs" event={"ID":"7c0e11e2-32c3-4d5b-889e-b5d55817c85c","Type":"ContainerStarted","Data":"782fcdd7d9589c0bba78e729671e2eed28aa9acd9322dd4b691273d072ca5916"} Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.759397 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-rrghs" Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.761001 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-vxnv2" event={"ID":"c96b6def-54b9-4b76-870c-7e504e58cca9","Type":"ContainerStarted","Data":"ea244c7a78abcced92c5d54bc53d0c056b0d0c721d326c38ccde54bbb01cffcb"} Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.761143 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-vxnv2" Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.762812 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-4dj25" event={"ID":"c24717b4-268b-49c0-82d1-b63ebcc16bf7","Type":"ContainerStarted","Data":"4a3c5272d97efdd6ab1cacfc0ddb04009356104b3d33b139c8732324fa30219b"} 
Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.762894 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-4dj25"
Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.764255 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-t2v45" event={"ID":"6966131b-ab1c-4de2-9a32-8bcbd1d26c4a","Type":"ContainerStarted","Data":"6df8a1f58c3764c15257734b382c910ca713c74b381da24c1e94a1d2493853d8"}
Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.766245 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-t2v45"
Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.781344 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-5jdj9" podStartSLOduration=3.705630737 podStartE2EDuration="17.781325486s" podCreationTimestamp="2026-02-03 07:02:49 +0000 UTC" firstStartedPulling="2026-02-03 07:02:51.280896667 +0000 UTC m=+1009.567590473" lastFinishedPulling="2026-02-03 07:03:05.356591406 +0000 UTC m=+1023.643285222" observedRunningTime="2026-02-03 07:03:06.779516714 +0000 UTC m=+1025.066210530" watchObservedRunningTime="2026-02-03 07:03:06.781325486 +0000 UTC m=+1025.068019292"
Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.795510 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-gztww" event={"ID":"00c60d3a-58c3-4ad9-a015-1dacdebef5dc","Type":"ContainerStarted","Data":"2c6f992b9b2c029e97617b373b2327e45e2a1912b4c4019b09d672e5456157f8"}
Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.796136 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-gztww"
Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.812495 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-jqjg8" podStartSLOduration=4.964302367 podStartE2EDuration="17.812479325s" podCreationTimestamp="2026-02-03 07:02:49 +0000 UTC" firstStartedPulling="2026-02-03 07:02:50.558928569 +0000 UTC m=+1008.845622375" lastFinishedPulling="2026-02-03 07:03:03.407105537 +0000 UTC m=+1021.693799333" observedRunningTime="2026-02-03 07:03:06.810085717 +0000 UTC m=+1025.096779523" watchObservedRunningTime="2026-02-03 07:03:06.812479325 +0000 UTC m=+1025.099173131"
Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.816041 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-h2s2c" event={"ID":"0ab700e8-fa08-4fb9-9ef8-4053055f99ee","Type":"ContainerStarted","Data":"a919fbbc21ef9960de91d4665d3a08895884b7d7a0225a403ba12b267fad4e7a"}
Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.829267 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-c8rqm" event={"ID":"447cd897-c504-49fa-82b8-5c205e002cfe","Type":"ContainerStarted","Data":"70addc8093c5d7e57f706036f3d7ea33f65c17244b4c211c199a43095b09c029"}
Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.829853 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-c8rqm"
Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.839318 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-4dj25" podStartSLOduration=3.559849291 podStartE2EDuration="16.839303961s" podCreationTimestamp="2026-02-03 07:02:50 +0000 UTC" firstStartedPulling="2026-02-03 07:02:52.076738535 +0000 UTC m=+1010.363432341" lastFinishedPulling="2026-02-03 07:03:05.356193185 +0000 UTC m=+1023.642887011" observedRunningTime="2026-02-03 07:03:06.835832842 +0000 UTC m=+1025.122526648" watchObservedRunningTime="2026-02-03 07:03:06.839303961 +0000 UTC m=+1025.125997767"
Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.841190 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-qrtp4" event={"ID":"02d790b4-9b97-45e9-8efa-4cb81384bfae","Type":"ContainerStarted","Data":"e98f6136fb120cbbac803ce90dda69d29a603dc7c39df5fc62939be2d1d37014"}
Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.842657 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-qrtp4"
Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.849978 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-xgcxv" event={"ID":"d838894e-3a4d-401c-b4d1-b4464d006b88","Type":"ContainerStarted","Data":"68c0d2f6d399ac39acdfb3bd556bb97c9bf4fa5e12cb8bf87c9441bb95f154f2"}
Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.850204 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-xgcxv"
Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.852700 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-cqsb2" event={"ID":"555310a7-1022-4224-8329-56cf0b598983","Type":"ContainerStarted","Data":"a58cb7765c480b1b9b4a3e53a75b65eca446342b77689ee7f4c8468233b35e6b"}
Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.853290 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-cqsb2"
Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.855063 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-t2v45" podStartSLOduration=4.531820072 podStartE2EDuration="17.85503346s" podCreationTimestamp="2026-02-03 07:02:49 +0000 UTC" firstStartedPulling="2026-02-03 07:02:52.031223016 +0000 UTC m=+1010.317916822" lastFinishedPulling="2026-02-03 07:03:05.354436384 +0000 UTC m=+1023.641130210" observedRunningTime="2026-02-03 07:03:06.854392222 +0000 UTC m=+1025.141086038" watchObservedRunningTime="2026-02-03 07:03:06.85503346 +0000 UTC m=+1025.141727266"
Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.857445 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-r7xwj" event={"ID":"15e1bb02-71fd-439f-b8b0-769aebffd30e","Type":"ContainerStarted","Data":"8fe00a59cedbf5daf1b4ff25339406b8e203f0141deb4a6af5e39a6c38c4eb6b"}
Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.857564 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-r7xwj"
Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.861330 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-4sdk7" event={"ID":"73dcbbad-3aa0-48d6-ac55-ad4443f781d3","Type":"ContainerStarted","Data":"081e23bc74c884f27f1f4b1ccf8feb2d0580b54d3868b04b208c115d67250f10"}
Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.888934 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-rrghs" podStartSLOduration=4.101203349 podStartE2EDuration="17.888911247s" podCreationTimestamp="2026-02-03 07:02:49 +0000 UTC" firstStartedPulling="2026-02-03 07:02:51.566019356 +0000 UTC m=+1009.852713162" lastFinishedPulling="2026-02-03 07:03:05.353727254 +0000 UTC m=+1023.640421060" observedRunningTime="2026-02-03 07:03:06.872334704 +0000 UTC m=+1025.159028520" watchObservedRunningTime="2026-02-03 07:03:06.888911247 +0000 UTC m=+1025.175605063"
Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.908327 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-vxnv2" podStartSLOduration=4.113015717 podStartE2EDuration="17.908307681s" podCreationTimestamp="2026-02-03 07:02:49 +0000 UTC" firstStartedPulling="2026-02-03 07:02:51.557630707 +0000 UTC m=+1009.844324513" lastFinishedPulling="2026-02-03 07:03:05.352922661 +0000 UTC m=+1023.639616477" observedRunningTime="2026-02-03 07:03:06.904289736 +0000 UTC m=+1025.190983552" watchObservedRunningTime="2026-02-03 07:03:06.908307681 +0000 UTC m=+1025.195001487"
Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.936593 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-qrtp4" podStartSLOduration=4.137707361 podStartE2EDuration="17.936572308s" podCreationTimestamp="2026-02-03 07:02:49 +0000 UTC" firstStartedPulling="2026-02-03 07:02:51.557576555 +0000 UTC m=+1009.844270361" lastFinishedPulling="2026-02-03 07:03:05.356441502 +0000 UTC m=+1023.643135308" observedRunningTime="2026-02-03 07:03:06.927655223 +0000 UTC m=+1025.214349029" watchObservedRunningTime="2026-02-03 07:03:06.936572308 +0000 UTC m=+1025.223266114"
Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.953865 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-h2s2c" podStartSLOduration=3.503533275 podStartE2EDuration="16.953845371s" podCreationTimestamp="2026-02-03 07:02:50 +0000 UTC" firstStartedPulling="2026-02-03 07:02:52.069714965 +0000 UTC m=+1010.356408771" lastFinishedPulling="2026-02-03 07:03:05.520027061 +0000 UTC m=+1023.806720867" observedRunningTime="2026-02-03 07:03:06.95032733 +0000 UTC m=+1025.237021156" watchObservedRunningTime="2026-02-03 07:03:06.953845371 +0000 UTC m=+1025.240539187"
Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.966662 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-c8rqm" podStartSLOduration=4.359366139 podStartE2EDuration="17.966638996s" podCreationTimestamp="2026-02-03 07:02:49 +0000 UTC" firstStartedPulling="2026-02-03 07:02:51.748764383 +0000 UTC m=+1010.035458189" lastFinishedPulling="2026-02-03 07:03:05.35603724 +0000 UTC m=+1023.642731046" observedRunningTime="2026-02-03 07:03:06.961915151 +0000 UTC m=+1025.248608957" watchObservedRunningTime="2026-02-03 07:03:06.966638996 +0000 UTC m=+1025.253332802"
Feb 03 07:03:06 crc kubenswrapper[4998]: I0203 07:03:06.989600 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-xgcxv" podStartSLOduration=4.708340871 podStartE2EDuration="17.989583921s" podCreationTimestamp="2026-02-03 07:02:49 +0000 UTC" firstStartedPulling="2026-02-03 07:02:52.074945094 +0000 UTC m=+1010.361638890" lastFinishedPulling="2026-02-03 07:03:05.356188134 +0000 UTC m=+1023.642881940" observedRunningTime="2026-02-03 07:03:06.986626787 +0000 UTC m=+1025.273320613" watchObservedRunningTime="2026-02-03 07:03:06.989583921 +0000 UTC m=+1025.276277727"
Feb 03 07:03:07 crc kubenswrapper[4998]: I0203 07:03:07.041962 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-gztww" podStartSLOduration=4.468470804 podStartE2EDuration="18.041945946s" podCreationTimestamp="2026-02-03 07:02:49 +0000 UTC" firstStartedPulling="2026-02-03 07:02:51.783087493 +0000 UTC m=+1010.069781299" lastFinishedPulling="2026-02-03 07:03:05.356562625 +0000 UTC m=+1023.643256441" observedRunningTime="2026-02-03 07:03:07.017201799 +0000 UTC m=+1025.303895615" watchObservedRunningTime="2026-02-03 07:03:07.041945946 +0000 UTC m=+1025.328639752"
Feb 03 07:03:07 crc kubenswrapper[4998]: I0203 07:03:07.044052 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-4sdk7" podStartSLOduration=3.683697338 podStartE2EDuration="17.044044786s" podCreationTimestamp="2026-02-03 07:02:50 +0000 UTC" firstStartedPulling="2026-02-03 07:02:52.000604972 +0000 UTC m=+1010.287298798" lastFinishedPulling="2026-02-03 07:03:05.36095243 +0000 UTC m=+1023.647646246" observedRunningTime="2026-02-03 07:03:07.040789683 +0000 UTC m=+1025.327483499" watchObservedRunningTime="2026-02-03 07:03:07.044044786 +0000 UTC m=+1025.330738592"
Feb 03 07:03:07 crc kubenswrapper[4998]: I0203 07:03:07.066661 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-cqsb2" podStartSLOduration=5.345685704 podStartE2EDuration="18.066637881s" podCreationTimestamp="2026-02-03 07:02:49 +0000 UTC" firstStartedPulling="2026-02-03 07:02:51.138695488 +0000 UTC m=+1009.425389294" lastFinishedPulling="2026-02-03 07:03:03.859647665 +0000 UTC m=+1022.146341471" observedRunningTime="2026-02-03 07:03:07.065382685 +0000 UTC m=+1025.352076501" watchObservedRunningTime="2026-02-03 07:03:07.066637881 +0000 UTC m=+1025.353331677"
Feb 03 07:03:07 crc kubenswrapper[4998]: I0203 07:03:07.094944 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-r7xwj" podStartSLOduration=3.878311717 podStartE2EDuration="18.094927398s" podCreationTimestamp="2026-02-03 07:02:49 +0000 UTC" firstStartedPulling="2026-02-03 07:02:51.143071493 +0000 UTC m=+1009.429765299" lastFinishedPulling="2026-02-03 07:03:05.359687164 +0000 UTC m=+1023.646380980" observedRunningTime="2026-02-03 07:03:07.090057719 +0000 UTC m=+1025.376751545" watchObservedRunningTime="2026-02-03 07:03:07.094927398 +0000 UTC m=+1025.381621204"
Feb 03 07:03:07 crc kubenswrapper[4998]: I0203 07:03:07.878743 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-4sdk7"
pod="openstack-operators/ovn-operator-controller-manager-788c46999f-4sdk7" Feb 03 07:03:08 crc kubenswrapper[4998]: I0203 07:03:08.885515 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-5dbkk" event={"ID":"154195a7-15d8-454e-8e95-2e5f3935d2da","Type":"ContainerStarted","Data":"89b8914b32f4abdf8ac6586353f924da88a9850fa2727f2ad55a56cea9fbfcc3"} Feb 03 07:03:08 crc kubenswrapper[4998]: I0203 07:03:08.886021 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-5dbkk" Feb 03 07:03:08 crc kubenswrapper[4998]: I0203 07:03:08.911133 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-5fwsr" event={"ID":"d36616fb-0b5a-453d-b281-8df36af93238","Type":"ContainerStarted","Data":"4d58c20ae24fb445ccae0f7ae473df96f78a687ed33240628616c14dcf3ea606"} Feb 03 07:03:08 crc kubenswrapper[4998]: I0203 07:03:08.911844 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-5dbkk" podStartSLOduration=3.332486734 podStartE2EDuration="19.911793251s" podCreationTimestamp="2026-02-03 07:02:49 +0000 UTC" firstStartedPulling="2026-02-03 07:02:52.093835313 +0000 UTC m=+1010.380529119" lastFinishedPulling="2026-02-03 07:03:08.67314183 +0000 UTC m=+1026.959835636" observedRunningTime="2026-02-03 07:03:08.906380397 +0000 UTC m=+1027.193074223" watchObservedRunningTime="2026-02-03 07:03:08.911793251 +0000 UTC m=+1027.198487057" Feb 03 07:03:08 crc kubenswrapper[4998]: I0203 07:03:08.932372 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-5fwsr" podStartSLOduration=2.39679679 podStartE2EDuration="18.932353018s" podCreationTimestamp="2026-02-03 07:02:50 +0000 UTC" firstStartedPulling="2026-02-03 07:02:52.136922263 +0000 UTC m=+1010.423616069" lastFinishedPulling="2026-02-03 07:03:08.672478481 +0000 UTC m=+1026.959172297" observedRunningTime="2026-02-03 07:03:08.932013858 +0000 UTC m=+1027.218707684" watchObservedRunningTime="2026-02-03 07:03:08.932353018 +0000 UTC m=+1027.219046824" Feb 03 07:03:10 crc kubenswrapper[4998]: I0203 07:03:10.059566 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-jqjg8" Feb 03 07:03:10 crc kubenswrapper[4998]: I0203 07:03:10.093152 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-cqsb2" Feb 03 07:03:10 crc kubenswrapper[4998]: I0203 07:03:10.100142 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-r7xwj" Feb 03 07:03:10 crc kubenswrapper[4998]: I0203 07:03:10.141403 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-5jdj9" Feb 03 07:03:10 crc kubenswrapper[4998]: I0203 07:03:10.175323 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-6jgrc" Feb 03 07:03:10 crc kubenswrapper[4998]: I0203 07:03:10.293978 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-vxnv2" Feb 03 07:03:10 crc kubenswrapper[4998]: I0203 07:03:10.350282 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-rrghs" Feb 03 07:03:10 crc kubenswrapper[4998]: I0203 07:03:10.421123 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-gztww" Feb 03 07:03:10 crc kubenswrapper[4998]: I0203 07:03:10.472119 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-qrtp4" Feb 03 07:03:10 crc kubenswrapper[4998]: I0203 07:03:10.490398 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-xgcxv" Feb 03 07:03:10 crc kubenswrapper[4998]: I0203 07:03:10.509079 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-c8rqm" Feb 03 07:03:10 crc kubenswrapper[4998]: I0203 07:03:10.530108 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-t2v45" Feb 03 07:03:10 crc kubenswrapper[4998]: I0203 07:03:10.703119 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-4sdk7" Feb 03 07:03:10 crc kubenswrapper[4998]: I0203 07:03:10.822035 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-4dj25" Feb 03 07:03:10 crc kubenswrapper[4998]: I0203 07:03:10.854423 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-5fwsr" Feb 03 07:03:11 crc kubenswrapper[4998]: I0203 07:03:11.939010 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-46ddp" event={"ID":"68fc9816-016d-4444-8ebc-fb099a3e0d3c","Type":"ContainerStarted","Data":"dd7bbd3a52abd0a6ff29ba56798e80e7f3148cc00f77259102b8867aaf74bd82"} Feb 03 07:03:11 crc kubenswrapper[4998]: I0203 07:03:11.939575 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-46ddp" Feb 03 07:03:11 crc kubenswrapper[4998]: I0203 07:03:11.940556 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-564965969-j4xmh" event={"ID":"30f4bbbd-5f3f-4f45-96cf-33fc6c63f458","Type":"ContainerStarted","Data":"b7b9cfc52141d7537317a7915b50ad2537fe7aedc43df4afdf0e02c59e76059f"} Feb 03 07:03:11 crc kubenswrapper[4998]: I0203 07:03:11.940771 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-564965969-j4xmh" Feb 03 07:03:11 crc kubenswrapper[4998]: I0203 07:03:11.941835 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-98dqh" event={"ID":"d627e8f0-589b-44a6-bf5c-9049ac454363","Type":"ContainerStarted","Data":"cecc6a24928ada139c818b4bc76aa396ce7bc482bdb440f650ae37b8a58d4741"} Feb 03 07:03:11 crc kubenswrapper[4998]: I0203 07:03:11.942017 4998 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-98dqh" Feb 03 07:03:11 crc kubenswrapper[4998]: I0203 07:03:11.970051 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-46ddp" podStartSLOduration=2.447761276 podStartE2EDuration="21.970028301s" podCreationTimestamp="2026-02-03 07:02:50 +0000 UTC" firstStartedPulling="2026-02-03 07:02:52.077443095 +0000 UTC m=+1010.364136901" lastFinishedPulling="2026-02-03 07:03:11.59971012 +0000 UTC m=+1029.886403926" observedRunningTime="2026-02-03 07:03:11.961604591 +0000 UTC m=+1030.248298407" watchObservedRunningTime="2026-02-03 07:03:11.970028301 +0000 UTC m=+1030.256722107" Feb 03 07:03:11 crc kubenswrapper[4998]: I0203 07:03:11.985244 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-564965969-j4xmh" podStartSLOduration=2.496611861 podStartE2EDuration="21.985220095s" podCreationTimestamp="2026-02-03 07:02:50 +0000 UTC" firstStartedPulling="2026-02-03 07:02:52.095860691 +0000 UTC m=+1010.382554497" lastFinishedPulling="2026-02-03 07:03:11.584468925 +0000 UTC m=+1029.871162731" observedRunningTime="2026-02-03 07:03:11.980059007 +0000 UTC m=+1030.266752823" watchObservedRunningTime="2026-02-03 07:03:11.985220095 +0000 UTC m=+1030.271913911" Feb 03 07:03:12 crc kubenswrapper[4998]: I0203 07:03:12.002317 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-98dqh" podStartSLOduration=2.535728037 podStartE2EDuration="22.002297442s" podCreationTimestamp="2026-02-03 07:02:50 +0000 UTC" firstStartedPulling="2026-02-03 07:02:52.133238108 +0000 UTC m=+1010.419931914" lastFinishedPulling="2026-02-03 07:03:11.599807513 +0000 UTC m=+1029.886501319" observedRunningTime="2026-02-03 07:03:11.997979759 +0000 UTC m=+1030.284673595" watchObservedRunningTime="2026-02-03 07:03:12.002297442 +0000 UTC m=+1030.288991248" Feb 03 07:03:20 crc kubenswrapper[4998]: I0203 07:03:20.562108 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-5dbkk" Feb 03 07:03:20 crc kubenswrapper[4998]: I0203 07:03:20.855918 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-5fwsr" Feb 03 07:03:21 crc kubenswrapper[4998]: I0203 07:03:21.025193 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-98dqh" Feb 03 07:03:21 crc kubenswrapper[4998]: I0203 07:03:21.091299 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-46ddp" Feb 03 07:03:21 crc kubenswrapper[4998]: I0203 07:03:21.175517 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-564965969-j4xmh" Feb 03 07:03:21 crc kubenswrapper[4998]: I0203 07:03:21.873926 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e6885e8a-0fe6-44be-93e6-b5c663958e1f-cert\") pod \"infra-operator-controller-manager-79955696d6-7fllq\" (UID: \"e6885e8a-0fe6-44be-93e6-b5c663958e1f\") " 
pod="openstack-operators/infra-operator-controller-manager-79955696d6-7fllq" Feb 03 07:03:21 crc kubenswrapper[4998]: I0203 07:03:21.881940 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e6885e8a-0fe6-44be-93e6-b5c663958e1f-cert\") pod \"infra-operator-controller-manager-79955696d6-7fllq\" (UID: \"e6885e8a-0fe6-44be-93e6-b5c663958e1f\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-7fllq" Feb 03 07:03:21 crc kubenswrapper[4998]: I0203 07:03:21.994561 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-cdk8f" Feb 03 07:03:22 crc kubenswrapper[4998]: I0203 07:03:22.002934 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-79955696d6-7fllq" Feb 03 07:03:22 crc kubenswrapper[4998]: I0203 07:03:22.279575 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a954bf90-cac3-4896-bbaf-8ad98f3876d9-cert\") pod \"openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p\" (UID: \"a954bf90-cac3-4896-bbaf-8ad98f3876d9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p" Feb 03 07:03:22 crc kubenswrapper[4998]: I0203 07:03:22.279627 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-79955696d6-7fllq"] Feb 03 07:03:22 crc kubenswrapper[4998]: I0203 07:03:22.286892 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a954bf90-cac3-4896-bbaf-8ad98f3876d9-cert\") pod \"openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p\" (UID: \"a954bf90-cac3-4896-bbaf-8ad98f3876d9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p" Feb 03 07:03:22 crc kubenswrapper[4998]: I0203 07:03:22.443207 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-fk75v" Feb 03 07:03:22 crc kubenswrapper[4998]: I0203 07:03:22.451752 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p" Feb 03 07:03:22 crc kubenswrapper[4998]: I0203 07:03:22.785684 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-metrics-certs\") pod \"openstack-operator-controller-manager-646f757d77-gxl8w\" (UID: \"b4eb1b29-8a96-435e-ac43-e4ee5d349047\") " pod="openstack-operators/openstack-operator-controller-manager-646f757d77-gxl8w" Feb 03 07:03:22 crc kubenswrapper[4998]: I0203 07:03:22.785808 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-webhook-certs\") pod \"openstack-operator-controller-manager-646f757d77-gxl8w\" (UID: \"b4eb1b29-8a96-435e-ac43-e4ee5d349047\") " pod="openstack-operators/openstack-operator-controller-manager-646f757d77-gxl8w" Feb 03 07:03:22 crc kubenswrapper[4998]: I0203 07:03:22.789126 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-webhook-certs\") pod \"openstack-operator-controller-manager-646f757d77-gxl8w\" (UID: \"b4eb1b29-8a96-435e-ac43-e4ee5d349047\") " pod="openstack-operators/openstack-operator-controller-manager-646f757d77-gxl8w" Feb 03 07:03:22 crc kubenswrapper[4998]: I0203 07:03:22.789270 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b4eb1b29-8a96-435e-ac43-e4ee5d349047-metrics-certs\") pod \"openstack-operator-controller-manager-646f757d77-gxl8w\" (UID: \"b4eb1b29-8a96-435e-ac43-e4ee5d349047\") " pod="openstack-operators/openstack-operator-controller-manager-646f757d77-gxl8w" Feb 03 07:03:22 crc kubenswrapper[4998]: I0203 07:03:22.887145 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p"] Feb 03 07:03:22 crc kubenswrapper[4998]: W0203 07:03:22.901066 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda954bf90_cac3_4896_bbaf_8ad98f3876d9.slice/crio-0f21afea09741621a15594b7a3c9e7a22dfb5de8b08d10d23ae10b4005068103 WatchSource:0}: Error finding container 0f21afea09741621a15594b7a3c9e7a22dfb5de8b08d10d23ae10b4005068103: Status 404 returned error can't find the container with id 0f21afea09741621a15594b7a3c9e7a22dfb5de8b08d10d23ae10b4005068103 Feb 03 07:03:22 crc kubenswrapper[4998]: I0203 07:03:22.989846 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-7jp46" Feb 03 07:03:22 crc kubenswrapper[4998]: I0203 07:03:22.998287 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-646f757d77-gxl8w" Feb 03 07:03:23 crc kubenswrapper[4998]: I0203 07:03:23.032871 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-79955696d6-7fllq" event={"ID":"e6885e8a-0fe6-44be-93e6-b5c663958e1f","Type":"ContainerStarted","Data":"34a174bf34580a3e9b85511ac46b5651ff7b5a4f5e80441b081e44f4982ad5a0"} Feb 03 07:03:23 crc kubenswrapper[4998]: I0203 07:03:23.034371 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p" event={"ID":"a954bf90-cac3-4896-bbaf-8ad98f3876d9","Type":"ContainerStarted","Data":"0f21afea09741621a15594b7a3c9e7a22dfb5de8b08d10d23ae10b4005068103"} Feb 03 07:03:23 crc kubenswrapper[4998]: I0203 07:03:23.414143 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-646f757d77-gxl8w"] Feb 03 07:03:23 crc kubenswrapper[4998]: W0203 07:03:23.421943 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb4eb1b29_8a96_435e_ac43_e4ee5d349047.slice/crio-b3fc00097835fffea06c235008dface5772c9bc73d20af0dec57e89c017a8167 WatchSource:0}: Error finding container b3fc00097835fffea06c235008dface5772c9bc73d20af0dec57e89c017a8167: Status 404 returned error can't find the container with id b3fc00097835fffea06c235008dface5772c9bc73d20af0dec57e89c017a8167 Feb 03 07:03:24 crc kubenswrapper[4998]: I0203 07:03:24.043465 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-646f757d77-gxl8w" event={"ID":"b4eb1b29-8a96-435e-ac43-e4ee5d349047","Type":"ContainerStarted","Data":"b3fc00097835fffea06c235008dface5772c9bc73d20af0dec57e89c017a8167"} Feb 03 07:03:29 crc kubenswrapper[4998]: I0203 07:03:29.082388 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-646f757d77-gxl8w" event={"ID":"b4eb1b29-8a96-435e-ac43-e4ee5d349047","Type":"ContainerStarted","Data":"b462245911b3b1f35200c8523cffca20b1ac0a67b17548d6cf6be9b349720e29"} Feb 03 07:03:30 crc kubenswrapper[4998]: I0203 07:03:30.088048 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-646f757d77-gxl8w" Feb 03 07:03:30 crc kubenswrapper[4998]: I0203 07:03:30.114056 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-646f757d77-gxl8w" podStartSLOduration=40.114021901 podStartE2EDuration="40.114021901s" podCreationTimestamp="2026-02-03 07:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:03:30.108341969 +0000 UTC m=+1048.395035845" watchObservedRunningTime="2026-02-03 07:03:30.114021901 +0000 UTC m=+1048.400715707" Feb 03 07:03:31 crc kubenswrapper[4998]: I0203 07:03:31.096565 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-79955696d6-7fllq" event={"ID":"e6885e8a-0fe6-44be-93e6-b5c663958e1f","Type":"ContainerStarted","Data":"68a68705e1e850a3ac4b912eaf48a49ec021e950f6684a1e0ebf13160a6dde55"} Feb 03 07:03:31 crc kubenswrapper[4998]: I0203 07:03:31.097996 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/infra-operator-controller-manager-79955696d6-7fllq" Feb 03 07:03:31 crc kubenswrapper[4998]: I0203 07:03:31.098037 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p" event={"ID":"a954bf90-cac3-4896-bbaf-8ad98f3876d9","Type":"ContainerStarted","Data":"3b4ed17d3a270022e062c5e3d978dd48a5adb056475627306c609635f4797bad"} Feb 03 07:03:31 crc kubenswrapper[4998]: I0203 07:03:31.098130 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p" Feb 03 07:03:31 crc kubenswrapper[4998]: I0203 07:03:31.111581 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-79955696d6-7fllq" podStartSLOduration=33.637573621 podStartE2EDuration="42.111560375s" podCreationTimestamp="2026-02-03 07:02:49 +0000 UTC" firstStartedPulling="2026-02-03 07:03:22.287823908 +0000 UTC m=+1040.574517754" lastFinishedPulling="2026-02-03 07:03:30.761810702 +0000 UTC m=+1049.048504508" observedRunningTime="2026-02-03 07:03:31.110929577 +0000 UTC m=+1049.397623423" watchObservedRunningTime="2026-02-03 07:03:31.111560375 +0000 UTC m=+1049.398254181" Feb 03 07:03:31 crc kubenswrapper[4998]: I0203 07:03:31.141036 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p" podStartSLOduration=34.287862934 podStartE2EDuration="42.141015706s" podCreationTimestamp="2026-02-03 07:02:49 +0000 UTC" firstStartedPulling="2026-02-03 07:03:22.903294527 +0000 UTC m=+1041.189988333" lastFinishedPulling="2026-02-03 07:03:30.756447299 +0000 UTC m=+1049.043141105" observedRunningTime="2026-02-03 07:03:31.136727694 +0000 UTC m=+1049.423421540" watchObservedRunningTime="2026-02-03 07:03:31.141015706 +0000 UTC m=+1049.427709502" Feb 03 07:03:42 crc kubenswrapper[4998]: I0203 07:03:42.010519 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-79955696d6-7fllq" Feb 03 07:03:42 crc kubenswrapper[4998]: I0203 07:03:42.458423 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p" Feb 03 07:03:43 crc kubenswrapper[4998]: I0203 07:03:43.006261 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-646f757d77-gxl8w" Feb 03 07:03:58 crc kubenswrapper[4998]: I0203 07:03:58.149554 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-855cbc58c5-2svpw"] Feb 03 07:03:58 crc kubenswrapper[4998]: I0203 07:03:58.151386 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-855cbc58c5-2svpw" Feb 03 07:03:58 crc kubenswrapper[4998]: I0203 07:03:58.154018 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Feb 03 07:03:58 crc kubenswrapper[4998]: I0203 07:03:58.154488 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Feb 03 07:03:58 crc kubenswrapper[4998]: I0203 07:03:58.154548 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-9r5mc" Feb 03 07:03:58 crc kubenswrapper[4998]: I0203 07:03:58.154656 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Feb 03 07:03:58 crc kubenswrapper[4998]: I0203 07:03:58.166456 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-855cbc58c5-2svpw"] Feb 03 07:03:58 crc kubenswrapper[4998]: I0203 07:03:58.203195 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6fcf94d689-76vgd"] Feb 03 07:03:58 crc kubenswrapper[4998]: I0203 07:03:58.205483 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6fcf94d689-76vgd" Feb 03 07:03:58 crc kubenswrapper[4998]: I0203 07:03:58.208422 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Feb 03 07:03:58 crc kubenswrapper[4998]: I0203 07:03:58.210120 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6fcf94d689-76vgd"] Feb 03 07:03:58 crc kubenswrapper[4998]: I0203 07:03:58.333484 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/687e12c8-30df-4d4f-9c72-740fde397ad4-config\") pod \"dnsmasq-dns-6fcf94d689-76vgd\" (UID: \"687e12c8-30df-4d4f-9c72-740fde397ad4\") " pod="openstack/dnsmasq-dns-6fcf94d689-76vgd" Feb 03 07:03:58 crc kubenswrapper[4998]: I0203 07:03:58.333857 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aff94286-96c8-4ac9-9722-578d56b031d0-config\") pod \"dnsmasq-dns-855cbc58c5-2svpw\" (UID: \"aff94286-96c8-4ac9-9722-578d56b031d0\") " pod="openstack/dnsmasq-dns-855cbc58c5-2svpw" Feb 03 07:03:58 crc kubenswrapper[4998]: I0203 07:03:58.333908 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4w9fr\" (UniqueName: \"kubernetes.io/projected/687e12c8-30df-4d4f-9c72-740fde397ad4-kube-api-access-4w9fr\") pod \"dnsmasq-dns-6fcf94d689-76vgd\" (UID: \"687e12c8-30df-4d4f-9c72-740fde397ad4\") " pod="openstack/dnsmasq-dns-6fcf94d689-76vgd" Feb 03 07:03:58 crc kubenswrapper[4998]: I0203 07:03:58.333945 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p7dpt\" (UniqueName: \"kubernetes.io/projected/aff94286-96c8-4ac9-9722-578d56b031d0-kube-api-access-p7dpt\") pod \"dnsmasq-dns-855cbc58c5-2svpw\" (UID: \"aff94286-96c8-4ac9-9722-578d56b031d0\") " pod="openstack/dnsmasq-dns-855cbc58c5-2svpw" Feb 03 07:03:58 crc kubenswrapper[4998]: I0203 07:03:58.333976 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/687e12c8-30df-4d4f-9c72-740fde397ad4-dns-svc\") pod \"dnsmasq-dns-6fcf94d689-76vgd\" (UID: \"687e12c8-30df-4d4f-9c72-740fde397ad4\") " 
pod="openstack/dnsmasq-dns-6fcf94d689-76vgd" Feb 03 07:03:58 crc kubenswrapper[4998]: I0203 07:03:58.434744 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/687e12c8-30df-4d4f-9c72-740fde397ad4-config\") pod \"dnsmasq-dns-6fcf94d689-76vgd\" (UID: \"687e12c8-30df-4d4f-9c72-740fde397ad4\") " pod="openstack/dnsmasq-dns-6fcf94d689-76vgd" Feb 03 07:03:58 crc kubenswrapper[4998]: I0203 07:03:58.434833 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aff94286-96c8-4ac9-9722-578d56b031d0-config\") pod \"dnsmasq-dns-855cbc58c5-2svpw\" (UID: \"aff94286-96c8-4ac9-9722-578d56b031d0\") " pod="openstack/dnsmasq-dns-855cbc58c5-2svpw" Feb 03 07:03:58 crc kubenswrapper[4998]: I0203 07:03:58.434906 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4w9fr\" (UniqueName: \"kubernetes.io/projected/687e12c8-30df-4d4f-9c72-740fde397ad4-kube-api-access-4w9fr\") pod \"dnsmasq-dns-6fcf94d689-76vgd\" (UID: \"687e12c8-30df-4d4f-9c72-740fde397ad4\") " pod="openstack/dnsmasq-dns-6fcf94d689-76vgd" Feb 03 07:03:58 crc kubenswrapper[4998]: I0203 07:03:58.434970 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p7dpt\" (UniqueName: \"kubernetes.io/projected/aff94286-96c8-4ac9-9722-578d56b031d0-kube-api-access-p7dpt\") pod \"dnsmasq-dns-855cbc58c5-2svpw\" (UID: \"aff94286-96c8-4ac9-9722-578d56b031d0\") " pod="openstack/dnsmasq-dns-855cbc58c5-2svpw" Feb 03 07:03:58 crc kubenswrapper[4998]: I0203 07:03:58.435007 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/687e12c8-30df-4d4f-9c72-740fde397ad4-dns-svc\") pod \"dnsmasq-dns-6fcf94d689-76vgd\" (UID: \"687e12c8-30df-4d4f-9c72-740fde397ad4\") " pod="openstack/dnsmasq-dns-6fcf94d689-76vgd" Feb 03 07:03:58 crc kubenswrapper[4998]: I0203 07:03:58.436123 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/687e12c8-30df-4d4f-9c72-740fde397ad4-dns-svc\") pod \"dnsmasq-dns-6fcf94d689-76vgd\" (UID: \"687e12c8-30df-4d4f-9c72-740fde397ad4\") " pod="openstack/dnsmasq-dns-6fcf94d689-76vgd" Feb 03 07:03:58 crc kubenswrapper[4998]: I0203 07:03:58.436186 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aff94286-96c8-4ac9-9722-578d56b031d0-config\") pod \"dnsmasq-dns-855cbc58c5-2svpw\" (UID: \"aff94286-96c8-4ac9-9722-578d56b031d0\") " pod="openstack/dnsmasq-dns-855cbc58c5-2svpw" Feb 03 07:03:58 crc kubenswrapper[4998]: I0203 07:03:58.436221 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/687e12c8-30df-4d4f-9c72-740fde397ad4-config\") pod \"dnsmasq-dns-6fcf94d689-76vgd\" (UID: \"687e12c8-30df-4d4f-9c72-740fde397ad4\") " pod="openstack/dnsmasq-dns-6fcf94d689-76vgd" Feb 03 07:03:58 crc kubenswrapper[4998]: I0203 07:03:58.453584 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p7dpt\" (UniqueName: \"kubernetes.io/projected/aff94286-96c8-4ac9-9722-578d56b031d0-kube-api-access-p7dpt\") pod \"dnsmasq-dns-855cbc58c5-2svpw\" (UID: \"aff94286-96c8-4ac9-9722-578d56b031d0\") " pod="openstack/dnsmasq-dns-855cbc58c5-2svpw" Feb 03 07:03:58 crc kubenswrapper[4998]: I0203 07:03:58.454343 4998 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4w9fr\" (UniqueName: \"kubernetes.io/projected/687e12c8-30df-4d4f-9c72-740fde397ad4-kube-api-access-4w9fr\") pod \"dnsmasq-dns-6fcf94d689-76vgd\" (UID: \"687e12c8-30df-4d4f-9c72-740fde397ad4\") " pod="openstack/dnsmasq-dns-6fcf94d689-76vgd" Feb 03 07:03:58 crc kubenswrapper[4998]: I0203 07:03:58.476093 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-855cbc58c5-2svpw" Feb 03 07:03:58 crc kubenswrapper[4998]: I0203 07:03:58.526326 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6fcf94d689-76vgd" Feb 03 07:03:58 crc kubenswrapper[4998]: I0203 07:03:58.899369 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-855cbc58c5-2svpw"] Feb 03 07:03:58 crc kubenswrapper[4998]: I0203 07:03:58.905198 4998 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 03 07:03:58 crc kubenswrapper[4998]: I0203 07:03:58.975710 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6fcf94d689-76vgd"] Feb 03 07:03:59 crc kubenswrapper[4998]: I0203 07:03:59.883134 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fcf94d689-76vgd" event={"ID":"687e12c8-30df-4d4f-9c72-740fde397ad4","Type":"ContainerStarted","Data":"256d2ab1b451302bf60e832f2225f5d9f81cd79a1c616617916d0405e8a3e6df"} Feb 03 07:03:59 crc kubenswrapper[4998]: I0203 07:03:59.885370 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-855cbc58c5-2svpw" event={"ID":"aff94286-96c8-4ac9-9722-578d56b031d0","Type":"ContainerStarted","Data":"4b7defbbb4fa989e1d6fe3aa1d3b5437bb16e91e87b2a7aa97ef7c8f4c4e99a4"} Feb 03 07:04:00 crc kubenswrapper[4998]: I0203 07:04:00.934996 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6fcf94d689-76vgd"] Feb 03 07:04:00 crc kubenswrapper[4998]: I0203 07:04:00.956910 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-f54874ffc-qbqgv"] Feb 03 07:04:00 crc kubenswrapper[4998]: I0203 07:04:00.964044 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-f54874ffc-qbqgv" Feb 03 07:04:00 crc kubenswrapper[4998]: I0203 07:04:00.974994 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f54874ffc-qbqgv"] Feb 03 07:04:00 crc kubenswrapper[4998]: I0203 07:04:00.996394 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba09b0ed-2387-4b93-85b8-12d159155c7f-config\") pod \"dnsmasq-dns-f54874ffc-qbqgv\" (UID: \"ba09b0ed-2387-4b93-85b8-12d159155c7f\") " pod="openstack/dnsmasq-dns-f54874ffc-qbqgv" Feb 03 07:04:00 crc kubenswrapper[4998]: I0203 07:04:00.996439 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ghcv9\" (UniqueName: \"kubernetes.io/projected/ba09b0ed-2387-4b93-85b8-12d159155c7f-kube-api-access-ghcv9\") pod \"dnsmasq-dns-f54874ffc-qbqgv\" (UID: \"ba09b0ed-2387-4b93-85b8-12d159155c7f\") " pod="openstack/dnsmasq-dns-f54874ffc-qbqgv" Feb 03 07:04:00 crc kubenswrapper[4998]: I0203 07:04:00.996460 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ba09b0ed-2387-4b93-85b8-12d159155c7f-dns-svc\") pod \"dnsmasq-dns-f54874ffc-qbqgv\" (UID: \"ba09b0ed-2387-4b93-85b8-12d159155c7f\") " pod="openstack/dnsmasq-dns-f54874ffc-qbqgv" Feb 03 07:04:01 crc kubenswrapper[4998]: I0203 07:04:01.097595 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba09b0ed-2387-4b93-85b8-12d159155c7f-config\") pod \"dnsmasq-dns-f54874ffc-qbqgv\" (UID: \"ba09b0ed-2387-4b93-85b8-12d159155c7f\") " pod="openstack/dnsmasq-dns-f54874ffc-qbqgv" Feb 03 07:04:01 crc kubenswrapper[4998]: I0203 07:04:01.097861 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ghcv9\" (UniqueName: \"kubernetes.io/projected/ba09b0ed-2387-4b93-85b8-12d159155c7f-kube-api-access-ghcv9\") pod \"dnsmasq-dns-f54874ffc-qbqgv\" (UID: \"ba09b0ed-2387-4b93-85b8-12d159155c7f\") " pod="openstack/dnsmasq-dns-f54874ffc-qbqgv" Feb 03 07:04:01 crc kubenswrapper[4998]: I0203 07:04:01.097945 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ba09b0ed-2387-4b93-85b8-12d159155c7f-dns-svc\") pod \"dnsmasq-dns-f54874ffc-qbqgv\" (UID: \"ba09b0ed-2387-4b93-85b8-12d159155c7f\") " pod="openstack/dnsmasq-dns-f54874ffc-qbqgv" Feb 03 07:04:01 crc kubenswrapper[4998]: I0203 07:04:01.098839 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba09b0ed-2387-4b93-85b8-12d159155c7f-config\") pod \"dnsmasq-dns-f54874ffc-qbqgv\" (UID: \"ba09b0ed-2387-4b93-85b8-12d159155c7f\") " pod="openstack/dnsmasq-dns-f54874ffc-qbqgv" Feb 03 07:04:01 crc kubenswrapper[4998]: I0203 07:04:01.099041 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ba09b0ed-2387-4b93-85b8-12d159155c7f-dns-svc\") pod \"dnsmasq-dns-f54874ffc-qbqgv\" (UID: \"ba09b0ed-2387-4b93-85b8-12d159155c7f\") " pod="openstack/dnsmasq-dns-f54874ffc-qbqgv" Feb 03 07:04:01 crc kubenswrapper[4998]: I0203 07:04:01.140742 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ghcv9\" (UniqueName: 
\"kubernetes.io/projected/ba09b0ed-2387-4b93-85b8-12d159155c7f-kube-api-access-ghcv9\") pod \"dnsmasq-dns-f54874ffc-qbqgv\" (UID: \"ba09b0ed-2387-4b93-85b8-12d159155c7f\") " pod="openstack/dnsmasq-dns-f54874ffc-qbqgv" Feb 03 07:04:01 crc kubenswrapper[4998]: I0203 07:04:01.283247 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f54874ffc-qbqgv" Feb 03 07:04:01 crc kubenswrapper[4998]: I0203 07:04:01.392333 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-855cbc58c5-2svpw"] Feb 03 07:04:01 crc kubenswrapper[4998]: I0203 07:04:01.429362 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-67ff45466c-fjqgp"] Feb 03 07:04:01 crc kubenswrapper[4998]: I0203 07:04:01.430484 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67ff45466c-fjqgp" Feb 03 07:04:01 crc kubenswrapper[4998]: I0203 07:04:01.445293 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67ff45466c-fjqgp"] Feb 03 07:04:01 crc kubenswrapper[4998]: I0203 07:04:01.606075 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f2019872-2dfd-4470-883b-6dbbc86eb084-dns-svc\") pod \"dnsmasq-dns-67ff45466c-fjqgp\" (UID: \"f2019872-2dfd-4470-883b-6dbbc86eb084\") " pod="openstack/dnsmasq-dns-67ff45466c-fjqgp" Feb 03 07:04:01 crc kubenswrapper[4998]: I0203 07:04:01.606165 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2019872-2dfd-4470-883b-6dbbc86eb084-config\") pod \"dnsmasq-dns-67ff45466c-fjqgp\" (UID: \"f2019872-2dfd-4470-883b-6dbbc86eb084\") " pod="openstack/dnsmasq-dns-67ff45466c-fjqgp" Feb 03 07:04:01 crc kubenswrapper[4998]: I0203 07:04:01.606213 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzw76\" (UniqueName: \"kubernetes.io/projected/f2019872-2dfd-4470-883b-6dbbc86eb084-kube-api-access-fzw76\") pod \"dnsmasq-dns-67ff45466c-fjqgp\" (UID: \"f2019872-2dfd-4470-883b-6dbbc86eb084\") " pod="openstack/dnsmasq-dns-67ff45466c-fjqgp" Feb 03 07:04:01 crc kubenswrapper[4998]: I0203 07:04:01.707808 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f2019872-2dfd-4470-883b-6dbbc86eb084-dns-svc\") pod \"dnsmasq-dns-67ff45466c-fjqgp\" (UID: \"f2019872-2dfd-4470-883b-6dbbc86eb084\") " pod="openstack/dnsmasq-dns-67ff45466c-fjqgp" Feb 03 07:04:01 crc kubenswrapper[4998]: I0203 07:04:01.707877 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2019872-2dfd-4470-883b-6dbbc86eb084-config\") pod \"dnsmasq-dns-67ff45466c-fjqgp\" (UID: \"f2019872-2dfd-4470-883b-6dbbc86eb084\") " pod="openstack/dnsmasq-dns-67ff45466c-fjqgp" Feb 03 07:04:01 crc kubenswrapper[4998]: I0203 07:04:01.707906 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzw76\" (UniqueName: \"kubernetes.io/projected/f2019872-2dfd-4470-883b-6dbbc86eb084-kube-api-access-fzw76\") pod \"dnsmasq-dns-67ff45466c-fjqgp\" (UID: \"f2019872-2dfd-4470-883b-6dbbc86eb084\") " pod="openstack/dnsmasq-dns-67ff45466c-fjqgp" Feb 03 07:04:01 crc kubenswrapper[4998]: I0203 07:04:01.708757 4998 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2019872-2dfd-4470-883b-6dbbc86eb084-config\") pod \"dnsmasq-dns-67ff45466c-fjqgp\" (UID: \"f2019872-2dfd-4470-883b-6dbbc86eb084\") " pod="openstack/dnsmasq-dns-67ff45466c-fjqgp" Feb 03 07:04:01 crc kubenswrapper[4998]: I0203 07:04:01.708757 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f2019872-2dfd-4470-883b-6dbbc86eb084-dns-svc\") pod \"dnsmasq-dns-67ff45466c-fjqgp\" (UID: \"f2019872-2dfd-4470-883b-6dbbc86eb084\") " pod="openstack/dnsmasq-dns-67ff45466c-fjqgp" Feb 03 07:04:01 crc kubenswrapper[4998]: I0203 07:04:01.733592 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzw76\" (UniqueName: \"kubernetes.io/projected/f2019872-2dfd-4470-883b-6dbbc86eb084-kube-api-access-fzw76\") pod \"dnsmasq-dns-67ff45466c-fjqgp\" (UID: \"f2019872-2dfd-4470-883b-6dbbc86eb084\") " pod="openstack/dnsmasq-dns-67ff45466c-fjqgp" Feb 03 07:04:01 crc kubenswrapper[4998]: I0203 07:04:01.753532 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67ff45466c-fjqgp" Feb 03 07:04:01 crc kubenswrapper[4998]: I0203 07:04:01.868241 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f54874ffc-qbqgv"] Feb 03 07:04:01 crc kubenswrapper[4998]: W0203 07:04:01.882615 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podba09b0ed_2387_4b93_85b8_12d159155c7f.slice/crio-5ebbb99c2ee592df0684b89044fc8566f6bfd2a8eebd92b11bc7b0c789316ba6 WatchSource:0}: Error finding container 5ebbb99c2ee592df0684b89044fc8566f6bfd2a8eebd92b11bc7b0c789316ba6: Status 404 returned error can't find the container with id 5ebbb99c2ee592df0684b89044fc8566f6bfd2a8eebd92b11bc7b0c789316ba6 Feb 03 07:04:01 crc kubenswrapper[4998]: I0203 07:04:01.910188 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f54874ffc-qbqgv" event={"ID":"ba09b0ed-2387-4b93-85b8-12d159155c7f","Type":"ContainerStarted","Data":"5ebbb99c2ee592df0684b89044fc8566f6bfd2a8eebd92b11bc7b0c789316ba6"} Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.156296 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.157386 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.160046 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.160195 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.160545 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.160896 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.162215 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.164220 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-6gr6b" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.165390 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.172203 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.206722 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67ff45466c-fjqgp"] Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.315125 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " pod="openstack/rabbitmq-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.315352 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " pod="openstack/rabbitmq-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.315437 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " pod="openstack/rabbitmq-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.315500 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " pod="openstack/rabbitmq-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.315535 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-config-data\") pod \"rabbitmq-server-0\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " pod="openstack/rabbitmq-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.315551 4998 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " pod="openstack/rabbitmq-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.315586 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9mxrp\" (UniqueName: \"kubernetes.io/projected/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-kube-api-access-9mxrp\") pod \"rabbitmq-server-0\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " pod="openstack/rabbitmq-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.315607 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " pod="openstack/rabbitmq-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.315621 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-server-conf\") pod \"rabbitmq-server-0\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " pod="openstack/rabbitmq-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.315649 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " pod="openstack/rabbitmq-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.315667 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-pod-info\") pod \"rabbitmq-server-0\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " pod="openstack/rabbitmq-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.416617 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " pod="openstack/rabbitmq-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.416674 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " pod="openstack/rabbitmq-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.416715 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " pod="openstack/rabbitmq-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.416740 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-config-data\") pod \"rabbitmq-server-0\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " pod="openstack/rabbitmq-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.416762 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " pod="openstack/rabbitmq-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.416967 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9mxrp\" (UniqueName: \"kubernetes.io/projected/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-kube-api-access-9mxrp\") pod \"rabbitmq-server-0\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " pod="openstack/rabbitmq-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.417004 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " pod="openstack/rabbitmq-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.417024 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-server-conf\") pod \"rabbitmq-server-0\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " pod="openstack/rabbitmq-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.417063 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " pod="openstack/rabbitmq-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.417090 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-pod-info\") pod \"rabbitmq-server-0\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " pod="openstack/rabbitmq-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.417117 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " pod="openstack/rabbitmq-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.417437 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " pod="openstack/rabbitmq-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.417494 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " pod="openstack/rabbitmq-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 
07:04:02.418042 4998 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.419050 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.419525 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.419589 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.419714 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.420150 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.426098 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-pod-info\") pod \"rabbitmq-server-0\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " pod="openstack/rabbitmq-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.428177 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-config-data\") pod \"rabbitmq-server-0\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " pod="openstack/rabbitmq-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.428234 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " pod="openstack/rabbitmq-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.429392 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-server-conf\") pod \"rabbitmq-server-0\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " pod="openstack/rabbitmq-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.429846 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.431636 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " pod="openstack/rabbitmq-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.432765 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " pod="openstack/rabbitmq-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.436693 4998 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9mxrp\" (UniqueName: \"kubernetes.io/projected/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-kube-api-access-9mxrp\") pod \"rabbitmq-server-0\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " pod="openstack/rabbitmq-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.454295 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " pod="openstack/rabbitmq-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.503195 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " pod="openstack/rabbitmq-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.551920 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.553722 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.558082 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.559369 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.559369 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.559406 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-gflxc" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.559423 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.559955 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.560097 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.567230 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.726137 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cc9d5160-2c51-474c-aca1-1af693753ee8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.726183 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cc9d5160-2c51-474c-aca1-1af693753ee8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 
07:04:02.726204 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cc9d5160-2c51-474c-aca1-1af693753ee8-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.726232 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cc9d5160-2c51-474c-aca1-1af693753ee8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.726249 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.726266 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cc9d5160-2c51-474c-aca1-1af693753ee8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.726448 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cc9d5160-2c51-474c-aca1-1af693753ee8-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.726593 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hmfjm\" (UniqueName: \"kubernetes.io/projected/cc9d5160-2c51-474c-aca1-1af693753ee8-kube-api-access-hmfjm\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.726653 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cc9d5160-2c51-474c-aca1-1af693753ee8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.726732 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cc9d5160-2c51-474c-aca1-1af693753ee8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.726805 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cc9d5160-2c51-474c-aca1-1af693753ee8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:04:02 crc 
kubenswrapper[4998]: I0203 07:04:02.804015 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-6gr6b" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.812583 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.828283 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cc9d5160-2c51-474c-aca1-1af693753ee8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.828333 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cc9d5160-2c51-474c-aca1-1af693753ee8-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.828378 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cc9d5160-2c51-474c-aca1-1af693753ee8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.828403 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.828459 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cc9d5160-2c51-474c-aca1-1af693753ee8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.828491 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cc9d5160-2c51-474c-aca1-1af693753ee8-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.828549 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hmfjm\" (UniqueName: \"kubernetes.io/projected/cc9d5160-2c51-474c-aca1-1af693753ee8-kube-api-access-hmfjm\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.828581 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cc9d5160-2c51-474c-aca1-1af693753ee8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.828620 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: 
\"kubernetes.io/downward-api/cc9d5160-2c51-474c-aca1-1af693753ee8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.828657 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cc9d5160-2c51-474c-aca1-1af693753ee8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.828699 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cc9d5160-2c51-474c-aca1-1af693753ee8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.829840 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cc9d5160-2c51-474c-aca1-1af693753ee8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.830218 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cc9d5160-2c51-474c-aca1-1af693753ee8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.830590 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cc9d5160-2c51-474c-aca1-1af693753ee8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.830893 4998 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.830963 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cc9d5160-2c51-474c-aca1-1af693753ee8-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.831192 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cc9d5160-2c51-474c-aca1-1af693753ee8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.834014 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cc9d5160-2c51-474c-aca1-1af693753ee8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " 
pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.834214 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cc9d5160-2c51-474c-aca1-1af693753ee8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.836169 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cc9d5160-2c51-474c-aca1-1af693753ee8-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.837277 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cc9d5160-2c51-474c-aca1-1af693753ee8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.848086 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hmfjm\" (UniqueName: \"kubernetes.io/projected/cc9d5160-2c51-474c-aca1-1af693753ee8-kube-api-access-hmfjm\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.860573 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.897474 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:04:02 crc kubenswrapper[4998]: I0203 07:04:02.923550 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67ff45466c-fjqgp" event={"ID":"f2019872-2dfd-4470-883b-6dbbc86eb084","Type":"ContainerStarted","Data":"c8e88d906cacbefac3e835df6afab736b5348a179a8bb6cef460d39dde2c79de"} Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.446742 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 03 07:04:03 crc kubenswrapper[4998]: W0203 07:04:03.476031 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod59f5a5d7_787a_4941_a2d3_2fe8db65cb31.slice/crio-3a4a5c41d8f321c35b88f725c44754cf52f2388e8495c38d53c001c5abe94c33 WatchSource:0}: Error finding container 3a4a5c41d8f321c35b88f725c44754cf52f2388e8495c38d53c001c5abe94c33: Status 404 returned error can't find the container with id 3a4a5c41d8f321c35b88f725c44754cf52f2388e8495c38d53c001c5abe94c33 Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.514615 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 03 07:04:03 crc kubenswrapper[4998]: W0203 07:04:03.517219 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcc9d5160_2c51_474c_aca1_1af693753ee8.slice/crio-a9e81aac73ee011ec939e75ef522f0676589317f53a4cf153e1c39705a414787 WatchSource:0}: Error finding container a9e81aac73ee011ec939e75ef522f0676589317f53a4cf153e1c39705a414787: Status 404 returned error can't find the container with id a9e81aac73ee011ec939e75ef522f0676589317f53a4cf153e1c39705a414787 Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.654205 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.657707 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.661342 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.661356 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.662207 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.662331 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-phq8g" Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.667275 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.680112 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.745118 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l4v8n\" (UniqueName: \"kubernetes.io/projected/f5714626-00c5-4b11-b056-40ff428fc017-kube-api-access-l4v8n\") pod \"openstack-galera-0\" (UID: \"f5714626-00c5-4b11-b056-40ff428fc017\") " pod="openstack/openstack-galera-0" Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.745158 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"f5714626-00c5-4b11-b056-40ff428fc017\") " pod="openstack/openstack-galera-0" Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.745184 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f5714626-00c5-4b11-b056-40ff428fc017-config-data-default\") pod \"openstack-galera-0\" (UID: \"f5714626-00c5-4b11-b056-40ff428fc017\") " pod="openstack/openstack-galera-0" Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.745209 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f5714626-00c5-4b11-b056-40ff428fc017-kolla-config\") pod \"openstack-galera-0\" (UID: \"f5714626-00c5-4b11-b056-40ff428fc017\") " pod="openstack/openstack-galera-0" Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.745250 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f5714626-00c5-4b11-b056-40ff428fc017-config-data-generated\") pod \"openstack-galera-0\" (UID: \"f5714626-00c5-4b11-b056-40ff428fc017\") " pod="openstack/openstack-galera-0" Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.745287 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5714626-00c5-4b11-b056-40ff428fc017-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"f5714626-00c5-4b11-b056-40ff428fc017\") " pod="openstack/openstack-galera-0" Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.745312 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5714626-00c5-4b11-b056-40ff428fc017-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"f5714626-00c5-4b11-b056-40ff428fc017\") " pod="openstack/openstack-galera-0" Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.745339 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f5714626-00c5-4b11-b056-40ff428fc017-operator-scripts\") pod \"openstack-galera-0\" (UID: \"f5714626-00c5-4b11-b056-40ff428fc017\") " pod="openstack/openstack-galera-0" Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.846885 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l4v8n\" (UniqueName: \"kubernetes.io/projected/f5714626-00c5-4b11-b056-40ff428fc017-kube-api-access-l4v8n\") pod \"openstack-galera-0\" (UID: \"f5714626-00c5-4b11-b056-40ff428fc017\") " pod="openstack/openstack-galera-0" Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.846928 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"f5714626-00c5-4b11-b056-40ff428fc017\") " pod="openstack/openstack-galera-0" Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.846947 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f5714626-00c5-4b11-b056-40ff428fc017-config-data-default\") pod \"openstack-galera-0\" (UID: \"f5714626-00c5-4b11-b056-40ff428fc017\") " pod="openstack/openstack-galera-0" Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.846972 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f5714626-00c5-4b11-b056-40ff428fc017-kolla-config\") pod \"openstack-galera-0\" (UID: \"f5714626-00c5-4b11-b056-40ff428fc017\") " pod="openstack/openstack-galera-0" Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.849095 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f5714626-00c5-4b11-b056-40ff428fc017-config-data-generated\") pod \"openstack-galera-0\" (UID: \"f5714626-00c5-4b11-b056-40ff428fc017\") " pod="openstack/openstack-galera-0" Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.849432 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5714626-00c5-4b11-b056-40ff428fc017-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"f5714626-00c5-4b11-b056-40ff428fc017\") " pod="openstack/openstack-galera-0" Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.849489 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5714626-00c5-4b11-b056-40ff428fc017-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"f5714626-00c5-4b11-b056-40ff428fc017\") " pod="openstack/openstack-galera-0" Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.849531 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f5714626-00c5-4b11-b056-40ff428fc017-operator-scripts\") pod \"openstack-galera-0\" (UID: 
\"f5714626-00c5-4b11-b056-40ff428fc017\") " pod="openstack/openstack-galera-0" Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.850171 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f5714626-00c5-4b11-b056-40ff428fc017-config-data-default\") pod \"openstack-galera-0\" (UID: \"f5714626-00c5-4b11-b056-40ff428fc017\") " pod="openstack/openstack-galera-0" Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.850468 4998 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"f5714626-00c5-4b11-b056-40ff428fc017\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/openstack-galera-0" Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.852428 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f5714626-00c5-4b11-b056-40ff428fc017-config-data-generated\") pod \"openstack-galera-0\" (UID: \"f5714626-00c5-4b11-b056-40ff428fc017\") " pod="openstack/openstack-galera-0" Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.853985 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f5714626-00c5-4b11-b056-40ff428fc017-kolla-config\") pod \"openstack-galera-0\" (UID: \"f5714626-00c5-4b11-b056-40ff428fc017\") " pod="openstack/openstack-galera-0" Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.875845 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5714626-00c5-4b11-b056-40ff428fc017-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"f5714626-00c5-4b11-b056-40ff428fc017\") " pod="openstack/openstack-galera-0" Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.876246 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5714626-00c5-4b11-b056-40ff428fc017-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"f5714626-00c5-4b11-b056-40ff428fc017\") " pod="openstack/openstack-galera-0" Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.880028 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l4v8n\" (UniqueName: \"kubernetes.io/projected/f5714626-00c5-4b11-b056-40ff428fc017-kube-api-access-l4v8n\") pod \"openstack-galera-0\" (UID: \"f5714626-00c5-4b11-b056-40ff428fc017\") " pod="openstack/openstack-galera-0" Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.881557 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"f5714626-00c5-4b11-b056-40ff428fc017\") " pod="openstack/openstack-galera-0" Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.886696 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f5714626-00c5-4b11-b056-40ff428fc017-operator-scripts\") pod \"openstack-galera-0\" (UID: \"f5714626-00c5-4b11-b056-40ff428fc017\") " pod="openstack/openstack-galera-0" Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.933121 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" 
event={"ID":"59f5a5d7-787a-4941-a2d3-2fe8db65cb31","Type":"ContainerStarted","Data":"3a4a5c41d8f321c35b88f725c44754cf52f2388e8495c38d53c001c5abe94c33"} Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.935253 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"cc9d5160-2c51-474c-aca1-1af693753ee8","Type":"ContainerStarted","Data":"a9e81aac73ee011ec939e75ef522f0676589317f53a4cf153e1c39705a414787"} Feb 03 07:04:03 crc kubenswrapper[4998]: I0203 07:04:03.985675 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Feb 03 07:04:04 crc kubenswrapper[4998]: I0203 07:04:04.470429 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.101917 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.103687 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.105803 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.107065 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-rz6fx" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.107366 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.107742 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.116833 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.172197 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4485520f-24df-4521-b5e9-6076ffa00bd6-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"4485520f-24df-4521-b5e9-6076ffa00bd6\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.172339 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4485520f-24df-4521-b5e9-6076ffa00bd6-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"4485520f-24df-4521-b5e9-6076ffa00bd6\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.172418 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-khxtl\" (UniqueName: \"kubernetes.io/projected/4485520f-24df-4521-b5e9-6076ffa00bd6-kube-api-access-khxtl\") pod \"openstack-cell1-galera-0\" (UID: \"4485520f-24df-4521-b5e9-6076ffa00bd6\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.172447 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4485520f-24df-4521-b5e9-6076ffa00bd6-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: 
\"4485520f-24df-4521-b5e9-6076ffa00bd6\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.172598 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4485520f-24df-4521-b5e9-6076ffa00bd6-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"4485520f-24df-4521-b5e9-6076ffa00bd6\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.172655 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-cell1-galera-0\" (UID: \"4485520f-24df-4521-b5e9-6076ffa00bd6\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.172676 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4485520f-24df-4521-b5e9-6076ffa00bd6-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"4485520f-24df-4521-b5e9-6076ffa00bd6\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.172735 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4485520f-24df-4521-b5e9-6076ffa00bd6-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"4485520f-24df-4521-b5e9-6076ffa00bd6\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.274354 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4485520f-24df-4521-b5e9-6076ffa00bd6-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"4485520f-24df-4521-b5e9-6076ffa00bd6\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.274421 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4485520f-24df-4521-b5e9-6076ffa00bd6-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"4485520f-24df-4521-b5e9-6076ffa00bd6\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.274458 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-khxtl\" (UniqueName: \"kubernetes.io/projected/4485520f-24df-4521-b5e9-6076ffa00bd6-kube-api-access-khxtl\") pod \"openstack-cell1-galera-0\" (UID: \"4485520f-24df-4521-b5e9-6076ffa00bd6\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.274509 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4485520f-24df-4521-b5e9-6076ffa00bd6-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"4485520f-24df-4521-b5e9-6076ffa00bd6\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.274565 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4485520f-24df-4521-b5e9-6076ffa00bd6-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: 
\"4485520f-24df-4521-b5e9-6076ffa00bd6\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.274583 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-cell1-galera-0\" (UID: \"4485520f-24df-4521-b5e9-6076ffa00bd6\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.274601 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4485520f-24df-4521-b5e9-6076ffa00bd6-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"4485520f-24df-4521-b5e9-6076ffa00bd6\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.274623 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4485520f-24df-4521-b5e9-6076ffa00bd6-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"4485520f-24df-4521-b5e9-6076ffa00bd6\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.275642 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4485520f-24df-4521-b5e9-6076ffa00bd6-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"4485520f-24df-4521-b5e9-6076ffa00bd6\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.276568 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4485520f-24df-4521-b5e9-6076ffa00bd6-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"4485520f-24df-4521-b5e9-6076ffa00bd6\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.276689 4998 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-cell1-galera-0\" (UID: \"4485520f-24df-4521-b5e9-6076ffa00bd6\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/openstack-cell1-galera-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.277689 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4485520f-24df-4521-b5e9-6076ffa00bd6-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"4485520f-24df-4521-b5e9-6076ffa00bd6\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.278805 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4485520f-24df-4521-b5e9-6076ffa00bd6-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"4485520f-24df-4521-b5e9-6076ffa00bd6\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.279828 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4485520f-24df-4521-b5e9-6076ffa00bd6-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"4485520f-24df-4521-b5e9-6076ffa00bd6\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.281317 
4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4485520f-24df-4521-b5e9-6076ffa00bd6-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"4485520f-24df-4521-b5e9-6076ffa00bd6\") " pod="openstack/openstack-cell1-galera-0"
Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.292958 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-khxtl\" (UniqueName: \"kubernetes.io/projected/4485520f-24df-4521-b5e9-6076ffa00bd6-kube-api-access-khxtl\") pod \"openstack-cell1-galera-0\" (UID: \"4485520f-24df-4521-b5e9-6076ffa00bd6\") " pod="openstack/openstack-cell1-galera-0"
Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.316525 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-cell1-galera-0\" (UID: \"4485520f-24df-4521-b5e9-6076ffa00bd6\") " pod="openstack/openstack-cell1-galera-0"
Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.401636 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"]
Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.402757 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0"
Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.406412 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc"
Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.406653 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data"
Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.407319 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-8pp9s"
Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.420485 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"]
Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.440243 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
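As with the earlier pods, each SyncLoop ADD above is immediately followed by reflector.go:368 "Caches populated" entries for exactly the Secrets and ConfigMaps the new pod references (for memcached-0: cert-memcached-svc, memcached-config-data, and its dockercfg pull secret), i.e. the kubelet warms a watch-backed cache per referenced object before the volume reconciler needs it. To gauge how much reconcile traffic each pod contributes in a window like this one, the pod="..." field can simply be tallied; a throwaway sketch follows (tally.go is our name for it, reading the decompressed log on stdin).

    // Throwaway sketch: count log entries per pod="..." field.
    package main

    import (
        "bufio"
        "fmt"
        "os"
        "regexp"
    )

    var podRE = regexp.MustCompile(`pod="([^"]+)"`)

    func main() {
        counts := map[string]int{}
        sc := bufio.NewScanner(os.Stdin)
        sc.Buffer(make([]byte, 0, 1<<20), 1<<20) // kubelet lines can be long
        for sc.Scan() {
            if m := podRE.FindStringSubmatch(sc.Text()); m != nil {
                counts[m[1]]++
            }
        }
        for pod, n := range counts {
            fmt.Printf("%6d %s\n", n, pod)
        }
    }

Run as, for example: zcat kubelet.log.gz | go run tally.go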
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.479904 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3c2bbe1b-74b6-4e3c-8468-735ad0b00146-kolla-config\") pod \"memcached-0\" (UID: \"3c2bbe1b-74b6-4e3c-8468-735ad0b00146\") " pod="openstack/memcached-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.480048 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c2bbe1b-74b6-4e3c-8468-735ad0b00146-combined-ca-bundle\") pod \"memcached-0\" (UID: \"3c2bbe1b-74b6-4e3c-8468-735ad0b00146\") " pod="openstack/memcached-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.480095 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5p28\" (UniqueName: \"kubernetes.io/projected/3c2bbe1b-74b6-4e3c-8468-735ad0b00146-kube-api-access-r5p28\") pod \"memcached-0\" (UID: \"3c2bbe1b-74b6-4e3c-8468-735ad0b00146\") " pod="openstack/memcached-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.480120 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3c2bbe1b-74b6-4e3c-8468-735ad0b00146-config-data\") pod \"memcached-0\" (UID: \"3c2bbe1b-74b6-4e3c-8468-735ad0b00146\") " pod="openstack/memcached-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.480143 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c2bbe1b-74b6-4e3c-8468-735ad0b00146-memcached-tls-certs\") pod \"memcached-0\" (UID: \"3c2bbe1b-74b6-4e3c-8468-735ad0b00146\") " pod="openstack/memcached-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.582073 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5p28\" (UniqueName: \"kubernetes.io/projected/3c2bbe1b-74b6-4e3c-8468-735ad0b00146-kube-api-access-r5p28\") pod \"memcached-0\" (UID: \"3c2bbe1b-74b6-4e3c-8468-735ad0b00146\") " pod="openstack/memcached-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.582137 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3c2bbe1b-74b6-4e3c-8468-735ad0b00146-config-data\") pod \"memcached-0\" (UID: \"3c2bbe1b-74b6-4e3c-8468-735ad0b00146\") " pod="openstack/memcached-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.582180 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c2bbe1b-74b6-4e3c-8468-735ad0b00146-memcached-tls-certs\") pod \"memcached-0\" (UID: \"3c2bbe1b-74b6-4e3c-8468-735ad0b00146\") " pod="openstack/memcached-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.582235 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3c2bbe1b-74b6-4e3c-8468-735ad0b00146-kolla-config\") pod \"memcached-0\" (UID: \"3c2bbe1b-74b6-4e3c-8468-735ad0b00146\") " pod="openstack/memcached-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.582296 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/3c2bbe1b-74b6-4e3c-8468-735ad0b00146-combined-ca-bundle\") pod \"memcached-0\" (UID: \"3c2bbe1b-74b6-4e3c-8468-735ad0b00146\") " pod="openstack/memcached-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.583320 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3c2bbe1b-74b6-4e3c-8468-735ad0b00146-config-data\") pod \"memcached-0\" (UID: \"3c2bbe1b-74b6-4e3c-8468-735ad0b00146\") " pod="openstack/memcached-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.583769 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3c2bbe1b-74b6-4e3c-8468-735ad0b00146-kolla-config\") pod \"memcached-0\" (UID: \"3c2bbe1b-74b6-4e3c-8468-735ad0b00146\") " pod="openstack/memcached-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.597818 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c2bbe1b-74b6-4e3c-8468-735ad0b00146-combined-ca-bundle\") pod \"memcached-0\" (UID: \"3c2bbe1b-74b6-4e3c-8468-735ad0b00146\") " pod="openstack/memcached-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.604051 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5p28\" (UniqueName: \"kubernetes.io/projected/3c2bbe1b-74b6-4e3c-8468-735ad0b00146-kube-api-access-r5p28\") pod \"memcached-0\" (UID: \"3c2bbe1b-74b6-4e3c-8468-735ad0b00146\") " pod="openstack/memcached-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.604372 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c2bbe1b-74b6-4e3c-8468-735ad0b00146-memcached-tls-certs\") pod \"memcached-0\" (UID: \"3c2bbe1b-74b6-4e3c-8468-735ad0b00146\") " pod="openstack/memcached-0" Feb 03 07:04:05 crc kubenswrapper[4998]: I0203 07:04:05.728528 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Feb 03 07:04:07 crc kubenswrapper[4998]: I0203 07:04:07.151175 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Feb 03 07:04:07 crc kubenswrapper[4998]: I0203 07:04:07.152655 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 03 07:04:07 crc kubenswrapper[4998]: I0203 07:04:07.154851 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-hs8f2" Feb 03 07:04:07 crc kubenswrapper[4998]: I0203 07:04:07.164228 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 03 07:04:07 crc kubenswrapper[4998]: I0203 07:04:07.204302 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gp8mf\" (UniqueName: \"kubernetes.io/projected/217d1e8d-a95d-4152-a8d9-e843cd3b7260-kube-api-access-gp8mf\") pod \"kube-state-metrics-0\" (UID: \"217d1e8d-a95d-4152-a8d9-e843cd3b7260\") " pod="openstack/kube-state-metrics-0" Feb 03 07:04:07 crc kubenswrapper[4998]: I0203 07:04:07.307656 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gp8mf\" (UniqueName: \"kubernetes.io/projected/217d1e8d-a95d-4152-a8d9-e843cd3b7260-kube-api-access-gp8mf\") pod \"kube-state-metrics-0\" (UID: \"217d1e8d-a95d-4152-a8d9-e843cd3b7260\") " pod="openstack/kube-state-metrics-0" Feb 03 07:04:07 crc kubenswrapper[4998]: I0203 07:04:07.333173 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gp8mf\" (UniqueName: \"kubernetes.io/projected/217d1e8d-a95d-4152-a8d9-e843cd3b7260-kube-api-access-gp8mf\") pod \"kube-state-metrics-0\" (UID: \"217d1e8d-a95d-4152-a8d9-e843cd3b7260\") " pod="openstack/kube-state-metrics-0" Feb 03 07:04:07 crc kubenswrapper[4998]: I0203 07:04:07.512520 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 03 07:04:08 crc kubenswrapper[4998]: I0203 07:04:08.978709 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"f5714626-00c5-4b11-b056-40ff428fc017","Type":"ContainerStarted","Data":"a3a00d41560da21f0e8ee8c1772a44096375d772023552c296e12fdc5ebfa631"} Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.186163 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-sm6db"] Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.187741 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-sm6db" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.194587 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.194959 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.195155 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-562w6" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.197167 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-sm6db"] Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.204129 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-t4p58"] Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.210370 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-t4p58" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.213091 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-t4p58"] Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.280200 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5a08e2bf-f0a7-4812-8137-c305d886f174-scripts\") pod \"ovn-controller-sm6db\" (UID: \"5a08e2bf-f0a7-4812-8137-c305d886f174\") " pod="openstack/ovn-controller-sm6db" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.280259 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5e71558d-268c-4680-b43c-9fb48f34b38f-scripts\") pod \"ovn-controller-ovs-t4p58\" (UID: \"5e71558d-268c-4680-b43c-9fb48f34b38f\") " pod="openstack/ovn-controller-ovs-t4p58" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.280317 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a08e2bf-f0a7-4812-8137-c305d886f174-ovn-controller-tls-certs\") pod \"ovn-controller-sm6db\" (UID: \"5a08e2bf-f0a7-4812-8137-c305d886f174\") " pod="openstack/ovn-controller-sm6db" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.280343 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a08e2bf-f0a7-4812-8137-c305d886f174-combined-ca-bundle\") pod \"ovn-controller-sm6db\" (UID: \"5a08e2bf-f0a7-4812-8137-c305d886f174\") " pod="openstack/ovn-controller-sm6db" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.280393 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/5e71558d-268c-4680-b43c-9fb48f34b38f-var-log\") pod \"ovn-controller-ovs-t4p58\" (UID: \"5e71558d-268c-4680-b43c-9fb48f34b38f\") " pod="openstack/ovn-controller-ovs-t4p58" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.280478 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5a08e2bf-f0a7-4812-8137-c305d886f174-var-run\") pod \"ovn-controller-sm6db\" (UID: \"5a08e2bf-f0a7-4812-8137-c305d886f174\") " pod="openstack/ovn-controller-sm6db" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.280505 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/5e71558d-268c-4680-b43c-9fb48f34b38f-etc-ovs\") pod \"ovn-controller-ovs-t4p58\" (UID: \"5e71558d-268c-4680-b43c-9fb48f34b38f\") " pod="openstack/ovn-controller-ovs-t4p58" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.280556 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/5e71558d-268c-4680-b43c-9fb48f34b38f-var-lib\") pod \"ovn-controller-ovs-t4p58\" (UID: \"5e71558d-268c-4680-b43c-9fb48f34b38f\") " pod="openstack/ovn-controller-ovs-t4p58" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.280611 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: 
\"kubernetes.io/host-path/5a08e2bf-f0a7-4812-8137-c305d886f174-var-log-ovn\") pod \"ovn-controller-sm6db\" (UID: \"5a08e2bf-f0a7-4812-8137-c305d886f174\") " pod="openstack/ovn-controller-sm6db" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.280677 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5a08e2bf-f0a7-4812-8137-c305d886f174-var-run-ovn\") pod \"ovn-controller-sm6db\" (UID: \"5a08e2bf-f0a7-4812-8137-c305d886f174\") " pod="openstack/ovn-controller-sm6db" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.280797 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5e71558d-268c-4680-b43c-9fb48f34b38f-var-run\") pod \"ovn-controller-ovs-t4p58\" (UID: \"5e71558d-268c-4680-b43c-9fb48f34b38f\") " pod="openstack/ovn-controller-ovs-t4p58" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.280831 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxsrh\" (UniqueName: \"kubernetes.io/projected/5a08e2bf-f0a7-4812-8137-c305d886f174-kube-api-access-rxsrh\") pod \"ovn-controller-sm6db\" (UID: \"5a08e2bf-f0a7-4812-8137-c305d886f174\") " pod="openstack/ovn-controller-sm6db" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.280866 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wz5jz\" (UniqueName: \"kubernetes.io/projected/5e71558d-268c-4680-b43c-9fb48f34b38f-kube-api-access-wz5jz\") pod \"ovn-controller-ovs-t4p58\" (UID: \"5e71558d-268c-4680-b43c-9fb48f34b38f\") " pod="openstack/ovn-controller-ovs-t4p58" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.381889 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5a08e2bf-f0a7-4812-8137-c305d886f174-var-run\") pod \"ovn-controller-sm6db\" (UID: \"5a08e2bf-f0a7-4812-8137-c305d886f174\") " pod="openstack/ovn-controller-sm6db" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.381933 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/5e71558d-268c-4680-b43c-9fb48f34b38f-etc-ovs\") pod \"ovn-controller-ovs-t4p58\" (UID: \"5e71558d-268c-4680-b43c-9fb48f34b38f\") " pod="openstack/ovn-controller-ovs-t4p58" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.381959 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/5e71558d-268c-4680-b43c-9fb48f34b38f-var-lib\") pod \"ovn-controller-ovs-t4p58\" (UID: \"5e71558d-268c-4680-b43c-9fb48f34b38f\") " pod="openstack/ovn-controller-ovs-t4p58" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.381987 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5a08e2bf-f0a7-4812-8137-c305d886f174-var-log-ovn\") pod \"ovn-controller-sm6db\" (UID: \"5a08e2bf-f0a7-4812-8137-c305d886f174\") " pod="openstack/ovn-controller-sm6db" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.382016 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5a08e2bf-f0a7-4812-8137-c305d886f174-var-run-ovn\") pod \"ovn-controller-sm6db\" (UID: 
\"5a08e2bf-f0a7-4812-8137-c305d886f174\") " pod="openstack/ovn-controller-sm6db" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.382051 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5e71558d-268c-4680-b43c-9fb48f34b38f-var-run\") pod \"ovn-controller-ovs-t4p58\" (UID: \"5e71558d-268c-4680-b43c-9fb48f34b38f\") " pod="openstack/ovn-controller-ovs-t4p58" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.382068 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxsrh\" (UniqueName: \"kubernetes.io/projected/5a08e2bf-f0a7-4812-8137-c305d886f174-kube-api-access-rxsrh\") pod \"ovn-controller-sm6db\" (UID: \"5a08e2bf-f0a7-4812-8137-c305d886f174\") " pod="openstack/ovn-controller-sm6db" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.382090 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wz5jz\" (UniqueName: \"kubernetes.io/projected/5e71558d-268c-4680-b43c-9fb48f34b38f-kube-api-access-wz5jz\") pod \"ovn-controller-ovs-t4p58\" (UID: \"5e71558d-268c-4680-b43c-9fb48f34b38f\") " pod="openstack/ovn-controller-ovs-t4p58" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.382119 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5a08e2bf-f0a7-4812-8137-c305d886f174-scripts\") pod \"ovn-controller-sm6db\" (UID: \"5a08e2bf-f0a7-4812-8137-c305d886f174\") " pod="openstack/ovn-controller-sm6db" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.382133 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5e71558d-268c-4680-b43c-9fb48f34b38f-scripts\") pod \"ovn-controller-ovs-t4p58\" (UID: \"5e71558d-268c-4680-b43c-9fb48f34b38f\") " pod="openstack/ovn-controller-ovs-t4p58" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.382157 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a08e2bf-f0a7-4812-8137-c305d886f174-ovn-controller-tls-certs\") pod \"ovn-controller-sm6db\" (UID: \"5a08e2bf-f0a7-4812-8137-c305d886f174\") " pod="openstack/ovn-controller-sm6db" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.382173 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a08e2bf-f0a7-4812-8137-c305d886f174-combined-ca-bundle\") pod \"ovn-controller-sm6db\" (UID: \"5a08e2bf-f0a7-4812-8137-c305d886f174\") " pod="openstack/ovn-controller-sm6db" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.382195 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/5e71558d-268c-4680-b43c-9fb48f34b38f-var-log\") pod \"ovn-controller-ovs-t4p58\" (UID: \"5e71558d-268c-4680-b43c-9fb48f34b38f\") " pod="openstack/ovn-controller-ovs-t4p58" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.382486 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/5e71558d-268c-4680-b43c-9fb48f34b38f-etc-ovs\") pod \"ovn-controller-ovs-t4p58\" (UID: \"5e71558d-268c-4680-b43c-9fb48f34b38f\") " pod="openstack/ovn-controller-ovs-t4p58" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.382553 4998 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/5e71558d-268c-4680-b43c-9fb48f34b38f-var-log\") pod \"ovn-controller-ovs-t4p58\" (UID: \"5e71558d-268c-4680-b43c-9fb48f34b38f\") " pod="openstack/ovn-controller-ovs-t4p58" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.382669 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5a08e2bf-f0a7-4812-8137-c305d886f174-var-run\") pod \"ovn-controller-sm6db\" (UID: \"5a08e2bf-f0a7-4812-8137-c305d886f174\") " pod="openstack/ovn-controller-sm6db" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.382918 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5a08e2bf-f0a7-4812-8137-c305d886f174-var-log-ovn\") pod \"ovn-controller-sm6db\" (UID: \"5a08e2bf-f0a7-4812-8137-c305d886f174\") " pod="openstack/ovn-controller-sm6db" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.383024 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5a08e2bf-f0a7-4812-8137-c305d886f174-var-run-ovn\") pod \"ovn-controller-sm6db\" (UID: \"5a08e2bf-f0a7-4812-8137-c305d886f174\") " pod="openstack/ovn-controller-sm6db" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.383067 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5e71558d-268c-4680-b43c-9fb48f34b38f-var-run\") pod \"ovn-controller-ovs-t4p58\" (UID: \"5e71558d-268c-4680-b43c-9fb48f34b38f\") " pod="openstack/ovn-controller-ovs-t4p58" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.384251 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/5e71558d-268c-4680-b43c-9fb48f34b38f-var-lib\") pod \"ovn-controller-ovs-t4p58\" (UID: \"5e71558d-268c-4680-b43c-9fb48f34b38f\") " pod="openstack/ovn-controller-ovs-t4p58" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.384438 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5a08e2bf-f0a7-4812-8137-c305d886f174-scripts\") pod \"ovn-controller-sm6db\" (UID: \"5a08e2bf-f0a7-4812-8137-c305d886f174\") " pod="openstack/ovn-controller-sm6db" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.385195 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5e71558d-268c-4680-b43c-9fb48f34b38f-scripts\") pod \"ovn-controller-ovs-t4p58\" (UID: \"5e71558d-268c-4680-b43c-9fb48f34b38f\") " pod="openstack/ovn-controller-ovs-t4p58" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.388475 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a08e2bf-f0a7-4812-8137-c305d886f174-ovn-controller-tls-certs\") pod \"ovn-controller-sm6db\" (UID: \"5a08e2bf-f0a7-4812-8137-c305d886f174\") " pod="openstack/ovn-controller-sm6db" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.399565 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxsrh\" (UniqueName: \"kubernetes.io/projected/5a08e2bf-f0a7-4812-8137-c305d886f174-kube-api-access-rxsrh\") pod \"ovn-controller-sm6db\" (UID: \"5a08e2bf-f0a7-4812-8137-c305d886f174\") " pod="openstack/ovn-controller-sm6db" Feb 03 07:04:11 crc 
Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.406817 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a08e2bf-f0a7-4812-8137-c305d886f174-combined-ca-bundle\") pod \"ovn-controller-sm6db\" (UID: \"5a08e2bf-f0a7-4812-8137-c305d886f174\") " pod="openstack/ovn-controller-sm6db"
Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.527172 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-sm6db"
Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.537467 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-t4p58"
Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.934218 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"]
Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.935501 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.940770 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts"
Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.940807 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics"
Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.941089 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-ctkf7"
Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.941218 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs"
Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.941338 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config"
Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.944228 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.992643 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\") " pod="openstack/ovsdbserver-nb-0"
Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.992694 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\") " pod="openstack/ovsdbserver-nb-0"
Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.992721 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\") " pod="openstack/ovsdbserver-nb-0"
pod="openstack/ovsdbserver-nb-0" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.992764 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.992827 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.992847 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.992874 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-config\") pod \"ovsdbserver-nb-0\" (UID: \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:04:11 crc kubenswrapper[4998]: I0203 07:04:11.992904 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chx9k\" (UniqueName: \"kubernetes.io/projected/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-kube-api-access-chx9k\") pod \"ovsdbserver-nb-0\" (UID: \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:04:12 crc kubenswrapper[4998]: I0203 07:04:12.095251 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-config\") pod \"ovsdbserver-nb-0\" (UID: \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:04:12 crc kubenswrapper[4998]: I0203 07:04:12.095349 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chx9k\" (UniqueName: \"kubernetes.io/projected/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-kube-api-access-chx9k\") pod \"ovsdbserver-nb-0\" (UID: \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:04:12 crc kubenswrapper[4998]: I0203 07:04:12.095390 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:04:12 crc kubenswrapper[4998]: I0203 07:04:12.095436 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:04:12 crc kubenswrapper[4998]: I0203 07:04:12.095457 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:04:12 crc kubenswrapper[4998]: I0203 07:04:12.095505 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:04:12 crc kubenswrapper[4998]: I0203 07:04:12.095559 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:04:12 crc kubenswrapper[4998]: I0203 07:04:12.095582 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:04:12 crc kubenswrapper[4998]: I0203 07:04:12.096401 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-config\") pod \"ovsdbserver-nb-0\" (UID: \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:04:12 crc kubenswrapper[4998]: I0203 07:04:12.097075 4998 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/ovsdbserver-nb-0" Feb 03 07:04:12 crc kubenswrapper[4998]: I0203 07:04:12.100497 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:04:12 crc kubenswrapper[4998]: I0203 07:04:12.107742 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:04:12 crc kubenswrapper[4998]: I0203 07:04:12.108806 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:04:12 crc kubenswrapper[4998]: I0203 07:04:12.112078 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:04:12 crc kubenswrapper[4998]: I0203 07:04:12.113663 
Feb 03 07:04:12 crc kubenswrapper[4998]: I0203 07:04:12.114401 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chx9k\" (UniqueName: \"kubernetes.io/projected/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-kube-api-access-chx9k\") pod \"ovsdbserver-nb-0\" (UID: \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\") " pod="openstack/ovsdbserver-nb-0"
Feb 03 07:04:12 crc kubenswrapper[4998]: I0203 07:04:12.121940 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\") " pod="openstack/ovsdbserver-nb-0"
Feb 03 07:04:12 crc kubenswrapper[4998]: I0203 07:04:12.273658 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Feb 03 07:04:14 crc kubenswrapper[4998]: I0203 07:04:14.489021 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"]
Feb 03 07:04:14 crc kubenswrapper[4998]: I0203 07:04:14.521636 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0"
Feb 03 07:04:14 crc kubenswrapper[4998]: I0203 07:04:14.524877 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts"
Feb 03 07:04:14 crc kubenswrapper[4998]: I0203 07:04:14.526257 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config"
Feb 03 07:04:14 crc kubenswrapper[4998]: I0203 07:04:14.527851 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-xpms6"
Feb 03 07:04:14 crc kubenswrapper[4998]: I0203 07:04:14.528385 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs"
Feb 03 07:04:14 crc kubenswrapper[4998]: I0203 07:04:14.554824 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"]
Feb 03 07:04:14 crc kubenswrapper[4998]: I0203 07:04:14.555467 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94b790fb-3209-436d-b48d-f3978a82a557-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"94b790fb-3209-436d-b48d-f3978a82a557\") " pod="openstack/ovsdbserver-sb-0"
Feb 03 07:04:14 crc kubenswrapper[4998]: I0203 07:04:14.555556 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"ovsdbserver-sb-0\" (UID: \"94b790fb-3209-436d-b48d-f3978a82a557\") " pod="openstack/ovsdbserver-sb-0"
Feb 03 07:04:14 crc kubenswrapper[4998]: I0203 07:04:14.555617 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94b790fb-3209-436d-b48d-f3978a82a557-config\") pod \"ovsdbserver-sb-0\" (UID: \"94b790fb-3209-436d-b48d-f3978a82a557\") " pod="openstack/ovsdbserver-sb-0"
Feb 03 07:04:14 crc kubenswrapper[4998]: I0203 07:04:14.555660 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/94b790fb-3209-436d-b48d-f3978a82a557-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"94b790fb-3209-436d-b48d-f3978a82a557\") " pod="openstack/ovsdbserver-sb-0"
Feb 03 07:04:14 crc kubenswrapper[4998]: I0203 07:04:14.555711 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/94b790fb-3209-436d-b48d-f3978a82a557-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"94b790fb-3209-436d-b48d-f3978a82a557\") " pod="openstack/ovsdbserver-sb-0"
Feb 03 07:04:14 crc kubenswrapper[4998]: I0203 07:04:14.555742 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/94b790fb-3209-436d-b48d-f3978a82a557-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"94b790fb-3209-436d-b48d-f3978a82a557\") " pod="openstack/ovsdbserver-sb-0"
Feb 03 07:04:14 crc kubenswrapper[4998]: I0203 07:04:14.557622 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/94b790fb-3209-436d-b48d-f3978a82a557-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"94b790fb-3209-436d-b48d-f3978a82a557\") " pod="openstack/ovsdbserver-sb-0"
Feb 03 07:04:14 crc kubenswrapper[4998]: I0203 07:04:14.557719 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqb2r\" (UniqueName: \"kubernetes.io/projected/94b790fb-3209-436d-b48d-f3978a82a557-kube-api-access-lqb2r\") pod \"ovsdbserver-sb-0\" (UID: \"94b790fb-3209-436d-b48d-f3978a82a557\") " pod="openstack/ovsdbserver-sb-0"
Feb 03 07:04:14 crc kubenswrapper[4998]: I0203 07:04:14.659313 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"ovsdbserver-sb-0\" (UID: \"94b790fb-3209-436d-b48d-f3978a82a557\") " pod="openstack/ovsdbserver-sb-0"
Feb 03 07:04:14 crc kubenswrapper[4998]: I0203 07:04:14.659396 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94b790fb-3209-436d-b48d-f3978a82a557-config\") pod \"ovsdbserver-sb-0\" (UID: \"94b790fb-3209-436d-b48d-f3978a82a557\") " pod="openstack/ovsdbserver-sb-0"
Feb 03 07:04:14 crc kubenswrapper[4998]: I0203 07:04:14.659433 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/94b790fb-3209-436d-b48d-f3978a82a557-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"94b790fb-3209-436d-b48d-f3978a82a557\") " pod="openstack/ovsdbserver-sb-0"
Feb 03 07:04:14 crc kubenswrapper[4998]: I0203 07:04:14.659484 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/94b790fb-3209-436d-b48d-f3978a82a557-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"94b790fb-3209-436d-b48d-f3978a82a557\") " pod="openstack/ovsdbserver-sb-0"
Feb 03 07:04:14 crc kubenswrapper[4998]: I0203 07:04:14.659517 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/94b790fb-3209-436d-b48d-f3978a82a557-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"94b790fb-3209-436d-b48d-f3978a82a557\") " pod="openstack/ovsdbserver-sb-0"
\"kubernetes.io/secret/94b790fb-3209-436d-b48d-f3978a82a557-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"94b790fb-3209-436d-b48d-f3978a82a557\") " pod="openstack/ovsdbserver-sb-0" Feb 03 07:04:14 crc kubenswrapper[4998]: I0203 07:04:14.659560 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/94b790fb-3209-436d-b48d-f3978a82a557-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"94b790fb-3209-436d-b48d-f3978a82a557\") " pod="openstack/ovsdbserver-sb-0" Feb 03 07:04:14 crc kubenswrapper[4998]: I0203 07:04:14.659591 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqb2r\" (UniqueName: \"kubernetes.io/projected/94b790fb-3209-436d-b48d-f3978a82a557-kube-api-access-lqb2r\") pod \"ovsdbserver-sb-0\" (UID: \"94b790fb-3209-436d-b48d-f3978a82a557\") " pod="openstack/ovsdbserver-sb-0" Feb 03 07:04:14 crc kubenswrapper[4998]: I0203 07:04:14.659636 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94b790fb-3209-436d-b48d-f3978a82a557-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"94b790fb-3209-436d-b48d-f3978a82a557\") " pod="openstack/ovsdbserver-sb-0" Feb 03 07:04:14 crc kubenswrapper[4998]: I0203 07:04:14.660105 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/94b790fb-3209-436d-b48d-f3978a82a557-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"94b790fb-3209-436d-b48d-f3978a82a557\") " pod="openstack/ovsdbserver-sb-0" Feb 03 07:04:14 crc kubenswrapper[4998]: I0203 07:04:14.660663 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94b790fb-3209-436d-b48d-f3978a82a557-config\") pod \"ovsdbserver-sb-0\" (UID: \"94b790fb-3209-436d-b48d-f3978a82a557\") " pod="openstack/ovsdbserver-sb-0" Feb 03 07:04:14 crc kubenswrapper[4998]: I0203 07:04:14.661592 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/94b790fb-3209-436d-b48d-f3978a82a557-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"94b790fb-3209-436d-b48d-f3978a82a557\") " pod="openstack/ovsdbserver-sb-0" Feb 03 07:04:14 crc kubenswrapper[4998]: I0203 07:04:14.661855 4998 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"ovsdbserver-sb-0\" (UID: \"94b790fb-3209-436d-b48d-f3978a82a557\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/ovsdbserver-sb-0" Feb 03 07:04:14 crc kubenswrapper[4998]: I0203 07:04:14.668488 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/94b790fb-3209-436d-b48d-f3978a82a557-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"94b790fb-3209-436d-b48d-f3978a82a557\") " pod="openstack/ovsdbserver-sb-0" Feb 03 07:04:14 crc kubenswrapper[4998]: I0203 07:04:14.675309 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/94b790fb-3209-436d-b48d-f3978a82a557-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"94b790fb-3209-436d-b48d-f3978a82a557\") " pod="openstack/ovsdbserver-sb-0" Feb 03 07:04:14 crc kubenswrapper[4998]: I0203 07:04:14.678250 4998 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-lqb2r\" (UniqueName: \"kubernetes.io/projected/94b790fb-3209-436d-b48d-f3978a82a557-kube-api-access-lqb2r\") pod \"ovsdbserver-sb-0\" (UID: \"94b790fb-3209-436d-b48d-f3978a82a557\") " pod="openstack/ovsdbserver-sb-0" Feb 03 07:04:14 crc kubenswrapper[4998]: I0203 07:04:14.678896 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94b790fb-3209-436d-b48d-f3978a82a557-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"94b790fb-3209-436d-b48d-f3978a82a557\") " pod="openstack/ovsdbserver-sb-0" Feb 03 07:04:14 crc kubenswrapper[4998]: I0203 07:04:14.702324 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"ovsdbserver-sb-0\" (UID: \"94b790fb-3209-436d-b48d-f3978a82a557\") " pod="openstack/ovsdbserver-sb-0" Feb 03 07:04:14 crc kubenswrapper[4998]: I0203 07:04:14.885210 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Feb 03 07:04:15 crc kubenswrapper[4998]: E0203 07:04:15.953553 4998 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:f391b842000dadaeb692eb6b5e845c2aa3125ef24679fbb4af2c8b98252de4b2" Feb 03 07:04:15 crc kubenswrapper[4998]: E0203 07:04:15.954603 4998 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:f391b842000dadaeb692eb6b5e845c2aa3125ef24679fbb4af2c8b98252de4b2,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
Feb 03 07:04:15 crc kubenswrapper[4998]: E0203 07:04:15.956102 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-6fcf94d689-76vgd" podUID="687e12c8-30df-4d4f-9c72-740fde397ad4"
Feb 03 07:04:16 crc kubenswrapper[4998]: E0203 07:04:16.011159 4998 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:f391b842000dadaeb692eb6b5e845c2aa3125ef24679fbb4af2c8b98252de4b2"
Feb 03 07:04:16 crc kubenswrapper[4998]: E0203 07:04:16.011325 4998 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:f391b842000dadaeb692eb6b5e845c2aa3125ef24679fbb4af2c8b98252de4b2,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-p7dpt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-855cbc58c5-2svpw_openstack(aff94286-96c8-4ac9-9722-578d56b031d0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Feb 03 07:04:16 crc kubenswrapper[4998]: E0203 07:04:16.013221 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-855cbc58c5-2svpw" podUID="aff94286-96c8-4ac9-9722-578d56b031d0"
Feb 03 07:04:16 crc kubenswrapper[4998]: I0203 07:04:16.410273 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Feb 03 07:04:24 crc kubenswrapper[4998]: W0203 07:04:24.971434 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4485520f_24df_4521_b5e9_6076ffa00bd6.slice/crio-02914a77ca230a2af0025a1f9a22910193e3f32f7883122f185a9fcf0b7f2d07 WatchSource:0}: Error finding container 02914a77ca230a2af0025a1f9a22910193e3f32f7883122f185a9fcf0b7f2d07: Status 404 returned error can't find the container with id 02914a77ca230a2af0025a1f9a22910193e3f32f7883122f185a9fcf0b7f2d07
Feb 03 07:04:25 crc kubenswrapper[4998]: I0203 07:04:25.069071 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-855cbc58c5-2svpw"
Feb 03 07:04:25 crc kubenswrapper[4998]: I0203 07:04:25.080999 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6fcf94d689-76vgd"
Need to start a new one" pod="openstack/dnsmasq-dns-6fcf94d689-76vgd" Feb 03 07:04:25 crc kubenswrapper[4998]: I0203 07:04:25.099299 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"4485520f-24df-4521-b5e9-6076ffa00bd6","Type":"ContainerStarted","Data":"02914a77ca230a2af0025a1f9a22910193e3f32f7883122f185a9fcf0b7f2d07"} Feb 03 07:04:25 crc kubenswrapper[4998]: I0203 07:04:25.100149 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-855cbc58c5-2svpw" event={"ID":"aff94286-96c8-4ac9-9722-578d56b031d0","Type":"ContainerDied","Data":"4b7defbbb4fa989e1d6fe3aa1d3b5437bb16e91e87b2a7aa97ef7c8f4c4e99a4"} Feb 03 07:04:25 crc kubenswrapper[4998]: I0203 07:04:25.100189 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-855cbc58c5-2svpw" Feb 03 07:04:25 crc kubenswrapper[4998]: I0203 07:04:25.100880 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fcf94d689-76vgd" event={"ID":"687e12c8-30df-4d4f-9c72-740fde397ad4","Type":"ContainerDied","Data":"256d2ab1b451302bf60e832f2225f5d9f81cd79a1c616617916d0405e8a3e6df"} Feb 03 07:04:25 crc kubenswrapper[4998]: I0203 07:04:25.100934 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6fcf94d689-76vgd" Feb 03 07:04:25 crc kubenswrapper[4998]: I0203 07:04:25.138156 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aff94286-96c8-4ac9-9722-578d56b031d0-config\") pod \"aff94286-96c8-4ac9-9722-578d56b031d0\" (UID: \"aff94286-96c8-4ac9-9722-578d56b031d0\") " Feb 03 07:04:25 crc kubenswrapper[4998]: I0203 07:04:25.138242 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p7dpt\" (UniqueName: \"kubernetes.io/projected/aff94286-96c8-4ac9-9722-578d56b031d0-kube-api-access-p7dpt\") pod \"aff94286-96c8-4ac9-9722-578d56b031d0\" (UID: \"aff94286-96c8-4ac9-9722-578d56b031d0\") " Feb 03 07:04:25 crc kubenswrapper[4998]: I0203 07:04:25.138376 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/687e12c8-30df-4d4f-9c72-740fde397ad4-dns-svc\") pod \"687e12c8-30df-4d4f-9c72-740fde397ad4\" (UID: \"687e12c8-30df-4d4f-9c72-740fde397ad4\") " Feb 03 07:04:25 crc kubenswrapper[4998]: I0203 07:04:25.138451 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4w9fr\" (UniqueName: \"kubernetes.io/projected/687e12c8-30df-4d4f-9c72-740fde397ad4-kube-api-access-4w9fr\") pod \"687e12c8-30df-4d4f-9c72-740fde397ad4\" (UID: \"687e12c8-30df-4d4f-9c72-740fde397ad4\") " Feb 03 07:04:25 crc kubenswrapper[4998]: I0203 07:04:25.138535 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/687e12c8-30df-4d4f-9c72-740fde397ad4-config\") pod \"687e12c8-30df-4d4f-9c72-740fde397ad4\" (UID: \"687e12c8-30df-4d4f-9c72-740fde397ad4\") " Feb 03 07:04:25 crc kubenswrapper[4998]: I0203 07:04:25.139386 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/687e12c8-30df-4d4f-9c72-740fde397ad4-config" (OuterVolumeSpecName: "config") pod "687e12c8-30df-4d4f-9c72-740fde397ad4" (UID: "687e12c8-30df-4d4f-9c72-740fde397ad4"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:04:25 crc kubenswrapper[4998]: I0203 07:04:25.139830 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aff94286-96c8-4ac9-9722-578d56b031d0-config" (OuterVolumeSpecName: "config") pod "aff94286-96c8-4ac9-9722-578d56b031d0" (UID: "aff94286-96c8-4ac9-9722-578d56b031d0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:04:25 crc kubenswrapper[4998]: I0203 07:04:25.139841 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/687e12c8-30df-4d4f-9c72-740fde397ad4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "687e12c8-30df-4d4f-9c72-740fde397ad4" (UID: "687e12c8-30df-4d4f-9c72-740fde397ad4"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:04:25 crc kubenswrapper[4998]: I0203 07:04:25.144948 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/687e12c8-30df-4d4f-9c72-740fde397ad4-kube-api-access-4w9fr" (OuterVolumeSpecName: "kube-api-access-4w9fr") pod "687e12c8-30df-4d4f-9c72-740fde397ad4" (UID: "687e12c8-30df-4d4f-9c72-740fde397ad4"). InnerVolumeSpecName "kube-api-access-4w9fr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:04:25 crc kubenswrapper[4998]: I0203 07:04:25.151073 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aff94286-96c8-4ac9-9722-578d56b031d0-kube-api-access-p7dpt" (OuterVolumeSpecName: "kube-api-access-p7dpt") pod "aff94286-96c8-4ac9-9722-578d56b031d0" (UID: "aff94286-96c8-4ac9-9722-578d56b031d0"). InnerVolumeSpecName "kube-api-access-p7dpt". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:04:25 crc kubenswrapper[4998]: I0203 07:04:25.241020 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/687e12c8-30df-4d4f-9c72-740fde397ad4-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:04:25 crc kubenswrapper[4998]: I0203 07:04:25.241058 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aff94286-96c8-4ac9-9722-578d56b031d0-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:04:25 crc kubenswrapper[4998]: I0203 07:04:25.241073 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p7dpt\" (UniqueName: \"kubernetes.io/projected/aff94286-96c8-4ac9-9722-578d56b031d0-kube-api-access-p7dpt\") on node \"crc\" DevicePath \"\"" Feb 03 07:04:25 crc kubenswrapper[4998]: I0203 07:04:25.241087 4998 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/687e12c8-30df-4d4f-9c72-740fde397ad4-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 07:04:25 crc kubenswrapper[4998]: I0203 07:04:25.241101 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4w9fr\" (UniqueName: \"kubernetes.io/projected/687e12c8-30df-4d4f-9c72-740fde397ad4-kube-api-access-4w9fr\") on node \"crc\" DevicePath \"\"" Feb 03 07:04:25 crc kubenswrapper[4998]: I0203 07:04:25.517714 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6fcf94d689-76vgd"] Feb 03 07:04:25 crc kubenswrapper[4998]: I0203 07:04:25.536609 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6fcf94d689-76vgd"] Feb 03 07:04:25 crc kubenswrapper[4998]: I0203 07:04:25.588578 4998 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-855cbc58c5-2svpw"] Feb 03 07:04:25 crc kubenswrapper[4998]: I0203 07:04:25.599064 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-855cbc58c5-2svpw"] Feb 03 07:04:25 crc kubenswrapper[4998]: I0203 07:04:25.848682 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 03 07:04:25 crc kubenswrapper[4998]: W0203 07:04:25.850264 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod217d1e8d_a95d_4152_a8d9_e843cd3b7260.slice/crio-4327206695f6fc4ed7a7665b971c8e9f1a942858a93e90a6e29cef9a68e47826 WatchSource:0}: Error finding container 4327206695f6fc4ed7a7665b971c8e9f1a942858a93e90a6e29cef9a68e47826: Status 404 returned error can't find the container with id 4327206695f6fc4ed7a7665b971c8e9f1a942858a93e90a6e29cef9a68e47826 Feb 03 07:04:25 crc kubenswrapper[4998]: W0203 07:04:25.921640 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3c2bbe1b_74b6_4e3c_8468_735ad0b00146.slice/crio-9018affb46e48c99b7178d1e37d5d7a5ca88267d23450c11def1be28b1dca6e1 WatchSource:0}: Error finding container 9018affb46e48c99b7178d1e37d5d7a5ca88267d23450c11def1be28b1dca6e1: Status 404 returned error can't find the container with id 9018affb46e48c99b7178d1e37d5d7a5ca88267d23450c11def1be28b1dca6e1 Feb 03 07:04:25 crc kubenswrapper[4998]: I0203 07:04:25.923264 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Feb 03 07:04:25 crc kubenswrapper[4998]: I0203 07:04:25.929418 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-sm6db"] Feb 03 07:04:26 crc kubenswrapper[4998]: I0203 07:04:26.028856 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Feb 03 07:04:26 crc kubenswrapper[4998]: W0203 07:04:26.031365 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode603cc71_c5b7_4f97_9ed9_3c6d114ddb8c.slice/crio-aa92e997914c418cfb3dbeff957f565f5f74dc6c47ec4274b19102fc0ba144d2 WatchSource:0}: Error finding container aa92e997914c418cfb3dbeff957f565f5f74dc6c47ec4274b19102fc0ba144d2: Status 404 returned error can't find the container with id aa92e997914c418cfb3dbeff957f565f5f74dc6c47ec4274b19102fc0ba144d2 Feb 03 07:04:26 crc kubenswrapper[4998]: I0203 07:04:26.110108 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"4485520f-24df-4521-b5e9-6076ffa00bd6","Type":"ContainerStarted","Data":"9d09a59cc4f696b04a31de6522435eb8ea7f55af8026ff6e14ede101c1dc6a99"} Feb 03 07:04:26 crc kubenswrapper[4998]: I0203 07:04:26.112956 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-sm6db" event={"ID":"5a08e2bf-f0a7-4812-8137-c305d886f174","Type":"ContainerStarted","Data":"f39f961bafcf52c55a3146606cc5171ad1de7de8b921f0c81a04745c8749b887"} Feb 03 07:04:26 crc kubenswrapper[4998]: I0203 07:04:26.116274 4998 generic.go:334] "Generic (PLEG): container finished" podID="ba09b0ed-2387-4b93-85b8-12d159155c7f" containerID="da40d3ae9e0fb4a730588154d5aae63f5fafc0fc036902d80ca808d54e2b4008" exitCode=0 Feb 03 07:04:26 crc kubenswrapper[4998]: I0203 07:04:26.116352 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f54874ffc-qbqgv" 
event={"ID":"ba09b0ed-2387-4b93-85b8-12d159155c7f","Type":"ContainerDied","Data":"da40d3ae9e0fb4a730588154d5aae63f5fafc0fc036902d80ca808d54e2b4008"} Feb 03 07:04:26 crc kubenswrapper[4998]: I0203 07:04:26.120671 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"f5714626-00c5-4b11-b056-40ff428fc017","Type":"ContainerStarted","Data":"15a9b706d9e763c27648d5c9f3540ad2007d24d255d06bd65df06844ced8658f"} Feb 03 07:04:26 crc kubenswrapper[4998]: I0203 07:04:26.126389 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"217d1e8d-a95d-4152-a8d9-e843cd3b7260","Type":"ContainerStarted","Data":"4327206695f6fc4ed7a7665b971c8e9f1a942858a93e90a6e29cef9a68e47826"} Feb 03 07:04:26 crc kubenswrapper[4998]: I0203 07:04:26.129407 4998 generic.go:334] "Generic (PLEG): container finished" podID="f2019872-2dfd-4470-883b-6dbbc86eb084" containerID="8f6945cd3ded327f2e84bb4df1e5c7a934800312ab231d8e7bb263792491fd09" exitCode=0 Feb 03 07:04:26 crc kubenswrapper[4998]: I0203 07:04:26.129506 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67ff45466c-fjqgp" event={"ID":"f2019872-2dfd-4470-883b-6dbbc86eb084","Type":"ContainerDied","Data":"8f6945cd3ded327f2e84bb4df1e5c7a934800312ab231d8e7bb263792491fd09"} Feb 03 07:04:26 crc kubenswrapper[4998]: I0203 07:04:26.131287 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c","Type":"ContainerStarted","Data":"aa92e997914c418cfb3dbeff957f565f5f74dc6c47ec4274b19102fc0ba144d2"} Feb 03 07:04:26 crc kubenswrapper[4998]: I0203 07:04:26.136406 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"3c2bbe1b-74b6-4e3c-8468-735ad0b00146","Type":"ContainerStarted","Data":"9018affb46e48c99b7178d1e37d5d7a5ca88267d23450c11def1be28b1dca6e1"} Feb 03 07:04:26 crc kubenswrapper[4998]: W0203 07:04:26.153762 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod94b790fb_3209_436d_b48d_f3978a82a557.slice/crio-c0b1024ca651de5b8b9c86ca0714addfa49640f30d386a5747bc7b906237762e WatchSource:0}: Error finding container c0b1024ca651de5b8b9c86ca0714addfa49640f30d386a5747bc7b906237762e: Status 404 returned error can't find the container with id c0b1024ca651de5b8b9c86ca0714addfa49640f30d386a5747bc7b906237762e Feb 03 07:04:26 crc kubenswrapper[4998]: I0203 07:04:26.157384 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Feb 03 07:04:26 crc kubenswrapper[4998]: I0203 07:04:26.435449 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="687e12c8-30df-4d4f-9c72-740fde397ad4" path="/var/lib/kubelet/pods/687e12c8-30df-4d4f-9c72-740fde397ad4/volumes" Feb 03 07:04:26 crc kubenswrapper[4998]: I0203 07:04:26.436400 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aff94286-96c8-4ac9-9722-578d56b031d0" path="/var/lib/kubelet/pods/aff94286-96c8-4ac9-9722-578d56b031d0/volumes" Feb 03 07:04:26 crc kubenswrapper[4998]: I0203 07:04:26.706669 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-t4p58"] Feb 03 07:04:26 crc kubenswrapper[4998]: E0203 07:04:26.713740 4998 log.go:32] "CreateContainer in sandbox from runtime service failed" err=< Feb 03 07:04:26 crc kubenswrapper[4998]: rpc error: code = Unknown desc = container create failed: mount 
`/var/lib/kubelet/pods/ba09b0ed-2387-4b93-85b8-12d159155c7f/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Feb 03 07:04:26 crc kubenswrapper[4998]: > podSandboxID="5ebbb99c2ee592df0684b89044fc8566f6bfd2a8eebd92b11bc7b0c789316ba6" Feb 03 07:04:26 crc kubenswrapper[4998]: E0203 07:04:26.714117 4998 kuberuntime_manager.go:1274] "Unhandled Error" err=< Feb 03 07:04:26 crc kubenswrapper[4998]: container &Container{Name:dnsmasq-dns,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:f391b842000dadaeb692eb6b5e845c2aa3125ef24679fbb4af2c8b98252de4b2,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ghcv9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-f54874ffc-qbqgv_openstack(ba09b0ed-2387-4b93-85b8-12d159155c7f): CreateContainerError: container create failed: mount `/var/lib/kubelet/pods/ba09b0ed-2387-4b93-85b8-12d159155c7f/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Feb 03 07:04:26 crc kubenswrapper[4998]: > logger="UnhandledError" Feb 03 07:04:26 crc kubenswrapper[4998]: E0203 07:04:26.715277 4998 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/ba09b0ed-2387-4b93-85b8-12d159155c7f/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-f54874ffc-qbqgv" podUID="ba09b0ed-2387-4b93-85b8-12d159155c7f" Feb 03 07:04:26 crc kubenswrapper[4998]: W0203 07:04:26.729202 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5e71558d_268c_4680_b43c_9fb48f34b38f.slice/crio-9c4576de3e57b771dece917b27c46e4e33a5bf8685c225433d6735f65ab46790 WatchSource:0}: Error finding container 9c4576de3e57b771dece917b27c46e4e33a5bf8685c225433d6735f65ab46790: Status 404 returned error can't find the container with id 9c4576de3e57b771dece917b27c46e4e33a5bf8685c225433d6735f65ab46790 Feb 03 07:04:27 crc kubenswrapper[4998]: I0203 07:04:27.146637 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"cc9d5160-2c51-474c-aca1-1af693753ee8","Type":"ContainerStarted","Data":"ce1c3d4a471ef44ed9a8e887a5f8fba20bb369dda1c3bbb7370146268725d19a"} Feb 03 07:04:27 crc kubenswrapper[4998]: I0203 07:04:27.147866 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"94b790fb-3209-436d-b48d-f3978a82a557","Type":"ContainerStarted","Data":"c0b1024ca651de5b8b9c86ca0714addfa49640f30d386a5747bc7b906237762e"} Feb 03 07:04:27 crc kubenswrapper[4998]: I0203 07:04:27.150681 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-t4p58" event={"ID":"5e71558d-268c-4680-b43c-9fb48f34b38f","Type":"ContainerStarted","Data":"9c4576de3e57b771dece917b27c46e4e33a5bf8685c225433d6735f65ab46790"} Feb 03 07:04:27 crc kubenswrapper[4998]: I0203 07:04:27.153725 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67ff45466c-fjqgp" event={"ID":"f2019872-2dfd-4470-883b-6dbbc86eb084","Type":"ContainerStarted","Data":"12d7ca4c0757835ffcdddda93220b27f60de75e80e05bdecf4adbc50637ef284"} Feb 03 07:04:27 crc kubenswrapper[4998]: I0203 07:04:27.154522 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-67ff45466c-fjqgp" Feb 03 07:04:27 crc kubenswrapper[4998]: I0203 07:04:27.156510 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"59f5a5d7-787a-4941-a2d3-2fe8db65cb31","Type":"ContainerStarted","Data":"76b83e5ba7d49d4a25c729bc84136855824cc063c69e5eb9aff88e90d282666d"} Feb 03 07:04:27 crc kubenswrapper[4998]: I0203 07:04:27.190638 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-67ff45466c-fjqgp" podStartSLOduration=3.063267936 podStartE2EDuration="26.190619268s" podCreationTimestamp="2026-02-03 07:04:01 +0000 UTC" firstStartedPulling="2026-02-03 07:04:02.217466554 +0000 UTC m=+1080.504160360" lastFinishedPulling="2026-02-03 07:04:25.344817886 +0000 UTC m=+1103.631511692" observedRunningTime="2026-02-03 07:04:27.189449024 +0000 UTC m=+1105.476142840" watchObservedRunningTime="2026-02-03 07:04:27.190619268 +0000 UTC m=+1105.477313074" Feb 03 07:04:29 crc kubenswrapper[4998]: I0203 07:04:29.175047 4998 generic.go:334] "Generic (PLEG): container finished" podID="f5714626-00c5-4b11-b056-40ff428fc017" containerID="15a9b706d9e763c27648d5c9f3540ad2007d24d255d06bd65df06844ced8658f" exitCode=0 Feb 03 07:04:29 crc 
kubenswrapper[4998]: I0203 07:04:29.175163 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"f5714626-00c5-4b11-b056-40ff428fc017","Type":"ContainerDied","Data":"15a9b706d9e763c27648d5c9f3540ad2007d24d255d06bd65df06844ced8658f"} Feb 03 07:04:30 crc kubenswrapper[4998]: I0203 07:04:30.186325 4998 generic.go:334] "Generic (PLEG): container finished" podID="4485520f-24df-4521-b5e9-6076ffa00bd6" containerID="9d09a59cc4f696b04a31de6522435eb8ea7f55af8026ff6e14ede101c1dc6a99" exitCode=0 Feb 03 07:04:30 crc kubenswrapper[4998]: I0203 07:04:30.186397 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"4485520f-24df-4521-b5e9-6076ffa00bd6","Type":"ContainerDied","Data":"9d09a59cc4f696b04a31de6522435eb8ea7f55af8026ff6e14ede101c1dc6a99"} Feb 03 07:04:31 crc kubenswrapper[4998]: I0203 07:04:31.757519 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-67ff45466c-fjqgp" Feb 03 07:04:31 crc kubenswrapper[4998]: I0203 07:04:31.804214 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f54874ffc-qbqgv"] Feb 03 07:04:32 crc kubenswrapper[4998]: I0203 07:04:32.207918 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"94b790fb-3209-436d-b48d-f3978a82a557","Type":"ContainerStarted","Data":"f40f4be544d33826b95826c17cd8d7866898d03ff404be03363a0a348e155097"} Feb 03 07:04:32 crc kubenswrapper[4998]: I0203 07:04:32.210255 4998 generic.go:334] "Generic (PLEG): container finished" podID="5e71558d-268c-4680-b43c-9fb48f34b38f" containerID="ebe92507b38a532d343256ce07ee02800377d7684eb15759ab4720c7695e1f77" exitCode=0 Feb 03 07:04:32 crc kubenswrapper[4998]: I0203 07:04:32.210321 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-t4p58" event={"ID":"5e71558d-268c-4680-b43c-9fb48f34b38f","Type":"ContainerDied","Data":"ebe92507b38a532d343256ce07ee02800377d7684eb15759ab4720c7695e1f77"} Feb 03 07:04:32 crc kubenswrapper[4998]: I0203 07:04:32.226296 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c","Type":"ContainerStarted","Data":"a8e05668b7efcd571ea3d4d5d93de7fe8edf60bb34bd662cf3ee577c3923dcdb"} Feb 03 07:04:32 crc kubenswrapper[4998]: I0203 07:04:32.229209 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"3c2bbe1b-74b6-4e3c-8468-735ad0b00146","Type":"ContainerStarted","Data":"7dd0a0658aa5ed78204aab74bc571019a1cf718b8da1141cbae6e79b65aadb7e"} Feb 03 07:04:32 crc kubenswrapper[4998]: I0203 07:04:32.229344 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Feb 03 07:04:32 crc kubenswrapper[4998]: I0203 07:04:32.239617 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"4485520f-24df-4521-b5e9-6076ffa00bd6","Type":"ContainerStarted","Data":"5b7b33ab0c2f178c9cbaf0012a4edcaa73280c5ee8e2f4c6a4faf102c9592514"} Feb 03 07:04:32 crc kubenswrapper[4998]: I0203 07:04:32.242905 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"f5714626-00c5-4b11-b056-40ff428fc017","Type":"ContainerStarted","Data":"7fc3cd4817b7865d5e94684c7fc6231e72d32602e47f8979e462f57f046e481a"} Feb 03 07:04:32 crc kubenswrapper[4998]: I0203 07:04:32.244855 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/kube-state-metrics-0" event={"ID":"217d1e8d-a95d-4152-a8d9-e843cd3b7260","Type":"ContainerStarted","Data":"4269fce0bda4ee5b3b2f866da6decc1780be724837c0c217232b9223984a0c4c"} Feb 03 07:04:32 crc kubenswrapper[4998]: I0203 07:04:32.245050 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Feb 03 07:04:32 crc kubenswrapper[4998]: I0203 07:04:32.246294 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-sm6db" event={"ID":"5a08e2bf-f0a7-4812-8137-c305d886f174","Type":"ContainerStarted","Data":"a058d234acce401bbcee892b1ab5a2ecf48b4ab883b5706dc40ddcd103e12351"} Feb 03 07:04:32 crc kubenswrapper[4998]: I0203 07:04:32.246476 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-sm6db" Feb 03 07:04:32 crc kubenswrapper[4998]: I0203 07:04:32.251570 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f54874ffc-qbqgv" event={"ID":"ba09b0ed-2387-4b93-85b8-12d159155c7f","Type":"ContainerStarted","Data":"b0e3135bc61b1626cfe74dd7ffee1f8ebf4fa81d82f114e79010f590b3fe02ca"} Feb 03 07:04:32 crc kubenswrapper[4998]: I0203 07:04:32.251748 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-f54874ffc-qbqgv" podUID="ba09b0ed-2387-4b93-85b8-12d159155c7f" containerName="dnsmasq-dns" containerID="cri-o://b0e3135bc61b1626cfe74dd7ffee1f8ebf4fa81d82f114e79010f590b3fe02ca" gracePeriod=10 Feb 03 07:04:32 crc kubenswrapper[4998]: I0203 07:04:32.251904 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-f54874ffc-qbqgv" Feb 03 07:04:32 crc kubenswrapper[4998]: I0203 07:04:32.260674 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=23.148973164 podStartE2EDuration="27.260652295s" podCreationTimestamp="2026-02-03 07:04:05 +0000 UTC" firstStartedPulling="2026-02-03 07:04:25.923819711 +0000 UTC m=+1104.210513517" lastFinishedPulling="2026-02-03 07:04:30.035498852 +0000 UTC m=+1108.322192648" observedRunningTime="2026-02-03 07:04:32.251841562 +0000 UTC m=+1110.538535378" watchObservedRunningTime="2026-02-03 07:04:32.260652295 +0000 UTC m=+1110.547346101" Feb 03 07:04:32 crc kubenswrapper[4998]: I0203 07:04:32.274094 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=19.878405919 podStartE2EDuration="25.274066449s" podCreationTimestamp="2026-02-03 07:04:07 +0000 UTC" firstStartedPulling="2026-02-03 07:04:25.856308776 +0000 UTC m=+1104.143002582" lastFinishedPulling="2026-02-03 07:04:31.251969306 +0000 UTC m=+1109.538663112" observedRunningTime="2026-02-03 07:04:32.269492888 +0000 UTC m=+1110.556186694" watchObservedRunningTime="2026-02-03 07:04:32.274066449 +0000 UTC m=+1110.560760265" Feb 03 07:04:32 crc kubenswrapper[4998]: I0203 07:04:32.292516 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=13.078281719 podStartE2EDuration="30.292495518s" podCreationTimestamp="2026-02-03 07:04:02 +0000 UTC" firstStartedPulling="2026-02-03 07:04:08.141210411 +0000 UTC m=+1086.427904217" lastFinishedPulling="2026-02-03 07:04:25.35542421 +0000 UTC m=+1103.642118016" observedRunningTime="2026-02-03 07:04:32.291706265 +0000 UTC m=+1110.578400091" watchObservedRunningTime="2026-02-03 07:04:32.292495518 +0000 UTC m=+1110.579189324" Feb 03 07:04:32 crc 
kubenswrapper[4998]: I0203 07:04:32.311798 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-sm6db" podStartSLOduration=16.860255529 podStartE2EDuration="21.31176543s" podCreationTimestamp="2026-02-03 07:04:11 +0000 UTC" firstStartedPulling="2026-02-03 07:04:25.930191613 +0000 UTC m=+1104.216885419" lastFinishedPulling="2026-02-03 07:04:30.381701504 +0000 UTC m=+1108.668395320" observedRunningTime="2026-02-03 07:04:32.310704239 +0000 UTC m=+1110.597398065" watchObservedRunningTime="2026-02-03 07:04:32.31176543 +0000 UTC m=+1110.598459236" Feb 03 07:04:32 crc kubenswrapper[4998]: I0203 07:04:32.334220 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=27.705242547 podStartE2EDuration="28.334204213s" podCreationTimestamp="2026-02-03 07:04:04 +0000 UTC" firstStartedPulling="2026-02-03 07:04:25.073035927 +0000 UTC m=+1103.359729733" lastFinishedPulling="2026-02-03 07:04:25.701997593 +0000 UTC m=+1103.988691399" observedRunningTime="2026-02-03 07:04:32.331700961 +0000 UTC m=+1110.618394777" watchObservedRunningTime="2026-02-03 07:04:32.334204213 +0000 UTC m=+1110.620898019" Feb 03 07:04:32 crc kubenswrapper[4998]: I0203 07:04:32.357005 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-f54874ffc-qbqgv" podStartSLOduration=8.908327024 podStartE2EDuration="32.356980646s" podCreationTimestamp="2026-02-03 07:04:00 +0000 UTC" firstStartedPulling="2026-02-03 07:04:01.88555299 +0000 UTC m=+1080.172246796" lastFinishedPulling="2026-02-03 07:04:25.334206612 +0000 UTC m=+1103.620900418" observedRunningTime="2026-02-03 07:04:32.351995913 +0000 UTC m=+1110.638689719" watchObservedRunningTime="2026-02-03 07:04:32.356980646 +0000 UTC m=+1110.643674472" Feb 03 07:04:33 crc kubenswrapper[4998]: I0203 07:04:33.266148 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-t4p58" event={"ID":"5e71558d-268c-4680-b43c-9fb48f34b38f","Type":"ContainerStarted","Data":"f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba"} Feb 03 07:04:33 crc kubenswrapper[4998]: I0203 07:04:33.269423 4998 generic.go:334] "Generic (PLEG): container finished" podID="ba09b0ed-2387-4b93-85b8-12d159155c7f" containerID="b0e3135bc61b1626cfe74dd7ffee1f8ebf4fa81d82f114e79010f590b3fe02ca" exitCode=0 Feb 03 07:04:33 crc kubenswrapper[4998]: I0203 07:04:33.270227 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f54874ffc-qbqgv" event={"ID":"ba09b0ed-2387-4b93-85b8-12d159155c7f","Type":"ContainerDied","Data":"b0e3135bc61b1626cfe74dd7ffee1f8ebf4fa81d82f114e79010f590b3fe02ca"} Feb 03 07:04:33 crc kubenswrapper[4998]: I0203 07:04:33.270256 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f54874ffc-qbqgv" event={"ID":"ba09b0ed-2387-4b93-85b8-12d159155c7f","Type":"ContainerDied","Data":"5ebbb99c2ee592df0684b89044fc8566f6bfd2a8eebd92b11bc7b0c789316ba6"} Feb 03 07:04:33 crc kubenswrapper[4998]: I0203 07:04:33.270266 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5ebbb99c2ee592df0684b89044fc8566f6bfd2a8eebd92b11bc7b0c789316ba6" Feb 03 07:04:33 crc kubenswrapper[4998]: I0203 07:04:33.442662 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-f54874ffc-qbqgv" Feb 03 07:04:33 crc kubenswrapper[4998]: I0203 07:04:33.593184 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ba09b0ed-2387-4b93-85b8-12d159155c7f-dns-svc\") pod \"ba09b0ed-2387-4b93-85b8-12d159155c7f\" (UID: \"ba09b0ed-2387-4b93-85b8-12d159155c7f\") " Feb 03 07:04:33 crc kubenswrapper[4998]: I0203 07:04:33.593442 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba09b0ed-2387-4b93-85b8-12d159155c7f-config\") pod \"ba09b0ed-2387-4b93-85b8-12d159155c7f\" (UID: \"ba09b0ed-2387-4b93-85b8-12d159155c7f\") " Feb 03 07:04:33 crc kubenswrapper[4998]: I0203 07:04:33.593513 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ghcv9\" (UniqueName: \"kubernetes.io/projected/ba09b0ed-2387-4b93-85b8-12d159155c7f-kube-api-access-ghcv9\") pod \"ba09b0ed-2387-4b93-85b8-12d159155c7f\" (UID: \"ba09b0ed-2387-4b93-85b8-12d159155c7f\") " Feb 03 07:04:33 crc kubenswrapper[4998]: I0203 07:04:33.598239 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba09b0ed-2387-4b93-85b8-12d159155c7f-kube-api-access-ghcv9" (OuterVolumeSpecName: "kube-api-access-ghcv9") pod "ba09b0ed-2387-4b93-85b8-12d159155c7f" (UID: "ba09b0ed-2387-4b93-85b8-12d159155c7f"). InnerVolumeSpecName "kube-api-access-ghcv9". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:04:33 crc kubenswrapper[4998]: I0203 07:04:33.633648 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba09b0ed-2387-4b93-85b8-12d159155c7f-config" (OuterVolumeSpecName: "config") pod "ba09b0ed-2387-4b93-85b8-12d159155c7f" (UID: "ba09b0ed-2387-4b93-85b8-12d159155c7f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:04:33 crc kubenswrapper[4998]: I0203 07:04:33.640717 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba09b0ed-2387-4b93-85b8-12d159155c7f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ba09b0ed-2387-4b93-85b8-12d159155c7f" (UID: "ba09b0ed-2387-4b93-85b8-12d159155c7f"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:04:33 crc kubenswrapper[4998]: I0203 07:04:33.694664 4998 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ba09b0ed-2387-4b93-85b8-12d159155c7f-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 07:04:33 crc kubenswrapper[4998]: I0203 07:04:33.694696 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba09b0ed-2387-4b93-85b8-12d159155c7f-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:04:33 crc kubenswrapper[4998]: I0203 07:04:33.694705 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ghcv9\" (UniqueName: \"kubernetes.io/projected/ba09b0ed-2387-4b93-85b8-12d159155c7f-kube-api-access-ghcv9\") on node \"crc\" DevicePath \"\"" Feb 03 07:04:33 crc kubenswrapper[4998]: I0203 07:04:33.986847 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Feb 03 07:04:33 crc kubenswrapper[4998]: I0203 07:04:33.986911 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Feb 03 07:04:34 crc kubenswrapper[4998]: I0203 07:04:34.277898 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"94b790fb-3209-436d-b48d-f3978a82a557","Type":"ContainerStarted","Data":"4f8ef037c2d0e4e053ae59278fde31f26ecb573b9d514deb328a8f1afc372f98"} Feb 03 07:04:34 crc kubenswrapper[4998]: I0203 07:04:34.282019 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-t4p58" event={"ID":"5e71558d-268c-4680-b43c-9fb48f34b38f","Type":"ContainerStarted","Data":"5924834a1fe26ed73de03c1f7f47a03b21661cf375e099684ef40cf961a44b28"} Feb 03 07:04:34 crc kubenswrapper[4998]: I0203 07:04:34.282150 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-t4p58" Feb 03 07:04:34 crc kubenswrapper[4998]: I0203 07:04:34.282206 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-t4p58" Feb 03 07:04:34 crc kubenswrapper[4998]: I0203 07:04:34.284112 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-f54874ffc-qbqgv" Feb 03 07:04:34 crc kubenswrapper[4998]: I0203 07:04:34.284501 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c","Type":"ContainerStarted","Data":"7404cf26ae91b0be81d59a320980e944b738ce1db633c16325d9a0711dd3bad5"} Feb 03 07:04:34 crc kubenswrapper[4998]: I0203 07:04:34.310552 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=14.159414163 podStartE2EDuration="21.310535185s" podCreationTimestamp="2026-02-03 07:04:13 +0000 UTC" firstStartedPulling="2026-02-03 07:04:26.176238875 +0000 UTC m=+1104.462932681" lastFinishedPulling="2026-02-03 07:04:33.327359897 +0000 UTC m=+1111.614053703" observedRunningTime="2026-02-03 07:04:34.304179633 +0000 UTC m=+1112.590873489" watchObservedRunningTime="2026-02-03 07:04:34.310535185 +0000 UTC m=+1112.597228981" Feb 03 07:04:34 crc kubenswrapper[4998]: I0203 07:04:34.333491 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-t4p58" podStartSLOduration=19.683985899 podStartE2EDuration="23.333471343s" podCreationTimestamp="2026-02-03 07:04:11 +0000 UTC" firstStartedPulling="2026-02-03 07:04:26.732189709 +0000 UTC m=+1105.018883515" lastFinishedPulling="2026-02-03 07:04:30.381675153 +0000 UTC m=+1108.668368959" observedRunningTime="2026-02-03 07:04:34.331350942 +0000 UTC m=+1112.618044768" watchObservedRunningTime="2026-02-03 07:04:34.333471343 +0000 UTC m=+1112.620165159" Feb 03 07:04:34 crc kubenswrapper[4998]: I0203 07:04:34.358288 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=17.057608555 podStartE2EDuration="24.358266143s" podCreationTimestamp="2026-02-03 07:04:10 +0000 UTC" firstStartedPulling="2026-02-03 07:04:26.034187834 +0000 UTC m=+1104.320881640" lastFinishedPulling="2026-02-03 07:04:33.334845422 +0000 UTC m=+1111.621539228" observedRunningTime="2026-02-03 07:04:34.35116791 +0000 UTC m=+1112.637861726" watchObservedRunningTime="2026-02-03 07:04:34.358266143 +0000 UTC m=+1112.644959949" Feb 03 07:04:34 crc kubenswrapper[4998]: I0203 07:04:34.371632 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f54874ffc-qbqgv"] Feb 03 07:04:34 crc kubenswrapper[4998]: I0203 07:04:34.378771 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-f54874ffc-qbqgv"] Feb 03 07:04:34 crc kubenswrapper[4998]: I0203 07:04:34.437825 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba09b0ed-2387-4b93-85b8-12d159155c7f" path="/var/lib/kubelet/pods/ba09b0ed-2387-4b93-85b8-12d159155c7f/volumes" Feb 03 07:04:34 crc kubenswrapper[4998]: I0203 07:04:34.885885 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Feb 03 07:04:35 crc kubenswrapper[4998]: I0203 07:04:35.441422 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Feb 03 07:04:35 crc kubenswrapper[4998]: I0203 07:04:35.442193 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Feb 03 07:04:35 crc kubenswrapper[4998]: I0203 07:04:35.886301 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Feb 03 07:04:35 crc kubenswrapper[4998]: I0203 
07:04:35.954117 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.274645 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.330398 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.344485 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.491969 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.622082 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.625115 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86dddb8dc7-t6shx"] Feb 03 07:04:36 crc kubenswrapper[4998]: E0203 07:04:36.625399 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba09b0ed-2387-4b93-85b8-12d159155c7f" containerName="init" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.625411 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba09b0ed-2387-4b93-85b8-12d159155c7f" containerName="init" Feb 03 07:04:36 crc kubenswrapper[4998]: E0203 07:04:36.625439 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba09b0ed-2387-4b93-85b8-12d159155c7f" containerName="dnsmasq-dns" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.625444 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba09b0ed-2387-4b93-85b8-12d159155c7f" containerName="dnsmasq-dns" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.625572 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba09b0ed-2387-4b93-85b8-12d159155c7f" containerName="dnsmasq-dns" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.627056 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86dddb8dc7-t6shx" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.629818 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.646606 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86dddb8dc7-t6shx"] Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.687839 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-qtllb"] Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.689460 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-qtllb" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.692075 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.715737 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-qtllb"] Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.753521 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0bd93b67-5d98-447b-a60f-0f5fa045887e-ovsdbserver-sb\") pod \"dnsmasq-dns-86dddb8dc7-t6shx\" (UID: \"0bd93b67-5d98-447b-a60f-0f5fa045887e\") " pod="openstack/dnsmasq-dns-86dddb8dc7-t6shx" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.753620 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rqvxs\" (UniqueName: \"kubernetes.io/projected/0bd93b67-5d98-447b-a60f-0f5fa045887e-kube-api-access-rqvxs\") pod \"dnsmasq-dns-86dddb8dc7-t6shx\" (UID: \"0bd93b67-5d98-447b-a60f-0f5fa045887e\") " pod="openstack/dnsmasq-dns-86dddb8dc7-t6shx" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.753652 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0bd93b67-5d98-447b-a60f-0f5fa045887e-config\") pod \"dnsmasq-dns-86dddb8dc7-t6shx\" (UID: \"0bd93b67-5d98-447b-a60f-0f5fa045887e\") " pod="openstack/dnsmasq-dns-86dddb8dc7-t6shx" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.753681 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0bd93b67-5d98-447b-a60f-0f5fa045887e-dns-svc\") pod \"dnsmasq-dns-86dddb8dc7-t6shx\" (UID: \"0bd93b67-5d98-447b-a60f-0f5fa045887e\") " pod="openstack/dnsmasq-dns-86dddb8dc7-t6shx" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.855355 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a85f317-a6ed-4d19-8222-136fda8b4517-config\") pod \"ovn-controller-metrics-qtllb\" (UID: \"9a85f317-a6ed-4d19-8222-136fda8b4517\") " pod="openstack/ovn-controller-metrics-qtllb" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.856286 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9a85f317-a6ed-4d19-8222-136fda8b4517-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-qtllb\" (UID: \"9a85f317-a6ed-4d19-8222-136fda8b4517\") " pod="openstack/ovn-controller-metrics-qtllb" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.856378 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0bd93b67-5d98-447b-a60f-0f5fa045887e-ovsdbserver-sb\") pod \"dnsmasq-dns-86dddb8dc7-t6shx\" (UID: \"0bd93b67-5d98-447b-a60f-0f5fa045887e\") " pod="openstack/dnsmasq-dns-86dddb8dc7-t6shx" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.856454 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/9a85f317-a6ed-4d19-8222-136fda8b4517-ovs-rundir\") pod \"ovn-controller-metrics-qtllb\" (UID: 
\"9a85f317-a6ed-4d19-8222-136fda8b4517\") " pod="openstack/ovn-controller-metrics-qtllb" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.856523 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rqvxs\" (UniqueName: \"kubernetes.io/projected/0bd93b67-5d98-447b-a60f-0f5fa045887e-kube-api-access-rqvxs\") pod \"dnsmasq-dns-86dddb8dc7-t6shx\" (UID: \"0bd93b67-5d98-447b-a60f-0f5fa045887e\") " pod="openstack/dnsmasq-dns-86dddb8dc7-t6shx" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.856554 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0bd93b67-5d98-447b-a60f-0f5fa045887e-config\") pod \"dnsmasq-dns-86dddb8dc7-t6shx\" (UID: \"0bd93b67-5d98-447b-a60f-0f5fa045887e\") " pod="openstack/dnsmasq-dns-86dddb8dc7-t6shx" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.856594 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a85f317-a6ed-4d19-8222-136fda8b4517-combined-ca-bundle\") pod \"ovn-controller-metrics-qtllb\" (UID: \"9a85f317-a6ed-4d19-8222-136fda8b4517\") " pod="openstack/ovn-controller-metrics-qtllb" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.856633 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0bd93b67-5d98-447b-a60f-0f5fa045887e-dns-svc\") pod \"dnsmasq-dns-86dddb8dc7-t6shx\" (UID: \"0bd93b67-5d98-447b-a60f-0f5fa045887e\") " pod="openstack/dnsmasq-dns-86dddb8dc7-t6shx" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.856719 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkvsm\" (UniqueName: \"kubernetes.io/projected/9a85f317-a6ed-4d19-8222-136fda8b4517-kube-api-access-bkvsm\") pod \"ovn-controller-metrics-qtllb\" (UID: \"9a85f317-a6ed-4d19-8222-136fda8b4517\") " pod="openstack/ovn-controller-metrics-qtllb" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.856775 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/9a85f317-a6ed-4d19-8222-136fda8b4517-ovn-rundir\") pod \"ovn-controller-metrics-qtllb\" (UID: \"9a85f317-a6ed-4d19-8222-136fda8b4517\") " pod="openstack/ovn-controller-metrics-qtllb" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.858334 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0bd93b67-5d98-447b-a60f-0f5fa045887e-ovsdbserver-sb\") pod \"dnsmasq-dns-86dddb8dc7-t6shx\" (UID: \"0bd93b67-5d98-447b-a60f-0f5fa045887e\") " pod="openstack/dnsmasq-dns-86dddb8dc7-t6shx" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.859584 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0bd93b67-5d98-447b-a60f-0f5fa045887e-dns-svc\") pod \"dnsmasq-dns-86dddb8dc7-t6shx\" (UID: \"0bd93b67-5d98-447b-a60f-0f5fa045887e\") " pod="openstack/dnsmasq-dns-86dddb8dc7-t6shx" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.859641 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0bd93b67-5d98-447b-a60f-0f5fa045887e-config\") pod \"dnsmasq-dns-86dddb8dc7-t6shx\" (UID: \"0bd93b67-5d98-447b-a60f-0f5fa045887e\") " 
pod="openstack/dnsmasq-dns-86dddb8dc7-t6shx" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.882951 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rqvxs\" (UniqueName: \"kubernetes.io/projected/0bd93b67-5d98-447b-a60f-0f5fa045887e-kube-api-access-rqvxs\") pod \"dnsmasq-dns-86dddb8dc7-t6shx\" (UID: \"0bd93b67-5d98-447b-a60f-0f5fa045887e\") " pod="openstack/dnsmasq-dns-86dddb8dc7-t6shx" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.924436 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86dddb8dc7-t6shx"] Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.925169 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86dddb8dc7-t6shx" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.954548 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-56df986d9c-4vcgt"] Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.955875 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56df986d9c-4vcgt" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.957717 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/9a85f317-a6ed-4d19-8222-136fda8b4517-ovn-rundir\") pod \"ovn-controller-metrics-qtllb\" (UID: \"9a85f317-a6ed-4d19-8222-136fda8b4517\") " pod="openstack/ovn-controller-metrics-qtllb" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.957769 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9w5fs\" (UniqueName: \"kubernetes.io/projected/ed67f460-eadc-4355-a6b2-bb5f46615481-kube-api-access-9w5fs\") pod \"dnsmasq-dns-56df986d9c-4vcgt\" (UID: \"ed67f460-eadc-4355-a6b2-bb5f46615481\") " pod="openstack/dnsmasq-dns-56df986d9c-4vcgt" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.957828 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a85f317-a6ed-4d19-8222-136fda8b4517-config\") pod \"ovn-controller-metrics-qtllb\" (UID: \"9a85f317-a6ed-4d19-8222-136fda8b4517\") " pod="openstack/ovn-controller-metrics-qtllb" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.957852 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed67f460-eadc-4355-a6b2-bb5f46615481-config\") pod \"dnsmasq-dns-56df986d9c-4vcgt\" (UID: \"ed67f460-eadc-4355-a6b2-bb5f46615481\") " pod="openstack/dnsmasq-dns-56df986d9c-4vcgt" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.957887 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9a85f317-a6ed-4d19-8222-136fda8b4517-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-qtllb\" (UID: \"9a85f317-a6ed-4d19-8222-136fda8b4517\") " pod="openstack/ovn-controller-metrics-qtllb" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.957921 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ed67f460-eadc-4355-a6b2-bb5f46615481-dns-svc\") pod \"dnsmasq-dns-56df986d9c-4vcgt\" (UID: \"ed67f460-eadc-4355-a6b2-bb5f46615481\") " pod="openstack/dnsmasq-dns-56df986d9c-4vcgt" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 
07:04:36.957955 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ed67f460-eadc-4355-a6b2-bb5f46615481-ovsdbserver-nb\") pod \"dnsmasq-dns-56df986d9c-4vcgt\" (UID: \"ed67f460-eadc-4355-a6b2-bb5f46615481\") " pod="openstack/dnsmasq-dns-56df986d9c-4vcgt" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.957985 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ed67f460-eadc-4355-a6b2-bb5f46615481-ovsdbserver-sb\") pod \"dnsmasq-dns-56df986d9c-4vcgt\" (UID: \"ed67f460-eadc-4355-a6b2-bb5f46615481\") " pod="openstack/dnsmasq-dns-56df986d9c-4vcgt" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.958032 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/9a85f317-a6ed-4d19-8222-136fda8b4517-ovs-rundir\") pod \"ovn-controller-metrics-qtllb\" (UID: \"9a85f317-a6ed-4d19-8222-136fda8b4517\") " pod="openstack/ovn-controller-metrics-qtllb" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.958083 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a85f317-a6ed-4d19-8222-136fda8b4517-combined-ca-bundle\") pod \"ovn-controller-metrics-qtllb\" (UID: \"9a85f317-a6ed-4d19-8222-136fda8b4517\") " pod="openstack/ovn-controller-metrics-qtllb" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.958122 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkvsm\" (UniqueName: \"kubernetes.io/projected/9a85f317-a6ed-4d19-8222-136fda8b4517-kube-api-access-bkvsm\") pod \"ovn-controller-metrics-qtllb\" (UID: \"9a85f317-a6ed-4d19-8222-136fda8b4517\") " pod="openstack/ovn-controller-metrics-qtllb" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.958122 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/9a85f317-a6ed-4d19-8222-136fda8b4517-ovn-rundir\") pod \"ovn-controller-metrics-qtllb\" (UID: \"9a85f317-a6ed-4d19-8222-136fda8b4517\") " pod="openstack/ovn-controller-metrics-qtllb" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.958247 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/9a85f317-a6ed-4d19-8222-136fda8b4517-ovs-rundir\") pod \"ovn-controller-metrics-qtllb\" (UID: \"9a85f317-a6ed-4d19-8222-136fda8b4517\") " pod="openstack/ovn-controller-metrics-qtllb" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.958672 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a85f317-a6ed-4d19-8222-136fda8b4517-config\") pod \"ovn-controller-metrics-qtllb\" (UID: \"9a85f317-a6ed-4d19-8222-136fda8b4517\") " pod="openstack/ovn-controller-metrics-qtllb" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.962982 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9a85f317-a6ed-4d19-8222-136fda8b4517-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-qtllb\" (UID: \"9a85f317-a6ed-4d19-8222-136fda8b4517\") " pod="openstack/ovn-controller-metrics-qtllb" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.963464 4998 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.966492 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a85f317-a6ed-4d19-8222-136fda8b4517-combined-ca-bundle\") pod \"ovn-controller-metrics-qtllb\" (UID: \"9a85f317-a6ed-4d19-8222-136fda8b4517\") " pod="openstack/ovn-controller-metrics-qtllb" Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.982865 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56df986d9c-4vcgt"] Feb 03 07:04:36 crc kubenswrapper[4998]: I0203 07:04:36.993742 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkvsm\" (UniqueName: \"kubernetes.io/projected/9a85f317-a6ed-4d19-8222-136fda8b4517-kube-api-access-bkvsm\") pod \"ovn-controller-metrics-qtllb\" (UID: \"9a85f317-a6ed-4d19-8222-136fda8b4517\") " pod="openstack/ovn-controller-metrics-qtllb" Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.009086 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-qtllb" Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.069955 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9w5fs\" (UniqueName: \"kubernetes.io/projected/ed67f460-eadc-4355-a6b2-bb5f46615481-kube-api-access-9w5fs\") pod \"dnsmasq-dns-56df986d9c-4vcgt\" (UID: \"ed67f460-eadc-4355-a6b2-bb5f46615481\") " pod="openstack/dnsmasq-dns-56df986d9c-4vcgt" Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.070376 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed67f460-eadc-4355-a6b2-bb5f46615481-config\") pod \"dnsmasq-dns-56df986d9c-4vcgt\" (UID: \"ed67f460-eadc-4355-a6b2-bb5f46615481\") " pod="openstack/dnsmasq-dns-56df986d9c-4vcgt" Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.070430 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ed67f460-eadc-4355-a6b2-bb5f46615481-dns-svc\") pod \"dnsmasq-dns-56df986d9c-4vcgt\" (UID: \"ed67f460-eadc-4355-a6b2-bb5f46615481\") " pod="openstack/dnsmasq-dns-56df986d9c-4vcgt" Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.070466 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ed67f460-eadc-4355-a6b2-bb5f46615481-ovsdbserver-nb\") pod \"dnsmasq-dns-56df986d9c-4vcgt\" (UID: \"ed67f460-eadc-4355-a6b2-bb5f46615481\") " pod="openstack/dnsmasq-dns-56df986d9c-4vcgt" Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.070498 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ed67f460-eadc-4355-a6b2-bb5f46615481-ovsdbserver-sb\") pod \"dnsmasq-dns-56df986d9c-4vcgt\" (UID: \"ed67f460-eadc-4355-a6b2-bb5f46615481\") " pod="openstack/dnsmasq-dns-56df986d9c-4vcgt" Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.071555 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ed67f460-eadc-4355-a6b2-bb5f46615481-ovsdbserver-sb\") pod \"dnsmasq-dns-56df986d9c-4vcgt\" (UID: \"ed67f460-eadc-4355-a6b2-bb5f46615481\") " pod="openstack/dnsmasq-dns-56df986d9c-4vcgt" Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 
07:04:37.072403 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed67f460-eadc-4355-a6b2-bb5f46615481-config\") pod \"dnsmasq-dns-56df986d9c-4vcgt\" (UID: \"ed67f460-eadc-4355-a6b2-bb5f46615481\") " pod="openstack/dnsmasq-dns-56df986d9c-4vcgt" Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.076296 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ed67f460-eadc-4355-a6b2-bb5f46615481-dns-svc\") pod \"dnsmasq-dns-56df986d9c-4vcgt\" (UID: \"ed67f460-eadc-4355-a6b2-bb5f46615481\") " pod="openstack/dnsmasq-dns-56df986d9c-4vcgt" Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.078251 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ed67f460-eadc-4355-a6b2-bb5f46615481-ovsdbserver-nb\") pod \"dnsmasq-dns-56df986d9c-4vcgt\" (UID: \"ed67f460-eadc-4355-a6b2-bb5f46615481\") " pod="openstack/dnsmasq-dns-56df986d9c-4vcgt" Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.100911 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9w5fs\" (UniqueName: \"kubernetes.io/projected/ed67f460-eadc-4355-a6b2-bb5f46615481-kube-api-access-9w5fs\") pod \"dnsmasq-dns-56df986d9c-4vcgt\" (UID: \"ed67f460-eadc-4355-a6b2-bb5f46615481\") " pod="openstack/dnsmasq-dns-56df986d9c-4vcgt" Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.274042 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.318195 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.376326 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56df986d9c-4vcgt" Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.453279 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86dddb8dc7-t6shx"] Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.520713 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.586884 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-qtllb"] Feb 03 07:04:37 crc kubenswrapper[4998]: W0203 07:04:37.618156 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9a85f317_a6ed_4d19_8222_136fda8b4517.slice/crio-a1b68346aa52a9066bce516d34daec8fbec9a185b7e9ce9e45b4a1ff1cdade6b WatchSource:0}: Error finding container a1b68346aa52a9066bce516d34daec8fbec9a185b7e9ce9e45b4a1ff1cdade6b: Status 404 returned error can't find the container with id a1b68346aa52a9066bce516d34daec8fbec9a185b7e9ce9e45b4a1ff1cdade6b Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.626095 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.627745 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.634668 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.635071 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.635222 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-8jmjm" Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.635338 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.640262 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.694866 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/07088226-5029-4477-a6e1-85fd28c08f4b-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"07088226-5029-4477-a6e1-85fd28c08f4b\") " pod="openstack/ovn-northd-0" Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.694977 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/07088226-5029-4477-a6e1-85fd28c08f4b-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"07088226-5029-4477-a6e1-85fd28c08f4b\") " pod="openstack/ovn-northd-0" Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.695032 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07088226-5029-4477-a6e1-85fd28c08f4b-config\") pod \"ovn-northd-0\" (UID: \"07088226-5029-4477-a6e1-85fd28c08f4b\") " pod="openstack/ovn-northd-0" Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.695110 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07088226-5029-4477-a6e1-85fd28c08f4b-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"07088226-5029-4477-a6e1-85fd28c08f4b\") " pod="openstack/ovn-northd-0" Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.695215 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r9d9h\" (UniqueName: \"kubernetes.io/projected/07088226-5029-4477-a6e1-85fd28c08f4b-kube-api-access-r9d9h\") pod \"ovn-northd-0\" (UID: \"07088226-5029-4477-a6e1-85fd28c08f4b\") " pod="openstack/ovn-northd-0" Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.695265 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/07088226-5029-4477-a6e1-85fd28c08f4b-scripts\") pod \"ovn-northd-0\" (UID: \"07088226-5029-4477-a6e1-85fd28c08f4b\") " pod="openstack/ovn-northd-0" Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.695580 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/07088226-5029-4477-a6e1-85fd28c08f4b-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"07088226-5029-4477-a6e1-85fd28c08f4b\") " pod="openstack/ovn-northd-0" Feb 03 07:04:37 crc kubenswrapper[4998]: 
I0203 07:04:37.796877 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07088226-5029-4477-a6e1-85fd28c08f4b-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"07088226-5029-4477-a6e1-85fd28c08f4b\") " pod="openstack/ovn-northd-0"
Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.796942 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r9d9h\" (UniqueName: \"kubernetes.io/projected/07088226-5029-4477-a6e1-85fd28c08f4b-kube-api-access-r9d9h\") pod \"ovn-northd-0\" (UID: \"07088226-5029-4477-a6e1-85fd28c08f4b\") " pod="openstack/ovn-northd-0"
Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.796969 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/07088226-5029-4477-a6e1-85fd28c08f4b-scripts\") pod \"ovn-northd-0\" (UID: \"07088226-5029-4477-a6e1-85fd28c08f4b\") " pod="openstack/ovn-northd-0"
Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.797039 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/07088226-5029-4477-a6e1-85fd28c08f4b-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"07088226-5029-4477-a6e1-85fd28c08f4b\") " pod="openstack/ovn-northd-0"
Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.797065 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/07088226-5029-4477-a6e1-85fd28c08f4b-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"07088226-5029-4477-a6e1-85fd28c08f4b\") " pod="openstack/ovn-northd-0"
Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.797116 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/07088226-5029-4477-a6e1-85fd28c08f4b-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"07088226-5029-4477-a6e1-85fd28c08f4b\") " pod="openstack/ovn-northd-0"
Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.797140 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07088226-5029-4477-a6e1-85fd28c08f4b-config\") pod \"ovn-northd-0\" (UID: \"07088226-5029-4477-a6e1-85fd28c08f4b\") " pod="openstack/ovn-northd-0"
Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.798257 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07088226-5029-4477-a6e1-85fd28c08f4b-config\") pod \"ovn-northd-0\" (UID: \"07088226-5029-4477-a6e1-85fd28c08f4b\") " pod="openstack/ovn-northd-0"
Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.799132 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/07088226-5029-4477-a6e1-85fd28c08f4b-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"07088226-5029-4477-a6e1-85fd28c08f4b\") " pod="openstack/ovn-northd-0"
Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.799937 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/07088226-5029-4477-a6e1-85fd28c08f4b-scripts\") pod \"ovn-northd-0\" (UID: \"07088226-5029-4477-a6e1-85fd28c08f4b\") " pod="openstack/ovn-northd-0"
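The reconciler entries above show the kubelet's volume manager at work for ovn-northd-0: for every volume in the pod spec it first records VerifyControllerAttachedVolume (the volume is attached, or is a node-local type that needs no attach), then MountVolume, and finally MountVolume.SetUp succeeded once the plugin (secret, configmap, projected, empty-dir) has materialized the files in the pod's volume directory. A minimal Go sketch of that desired-state-versus-actual-state loop, with simplified stand-in types rather than the kubelet's real API:

package main

import "fmt"

// Volume is a simplified stand-in for a pod volume in the desired state of world.
type Volume struct {
	Name   string
	Plugin string // e.g. "kubernetes.io/secret", "kubernetes.io/configmap"
}

// World holds what should be mounted (desired) and what is mounted (actual).
type World struct {
	desired map[string]Volume
	actual  map[string]bool
}

// reconcile mounts anything desired-but-unmounted and unmounts anything
// mounted-but-no-longer-desired, mirroring the flow logged by
// reconciler_common.go and operation_generator.go.
func (w *World) reconcile() {
	for name, v := range w.desired {
		if !w.actual[name] {
			fmt.Printf("VerifyControllerAttachedVolume started for %q (%s)\n", name, v.Plugin)
			fmt.Printf("MountVolume started for %q\n", name)
			w.actual[name] = true // the plugin's SetUp() would run here
			fmt.Printf("MountVolume.SetUp succeeded for %q\n", name)
		}
	}
	for name := range w.actual {
		if _, ok := w.desired[name]; !ok {
			fmt.Printf("UnmountVolume started for %q\n", name)
			delete(w.actual, name)
			fmt.Printf("Volume detached for %q\n", name)
		}
	}
}

func main() {
	w := &World{
		desired: map[string]Volume{
			"config":  {Name: "config", Plugin: "kubernetes.io/configmap"},
			"scripts": {Name: "scripts", Plugin: "kubernetes.io/configmap"},
		},
		actual: map[string]bool{},
	}
	w.reconcile()
}

The same loop running in the other direction accounts for the UnmountVolume and "Volume detached" entries that appear further down once the dnsmasq-dns-86dddb8dc7-t6shx pod is deleted.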
Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.804382 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/07088226-5029-4477-a6e1-85fd28c08f4b-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"07088226-5029-4477-a6e1-85fd28c08f4b\") " pod="openstack/ovn-northd-0"
Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.805137 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/07088226-5029-4477-a6e1-85fd28c08f4b-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"07088226-5029-4477-a6e1-85fd28c08f4b\") " pod="openstack/ovn-northd-0"
Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.805339 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07088226-5029-4477-a6e1-85fd28c08f4b-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"07088226-5029-4477-a6e1-85fd28c08f4b\") " pod="openstack/ovn-northd-0"
Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.827022 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r9d9h\" (UniqueName: \"kubernetes.io/projected/07088226-5029-4477-a6e1-85fd28c08f4b-kube-api-access-r9d9h\") pod \"ovn-northd-0\" (UID: \"07088226-5029-4477-a6e1-85fd28c08f4b\") " pod="openstack/ovn-northd-0"
Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.959101 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Feb 03 07:04:37 crc kubenswrapper[4998]: I0203 07:04:37.961979 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56df986d9c-4vcgt"]
Feb 03 07:04:38 crc kubenswrapper[4998]: I0203 07:04:38.104304 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0"
Feb 03 07:04:38 crc kubenswrapper[4998]: I0203 07:04:38.207552 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0"
Feb 03 07:04:38 crc kubenswrapper[4998]: I0203 07:04:38.315618 4998 generic.go:334] "Generic (PLEG): container finished" podID="ed67f460-eadc-4355-a6b2-bb5f46615481" containerID="fb202ab9f9716c29b2c5589e45c6be6291adb213792510fab03eeb19e57c43df" exitCode=0
Feb 03 07:04:38 crc kubenswrapper[4998]: I0203 07:04:38.315694 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df986d9c-4vcgt" event={"ID":"ed67f460-eadc-4355-a6b2-bb5f46615481","Type":"ContainerDied","Data":"fb202ab9f9716c29b2c5589e45c6be6291adb213792510fab03eeb19e57c43df"}
Feb 03 07:04:38 crc kubenswrapper[4998]: I0203 07:04:38.315722 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df986d9c-4vcgt" event={"ID":"ed67f460-eadc-4355-a6b2-bb5f46615481","Type":"ContainerStarted","Data":"d7d85b4aa4ae6033d8da49cf785f2c704ac8ab1d6b5cb36417f4b6535359d23c"}
Feb 03 07:04:38 crc kubenswrapper[4998]: I0203 07:04:38.319099 4998 generic.go:334] "Generic (PLEG): container finished" podID="0bd93b67-5d98-447b-a60f-0f5fa045887e" containerID="34117c9834e526479e5c8922297f511787a4d5b725d7d9a1bc6a94239d26fe0f" exitCode=0
Feb 03 07:04:38 crc kubenswrapper[4998]: I0203 07:04:38.319231 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86dddb8dc7-t6shx" event={"ID":"0bd93b67-5d98-447b-a60f-0f5fa045887e","Type":"ContainerDied","Data":"34117c9834e526479e5c8922297f511787a4d5b725d7d9a1bc6a94239d26fe0f"}
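The "Generic (PLEG)" and "SyncLoop (PLEG)" entries come from the Pod Lifecycle Event Generator: the kubelet periodically relists container state from the runtime, diffs it against its cached snapshot, and feeds ContainerStarted/ContainerDied events into the sync loop. Here each dnsmasq pod's init container finished with exitCode=0 while its sandbox kept running. A small Go sketch of the relist-and-diff idea (simplified states, not the actual kubelet types):

package main

import "fmt"

type state string

const (
	running state = "running"
	exited  state = "exited"
)

// relist diffs the previous and current runtime snapshots, keyed by
// container ID, and emits PLEG-style lifecycle events.
func relist(prev, cur map[string]state) []string {
	var events []string
	for id, s := range cur {
		switch {
		case prev[id] != running && s == running:
			events = append(events, "ContainerStarted "+id)
		case prev[id] == running && s == exited:
			events = append(events, "ContainerDied "+id)
		}
	}
	return events
}

func main() {
	prev := map[string]state{"init": running}
	cur := map[string]state{"init": exited, "dnsmasq": running}
	for _, e := range relist(prev, cur) {
		fmt.Println(e) // map iteration order is not deterministic in Go
	}
}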
Feb 03 07:04:38 crc kubenswrapper[4998]: I0203 07:04:38.319282 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86dddb8dc7-t6shx" event={"ID":"0bd93b67-5d98-447b-a60f-0f5fa045887e","Type":"ContainerStarted","Data":"d1966f27a7f11dac3a31310eef24ea8799e1512b2a0a1c90f9133dfdf791e1f4"}
Feb 03 07:04:38 crc kubenswrapper[4998]: I0203 07:04:38.326441 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-qtllb" event={"ID":"9a85f317-a6ed-4d19-8222-136fda8b4517","Type":"ContainerStarted","Data":"2647dc3bcf763bab29c7e6b082b05a86b066ea5c8e34ab43d3543cc7fce88f34"}
Feb 03 07:04:38 crc kubenswrapper[4998]: I0203 07:04:38.326479 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-qtllb" event={"ID":"9a85f317-a6ed-4d19-8222-136fda8b4517","Type":"ContainerStarted","Data":"a1b68346aa52a9066bce516d34daec8fbec9a185b7e9ce9e45b4a1ff1cdade6b"}
Feb 03 07:04:38 crc kubenswrapper[4998]: I0203 07:04:38.380096 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-qtllb" podStartSLOduration=2.380081428 podStartE2EDuration="2.380081428s" podCreationTimestamp="2026-02-03 07:04:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:04:38.378855253 +0000 UTC m=+1116.665549059" watchObservedRunningTime="2026-02-03 07:04:38.380081428 +0000 UTC m=+1116.666775234"
Feb 03 07:04:38 crc kubenswrapper[4998]: I0203 07:04:38.447175 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Feb 03 07:04:38 crc kubenswrapper[4998]: I0203 07:04:38.616447 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86dddb8dc7-t6shx"
Feb 03 07:04:38 crc kubenswrapper[4998]: I0203 07:04:38.716157 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0bd93b67-5d98-447b-a60f-0f5fa045887e-ovsdbserver-sb\") pod \"0bd93b67-5d98-447b-a60f-0f5fa045887e\" (UID: \"0bd93b67-5d98-447b-a60f-0f5fa045887e\") "
Feb 03 07:04:38 crc kubenswrapper[4998]: I0203 07:04:38.716217 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0bd93b67-5d98-447b-a60f-0f5fa045887e-dns-svc\") pod \"0bd93b67-5d98-447b-a60f-0f5fa045887e\" (UID: \"0bd93b67-5d98-447b-a60f-0f5fa045887e\") "
Feb 03 07:04:38 crc kubenswrapper[4998]: I0203 07:04:38.716254 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rqvxs\" (UniqueName: \"kubernetes.io/projected/0bd93b67-5d98-447b-a60f-0f5fa045887e-kube-api-access-rqvxs\") pod \"0bd93b67-5d98-447b-a60f-0f5fa045887e\" (UID: \"0bd93b67-5d98-447b-a60f-0f5fa045887e\") "
Feb 03 07:04:38 crc kubenswrapper[4998]: I0203 07:04:38.716311 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0bd93b67-5d98-447b-a60f-0f5fa045887e-config\") pod \"0bd93b67-5d98-447b-a60f-0f5fa045887e\" (UID: \"0bd93b67-5d98-447b-a60f-0f5fa045887e\") "
Feb 03 07:04:38 crc kubenswrapper[4998]: I0203 07:04:38.728618 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0bd93b67-5d98-447b-a60f-0f5fa045887e-kube-api-access-rqvxs" (OuterVolumeSpecName: "kube-api-access-rqvxs") pod "0bd93b67-5d98-447b-a60f-0f5fa045887e" (UID: 
"0bd93b67-5d98-447b-a60f-0f5fa045887e"). InnerVolumeSpecName "kube-api-access-rqvxs". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:04:38 crc kubenswrapper[4998]: I0203 07:04:38.735562 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0bd93b67-5d98-447b-a60f-0f5fa045887e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0bd93b67-5d98-447b-a60f-0f5fa045887e" (UID: "0bd93b67-5d98-447b-a60f-0f5fa045887e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:04:38 crc kubenswrapper[4998]: I0203 07:04:38.736304 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0bd93b67-5d98-447b-a60f-0f5fa045887e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "0bd93b67-5d98-447b-a60f-0f5fa045887e" (UID: "0bd93b67-5d98-447b-a60f-0f5fa045887e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:04:38 crc kubenswrapper[4998]: I0203 07:04:38.737185 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0bd93b67-5d98-447b-a60f-0f5fa045887e-config" (OuterVolumeSpecName: "config") pod "0bd93b67-5d98-447b-a60f-0f5fa045887e" (UID: "0bd93b67-5d98-447b-a60f-0f5fa045887e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:04:38 crc kubenswrapper[4998]: I0203 07:04:38.818217 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0bd93b67-5d98-447b-a60f-0f5fa045887e-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:04:38 crc kubenswrapper[4998]: I0203 07:04:38.818254 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0bd93b67-5d98-447b-a60f-0f5fa045887e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 03 07:04:38 crc kubenswrapper[4998]: I0203 07:04:38.818264 4998 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0bd93b67-5d98-447b-a60f-0f5fa045887e-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 07:04:38 crc kubenswrapper[4998]: I0203 07:04:38.818274 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rqvxs\" (UniqueName: \"kubernetes.io/projected/0bd93b67-5d98-447b-a60f-0f5fa045887e-kube-api-access-rqvxs\") on node \"crc\" DevicePath \"\"" Feb 03 07:04:39 crc kubenswrapper[4998]: I0203 07:04:39.334429 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"07088226-5029-4477-a6e1-85fd28c08f4b","Type":"ContainerStarted","Data":"2cdd99896bab2110d33a99e37505b18a533323515d07709ebc04752d31e7014b"} Feb 03 07:04:39 crc kubenswrapper[4998]: I0203 07:04:39.337674 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df986d9c-4vcgt" event={"ID":"ed67f460-eadc-4355-a6b2-bb5f46615481","Type":"ContainerStarted","Data":"fbb376643c7409727027c1db0cc8550d9e85bc6669d8e93133191151135aa55e"} Feb 03 07:04:39 crc kubenswrapper[4998]: I0203 07:04:39.337823 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-56df986d9c-4vcgt" Feb 03 07:04:39 crc kubenswrapper[4998]: I0203 07:04:39.339633 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86dddb8dc7-t6shx" 
event={"ID":"0bd93b67-5d98-447b-a60f-0f5fa045887e","Type":"ContainerDied","Data":"d1966f27a7f11dac3a31310eef24ea8799e1512b2a0a1c90f9133dfdf791e1f4"} Feb 03 07:04:39 crc kubenswrapper[4998]: I0203 07:04:39.339811 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86dddb8dc7-t6shx" Feb 03 07:04:39 crc kubenswrapper[4998]: I0203 07:04:39.340847 4998 scope.go:117] "RemoveContainer" containerID="34117c9834e526479e5c8922297f511787a4d5b725d7d9a1bc6a94239d26fe0f" Feb 03 07:04:39 crc kubenswrapper[4998]: I0203 07:04:39.361407 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-56df986d9c-4vcgt" podStartSLOduration=3.361384893 podStartE2EDuration="3.361384893s" podCreationTimestamp="2026-02-03 07:04:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:04:39.356121962 +0000 UTC m=+1117.642815788" watchObservedRunningTime="2026-02-03 07:04:39.361384893 +0000 UTC m=+1117.648078709" Feb 03 07:04:39 crc kubenswrapper[4998]: I0203 07:04:39.462015 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86dddb8dc7-t6shx"] Feb 03 07:04:39 crc kubenswrapper[4998]: I0203 07:04:39.471709 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86dddb8dc7-t6shx"] Feb 03 07:04:40 crc kubenswrapper[4998]: I0203 07:04:40.441002 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0bd93b67-5d98-447b-a60f-0f5fa045887e" path="/var/lib/kubelet/pods/0bd93b67-5d98-447b-a60f-0f5fa045887e/volumes" Feb 03 07:04:40 crc kubenswrapper[4998]: I0203 07:04:40.729930 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Feb 03 07:04:41 crc kubenswrapper[4998]: I0203 07:04:41.017325 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-2g7ft"] Feb 03 07:04:41 crc kubenswrapper[4998]: E0203 07:04:41.017940 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0bd93b67-5d98-447b-a60f-0f5fa045887e" containerName="init" Feb 03 07:04:41 crc kubenswrapper[4998]: I0203 07:04:41.017968 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="0bd93b67-5d98-447b-a60f-0f5fa045887e" containerName="init" Feb 03 07:04:41 crc kubenswrapper[4998]: I0203 07:04:41.018352 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="0bd93b67-5d98-447b-a60f-0f5fa045887e" containerName="init" Feb 03 07:04:41 crc kubenswrapper[4998]: I0203 07:04:41.019240 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-2g7ft" Feb 03 07:04:41 crc kubenswrapper[4998]: I0203 07:04:41.024134 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-11d4-account-create-update-s4js8"] Feb 03 07:04:41 crc kubenswrapper[4998]: I0203 07:04:41.025322 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-11d4-account-create-update-s4js8" Feb 03 07:04:41 crc kubenswrapper[4998]: I0203 07:04:41.031486 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Feb 03 07:04:41 crc kubenswrapper[4998]: I0203 07:04:41.041663 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-2g7ft"] Feb 03 07:04:41 crc kubenswrapper[4998]: I0203 07:04:41.071166 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-11d4-account-create-update-s4js8"] Feb 03 07:04:41 crc kubenswrapper[4998]: I0203 07:04:41.159590 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fd95241e-3a56-48e9-9570-ac7eef566dd4-operator-scripts\") pod \"glance-11d4-account-create-update-s4js8\" (UID: \"fd95241e-3a56-48e9-9570-ac7eef566dd4\") " pod="openstack/glance-11d4-account-create-update-s4js8" Feb 03 07:04:41 crc kubenswrapper[4998]: I0203 07:04:41.159667 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a33e3d6c-b78a-4335-bf80-e0f7171c5678-operator-scripts\") pod \"glance-db-create-2g7ft\" (UID: \"a33e3d6c-b78a-4335-bf80-e0f7171c5678\") " pod="openstack/glance-db-create-2g7ft" Feb 03 07:04:41 crc kubenswrapper[4998]: I0203 07:04:41.159953 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crdmv\" (UniqueName: \"kubernetes.io/projected/fd95241e-3a56-48e9-9570-ac7eef566dd4-kube-api-access-crdmv\") pod \"glance-11d4-account-create-update-s4js8\" (UID: \"fd95241e-3a56-48e9-9570-ac7eef566dd4\") " pod="openstack/glance-11d4-account-create-update-s4js8" Feb 03 07:04:41 crc kubenswrapper[4998]: I0203 07:04:41.160117 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nc75v\" (UniqueName: \"kubernetes.io/projected/a33e3d6c-b78a-4335-bf80-e0f7171c5678-kube-api-access-nc75v\") pod \"glance-db-create-2g7ft\" (UID: \"a33e3d6c-b78a-4335-bf80-e0f7171c5678\") " pod="openstack/glance-db-create-2g7ft" Feb 03 07:04:41 crc kubenswrapper[4998]: I0203 07:04:41.261939 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crdmv\" (UniqueName: \"kubernetes.io/projected/fd95241e-3a56-48e9-9570-ac7eef566dd4-kube-api-access-crdmv\") pod \"glance-11d4-account-create-update-s4js8\" (UID: \"fd95241e-3a56-48e9-9570-ac7eef566dd4\") " pod="openstack/glance-11d4-account-create-update-s4js8" Feb 03 07:04:41 crc kubenswrapper[4998]: I0203 07:04:41.262088 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nc75v\" (UniqueName: \"kubernetes.io/projected/a33e3d6c-b78a-4335-bf80-e0f7171c5678-kube-api-access-nc75v\") pod \"glance-db-create-2g7ft\" (UID: \"a33e3d6c-b78a-4335-bf80-e0f7171c5678\") " pod="openstack/glance-db-create-2g7ft" Feb 03 07:04:41 crc kubenswrapper[4998]: I0203 07:04:41.262162 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fd95241e-3a56-48e9-9570-ac7eef566dd4-operator-scripts\") pod \"glance-11d4-account-create-update-s4js8\" (UID: \"fd95241e-3a56-48e9-9570-ac7eef566dd4\") " pod="openstack/glance-11d4-account-create-update-s4js8" Feb 03 07:04:41 crc kubenswrapper[4998]: I0203 
07:04:41.262224 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a33e3d6c-b78a-4335-bf80-e0f7171c5678-operator-scripts\") pod \"glance-db-create-2g7ft\" (UID: \"a33e3d6c-b78a-4335-bf80-e0f7171c5678\") " pod="openstack/glance-db-create-2g7ft" Feb 03 07:04:41 crc kubenswrapper[4998]: I0203 07:04:41.263168 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fd95241e-3a56-48e9-9570-ac7eef566dd4-operator-scripts\") pod \"glance-11d4-account-create-update-s4js8\" (UID: \"fd95241e-3a56-48e9-9570-ac7eef566dd4\") " pod="openstack/glance-11d4-account-create-update-s4js8" Feb 03 07:04:41 crc kubenswrapper[4998]: I0203 07:04:41.263225 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a33e3d6c-b78a-4335-bf80-e0f7171c5678-operator-scripts\") pod \"glance-db-create-2g7ft\" (UID: \"a33e3d6c-b78a-4335-bf80-e0f7171c5678\") " pod="openstack/glance-db-create-2g7ft" Feb 03 07:04:41 crc kubenswrapper[4998]: I0203 07:04:41.279453 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nc75v\" (UniqueName: \"kubernetes.io/projected/a33e3d6c-b78a-4335-bf80-e0f7171c5678-kube-api-access-nc75v\") pod \"glance-db-create-2g7ft\" (UID: \"a33e3d6c-b78a-4335-bf80-e0f7171c5678\") " pod="openstack/glance-db-create-2g7ft" Feb 03 07:04:41 crc kubenswrapper[4998]: I0203 07:04:41.281681 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-crdmv\" (UniqueName: \"kubernetes.io/projected/fd95241e-3a56-48e9-9570-ac7eef566dd4-kube-api-access-crdmv\") pod \"glance-11d4-account-create-update-s4js8\" (UID: \"fd95241e-3a56-48e9-9570-ac7eef566dd4\") " pod="openstack/glance-11d4-account-create-update-s4js8" Feb 03 07:04:41 crc kubenswrapper[4998]: I0203 07:04:41.357591 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-2g7ft" Feb 03 07:04:41 crc kubenswrapper[4998]: I0203 07:04:41.362726 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"07088226-5029-4477-a6e1-85fd28c08f4b","Type":"ContainerStarted","Data":"a172aacd50c8df3a6ef1e772e9a7a1499cc74d9310858af6283ac53328c157db"} Feb 03 07:04:41 crc kubenswrapper[4998]: I0203 07:04:41.362770 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"07088226-5029-4477-a6e1-85fd28c08f4b","Type":"ContainerStarted","Data":"dd33cc14982f5158ae849d70849339c78aeeeb743bea375fe57e0cde15366ab8"} Feb 03 07:04:41 crc kubenswrapper[4998]: I0203 07:04:41.362963 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Feb 03 07:04:41 crc kubenswrapper[4998]: I0203 07:04:41.371692 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-11d4-account-create-update-s4js8"
Feb 03 07:04:41 crc kubenswrapper[4998]: I0203 07:04:41.389373 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.417684117 podStartE2EDuration="4.389352345s" podCreationTimestamp="2026-02-03 07:04:37 +0000 UTC" firstStartedPulling="2026-02-03 07:04:38.461056719 +0000 UTC m=+1116.747750525" lastFinishedPulling="2026-02-03 07:04:40.432724947 +0000 UTC m=+1118.719418753" observedRunningTime="2026-02-03 07:04:41.380833861 +0000 UTC m=+1119.667527717" watchObservedRunningTime="2026-02-03 07:04:41.389352345 +0000 UTC m=+1119.676046151"
Feb 03 07:04:41 crc kubenswrapper[4998]: I0203 07:04:41.816449 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-2g7ft"]
Feb 03 07:04:41 crc kubenswrapper[4998]: I0203 07:04:41.880474 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-11d4-account-create-update-s4js8"]
Feb 03 07:04:41 crc kubenswrapper[4998]: W0203 07:04:41.881544 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfd95241e_3a56_48e9_9570_ac7eef566dd4.slice/crio-46514dfa8c1bd176fabc3c3941431a0dfc60be7ec573ae9c25efbf0cdacd2d8e WatchSource:0}: Error finding container 46514dfa8c1bd176fabc3c3941431a0dfc60be7ec573ae9c25efbf0cdacd2d8e: Status 404 returned error can't find the container with id 46514dfa8c1bd176fabc3c3941431a0dfc60be7ec573ae9c25efbf0cdacd2d8e
Feb 03 07:04:42 crc kubenswrapper[4998]: I0203 07:04:42.382134 4998 generic.go:334] "Generic (PLEG): container finished" podID="fd95241e-3a56-48e9-9570-ac7eef566dd4" containerID="a491bbb6b2cb80c8238082a8dedea874440d41b3f00f624c337e1bbe06455465" exitCode=0
Feb 03 07:04:42 crc kubenswrapper[4998]: I0203 07:04:42.382233 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-11d4-account-create-update-s4js8" event={"ID":"fd95241e-3a56-48e9-9570-ac7eef566dd4","Type":"ContainerDied","Data":"a491bbb6b2cb80c8238082a8dedea874440d41b3f00f624c337e1bbe06455465"}
Feb 03 07:04:42 crc kubenswrapper[4998]: I0203 07:04:42.382293 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-11d4-account-create-update-s4js8" event={"ID":"fd95241e-3a56-48e9-9570-ac7eef566dd4","Type":"ContainerStarted","Data":"46514dfa8c1bd176fabc3c3941431a0dfc60be7ec573ae9c25efbf0cdacd2d8e"}
Feb 03 07:04:42 crc kubenswrapper[4998]: I0203 07:04:42.385071 4998 generic.go:334] "Generic (PLEG): container finished" podID="a33e3d6c-b78a-4335-bf80-e0f7171c5678" containerID="061ac6733e0bb70acdd97b1a3c75dfe63d66d1c4c9298951ec4792658e9cdb4e" exitCode=0
Feb 03 07:04:42 crc kubenswrapper[4998]: I0203 07:04:42.385399 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-2g7ft" event={"ID":"a33e3d6c-b78a-4335-bf80-e0f7171c5678","Type":"ContainerDied","Data":"061ac6733e0bb70acdd97b1a3c75dfe63d66d1c4c9298951ec4792658e9cdb4e"}
Feb 03 07:04:42 crc kubenswrapper[4998]: I0203 07:04:42.385421 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-2g7ft" event={"ID":"a33e3d6c-b78a-4335-bf80-e0f7171c5678","Type":"ContainerStarted","Data":"441b2de04c2d4800262a93d4f9a494e0ed530720430a0a21762a2672b4012cfc"}
Feb 03 07:04:42 crc kubenswrapper[4998]: I0203 07:04:42.599924 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-bwzlv"]
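The "Observed pod startup duration" entry above records two numbers for ovn-northd-0: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp (07:04:41.389352345 - 07:04:37 = 4.389352345s), while podStartSLOduration additionally excludes the image-pull window, lastFinishedPulling - firstStartedPulling (07:04:40.432724947 - 07:04:38.461056719 = 1.971668228s), giving 2.417684117s, which matches the logged value. A small Go check of that arithmetic using the wall-clock timestamps from the entry (the m=+... monotonic suffixes are dropped):

package main

import (
	"fmt"
	"time"
)

func mustParse(s string) time.Time {
	// Layout matches the log's "2026-02-03 07:04:37 +0000 UTC" style;
	// time.Parse accepts the optional fractional seconds automatically.
	t, err := time.Parse("2006-01-02 15:04:05 -0700 MST", s)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	created := mustParse("2026-02-03 07:04:37 +0000 UTC")
	observed := mustParse("2026-02-03 07:04:41.389352345 +0000 UTC")
	firstPull := mustParse("2026-02-03 07:04:38.461056719 +0000 UTC")
	lastPull := mustParse("2026-02-03 07:04:40.432724947 +0000 UTC")

	e2e := observed.Sub(created)         // podStartE2EDuration
	slo := e2e - lastPull.Sub(firstPull) // SLO duration excludes image pulling
	fmt.Println(e2e, slo)                // 4.389352345s 2.417684117s
}

For ovn-controller-metrics-qtllb a few entries earlier the two durations are equal because both pull timestamps are the zero value (no image pull was needed), so nothing is subtracted.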
Feb 03 07:04:42 crc kubenswrapper[4998]: I0203 07:04:42.601113 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-bwzlv"
Feb 03 07:04:42 crc kubenswrapper[4998]: I0203 07:04:42.605654 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-mariadb-root-db-secret"
Feb 03 07:04:42 crc kubenswrapper[4998]: I0203 07:04:42.609170 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-bwzlv"]
Feb 03 07:04:42 crc kubenswrapper[4998]: I0203 07:04:42.791716 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8g2s6\" (UniqueName: \"kubernetes.io/projected/f111db0d-d66c-4eb1-af19-56741a2d109f-kube-api-access-8g2s6\") pod \"root-account-create-update-bwzlv\" (UID: \"f111db0d-d66c-4eb1-af19-56741a2d109f\") " pod="openstack/root-account-create-update-bwzlv"
Feb 03 07:04:42 crc kubenswrapper[4998]: I0203 07:04:42.791953 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f111db0d-d66c-4eb1-af19-56741a2d109f-operator-scripts\") pod \"root-account-create-update-bwzlv\" (UID: \"f111db0d-d66c-4eb1-af19-56741a2d109f\") " pod="openstack/root-account-create-update-bwzlv"
Feb 03 07:04:42 crc kubenswrapper[4998]: I0203 07:04:42.893094 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8g2s6\" (UniqueName: \"kubernetes.io/projected/f111db0d-d66c-4eb1-af19-56741a2d109f-kube-api-access-8g2s6\") pod \"root-account-create-update-bwzlv\" (UID: \"f111db0d-d66c-4eb1-af19-56741a2d109f\") " pod="openstack/root-account-create-update-bwzlv"
Feb 03 07:04:42 crc kubenswrapper[4998]: I0203 07:04:42.893198 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f111db0d-d66c-4eb1-af19-56741a2d109f-operator-scripts\") pod \"root-account-create-update-bwzlv\" (UID: \"f111db0d-d66c-4eb1-af19-56741a2d109f\") " pod="openstack/root-account-create-update-bwzlv"
Feb 03 07:04:42 crc kubenswrapper[4998]: I0203 07:04:42.894294 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f111db0d-d66c-4eb1-af19-56741a2d109f-operator-scripts\") pod \"root-account-create-update-bwzlv\" (UID: \"f111db0d-d66c-4eb1-af19-56741a2d109f\") " pod="openstack/root-account-create-update-bwzlv"
Feb 03 07:04:42 crc kubenswrapper[4998]: I0203 07:04:42.913431 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8g2s6\" (UniqueName: \"kubernetes.io/projected/f111db0d-d66c-4eb1-af19-56741a2d109f-kube-api-access-8g2s6\") pod \"root-account-create-update-bwzlv\" (UID: \"f111db0d-d66c-4eb1-af19-56741a2d109f\") " pod="openstack/root-account-create-update-bwzlv"
Feb 03 07:04:42 crc kubenswrapper[4998]: I0203 07:04:42.916307 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-bwzlv" Feb 03 07:04:43 crc kubenswrapper[4998]: I0203 07:04:43.364583 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-bwzlv"] Feb 03 07:04:43 crc kubenswrapper[4998]: I0203 07:04:43.395494 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-bwzlv" event={"ID":"f111db0d-d66c-4eb1-af19-56741a2d109f","Type":"ContainerStarted","Data":"f187030f2ec494b3801b254bba1c0a73ff17a58380e8de5286d620210bbe7a69"} Feb 03 07:04:43 crc kubenswrapper[4998]: I0203 07:04:43.701748 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-11d4-account-create-update-s4js8" Feb 03 07:04:43 crc kubenswrapper[4998]: I0203 07:04:43.714430 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-crdmv\" (UniqueName: \"kubernetes.io/projected/fd95241e-3a56-48e9-9570-ac7eef566dd4-kube-api-access-crdmv\") pod \"fd95241e-3a56-48e9-9570-ac7eef566dd4\" (UID: \"fd95241e-3a56-48e9-9570-ac7eef566dd4\") " Feb 03 07:04:43 crc kubenswrapper[4998]: I0203 07:04:43.714514 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fd95241e-3a56-48e9-9570-ac7eef566dd4-operator-scripts\") pod \"fd95241e-3a56-48e9-9570-ac7eef566dd4\" (UID: \"fd95241e-3a56-48e9-9570-ac7eef566dd4\") " Feb 03 07:04:43 crc kubenswrapper[4998]: I0203 07:04:43.715283 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd95241e-3a56-48e9-9570-ac7eef566dd4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fd95241e-3a56-48e9-9570-ac7eef566dd4" (UID: "fd95241e-3a56-48e9-9570-ac7eef566dd4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:04:43 crc kubenswrapper[4998]: I0203 07:04:43.720698 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd95241e-3a56-48e9-9570-ac7eef566dd4-kube-api-access-crdmv" (OuterVolumeSpecName: "kube-api-access-crdmv") pod "fd95241e-3a56-48e9-9570-ac7eef566dd4" (UID: "fd95241e-3a56-48e9-9570-ac7eef566dd4"). InnerVolumeSpecName "kube-api-access-crdmv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:04:43 crc kubenswrapper[4998]: I0203 07:04:43.749725 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-2g7ft" Feb 03 07:04:43 crc kubenswrapper[4998]: I0203 07:04:43.816022 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-crdmv\" (UniqueName: \"kubernetes.io/projected/fd95241e-3a56-48e9-9570-ac7eef566dd4-kube-api-access-crdmv\") on node \"crc\" DevicePath \"\"" Feb 03 07:04:43 crc kubenswrapper[4998]: I0203 07:04:43.816055 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fd95241e-3a56-48e9-9570-ac7eef566dd4-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:04:43 crc kubenswrapper[4998]: I0203 07:04:43.917272 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nc75v\" (UniqueName: \"kubernetes.io/projected/a33e3d6c-b78a-4335-bf80-e0f7171c5678-kube-api-access-nc75v\") pod \"a33e3d6c-b78a-4335-bf80-e0f7171c5678\" (UID: \"a33e3d6c-b78a-4335-bf80-e0f7171c5678\") " Feb 03 07:04:43 crc kubenswrapper[4998]: I0203 07:04:43.917462 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a33e3d6c-b78a-4335-bf80-e0f7171c5678-operator-scripts\") pod \"a33e3d6c-b78a-4335-bf80-e0f7171c5678\" (UID: \"a33e3d6c-b78a-4335-bf80-e0f7171c5678\") " Feb 03 07:04:43 crc kubenswrapper[4998]: I0203 07:04:43.917929 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a33e3d6c-b78a-4335-bf80-e0f7171c5678-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a33e3d6c-b78a-4335-bf80-e0f7171c5678" (UID: "a33e3d6c-b78a-4335-bf80-e0f7171c5678"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:04:43 crc kubenswrapper[4998]: I0203 07:04:43.920608 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a33e3d6c-b78a-4335-bf80-e0f7171c5678-kube-api-access-nc75v" (OuterVolumeSpecName: "kube-api-access-nc75v") pod "a33e3d6c-b78a-4335-bf80-e0f7171c5678" (UID: "a33e3d6c-b78a-4335-bf80-e0f7171c5678"). InnerVolumeSpecName "kube-api-access-nc75v". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:04:44 crc kubenswrapper[4998]: I0203 07:04:44.019346 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a33e3d6c-b78a-4335-bf80-e0f7171c5678-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:04:44 crc kubenswrapper[4998]: I0203 07:04:44.019391 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nc75v\" (UniqueName: \"kubernetes.io/projected/a33e3d6c-b78a-4335-bf80-e0f7171c5678-kube-api-access-nc75v\") on node \"crc\" DevicePath \"\"" Feb 03 07:04:44 crc kubenswrapper[4998]: I0203 07:04:44.407738 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-2g7ft" event={"ID":"a33e3d6c-b78a-4335-bf80-e0f7171c5678","Type":"ContainerDied","Data":"441b2de04c2d4800262a93d4f9a494e0ed530720430a0a21762a2672b4012cfc"} Feb 03 07:04:44 crc kubenswrapper[4998]: I0203 07:04:44.407767 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-2g7ft"
Feb 03 07:04:44 crc kubenswrapper[4998]: I0203 07:04:44.407804 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="441b2de04c2d4800262a93d4f9a494e0ed530720430a0a21762a2672b4012cfc"
Feb 03 07:04:44 crc kubenswrapper[4998]: I0203 07:04:44.410171 4998 generic.go:334] "Generic (PLEG): container finished" podID="f111db0d-d66c-4eb1-af19-56741a2d109f" containerID="ac9c68809e2d90ac2e9b04b6da1d48989afb04d2b09a5e840256a3d3c2cf1c3c" exitCode=0
Feb 03 07:04:44 crc kubenswrapper[4998]: I0203 07:04:44.410275 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-bwzlv" event={"ID":"f111db0d-d66c-4eb1-af19-56741a2d109f","Type":"ContainerDied","Data":"ac9c68809e2d90ac2e9b04b6da1d48989afb04d2b09a5e840256a3d3c2cf1c3c"}
Feb 03 07:04:44 crc kubenswrapper[4998]: I0203 07:04:44.413806 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-11d4-account-create-update-s4js8" event={"ID":"fd95241e-3a56-48e9-9570-ac7eef566dd4","Type":"ContainerDied","Data":"46514dfa8c1bd176fabc3c3941431a0dfc60be7ec573ae9c25efbf0cdacd2d8e"}
Feb 03 07:04:44 crc kubenswrapper[4998]: I0203 07:04:44.413846 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="46514dfa8c1bd176fabc3c3941431a0dfc60be7ec573ae9c25efbf0cdacd2d8e"
Feb 03 07:04:44 crc kubenswrapper[4998]: I0203 07:04:44.413851 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-11d4-account-create-update-s4js8"
Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.338329 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-jt7wn"]
Feb 03 07:04:45 crc kubenswrapper[4998]: E0203 07:04:45.338880 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd95241e-3a56-48e9-9570-ac7eef566dd4" containerName="mariadb-account-create-update"
Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.338905 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd95241e-3a56-48e9-9570-ac7eef566dd4" containerName="mariadb-account-create-update"
Feb 03 07:04:45 crc kubenswrapper[4998]: E0203 07:04:45.338926 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a33e3d6c-b78a-4335-bf80-e0f7171c5678" containerName="mariadb-database-create"
Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.338938 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="a33e3d6c-b78a-4335-bf80-e0f7171c5678" containerName="mariadb-database-create"
Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.339216 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="a33e3d6c-b78a-4335-bf80-e0f7171c5678" containerName="mariadb-database-create"
Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.339242 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd95241e-3a56-48e9-9570-ac7eef566dd4" containerName="mariadb-account-create-update"
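The E-level cpu_manager lines above are not failures: on pod admission the CPU and memory managers sweep their checkpointed per-container state and drop entries whose pods are gone, and cpu_manager.go happens to log each removal at error level. Here the state for the finished mariadb-database-create and mariadb-account-create-update containers is deleted just as keystone-db-create-jt7wn is admitted. A minimal Go sketch of such a stale-state sweep (hypothetical map layout, not the kubelet's checkpoint format):

package main

import "fmt"

// removeStaleState drops per-container assignments for pods the kubelet no
// longer tracks, in the spirit of cpu_manager's RemoveStaleState.
func removeStaleState(assignments map[string]map[string]string, activePods map[string]bool) {
	for podUID, containers := range assignments {
		if activePods[podUID] {
			continue
		}
		for containerName := range containers {
			fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n",
				podUID, containerName)
		}
		delete(assignments, podUID) // deleting while ranging is safe in Go
	}
}

func main() {
	assignments := map[string]map[string]string{
		"a33e3d6c": {"mariadb-database-create": "cpuset 0-3"},
		"live-pod": {"app": "cpuset 0-3"},
	}
	removeStaleState(assignments, map[string]bool{"live-pod": true})
	fmt.Println(len(assignments)) // 1: only the live pod's state remains
}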
Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.340061 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-jt7wn"
Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.350546 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-jt7wn"]
Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.449243 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-9c9a-account-create-update-c26qq"]
Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.450251 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9c9a-account-create-update-c26qq"
Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.452669 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret"
Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.477553 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-9c9a-account-create-update-c26qq"]
Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.543005 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6732bc1e-c4ad-4519-bcfd-3c973c7528a6-operator-scripts\") pod \"keystone-db-create-jt7wn\" (UID: \"6732bc1e-c4ad-4519-bcfd-3c973c7528a6\") " pod="openstack/keystone-db-create-jt7wn"
Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.543146 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z78tl\" (UniqueName: \"kubernetes.io/projected/6732bc1e-c4ad-4519-bcfd-3c973c7528a6-kube-api-access-z78tl\") pod \"keystone-db-create-jt7wn\" (UID: \"6732bc1e-c4ad-4519-bcfd-3c973c7528a6\") " pod="openstack/keystone-db-create-jt7wn"
Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.623242 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-6799k"]
Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.626405 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-6799k" Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.630595 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-6799k"] Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.644269 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z78tl\" (UniqueName: \"kubernetes.io/projected/6732bc1e-c4ad-4519-bcfd-3c973c7528a6-kube-api-access-z78tl\") pod \"keystone-db-create-jt7wn\" (UID: \"6732bc1e-c4ad-4519-bcfd-3c973c7528a6\") " pod="openstack/keystone-db-create-jt7wn" Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.644329 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b2902281-e838-41cf-bc34-89850ed0cf83-operator-scripts\") pod \"keystone-9c9a-account-create-update-c26qq\" (UID: \"b2902281-e838-41cf-bc34-89850ed0cf83\") " pod="openstack/keystone-9c9a-account-create-update-c26qq" Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.644454 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6732bc1e-c4ad-4519-bcfd-3c973c7528a6-operator-scripts\") pod \"keystone-db-create-jt7wn\" (UID: \"6732bc1e-c4ad-4519-bcfd-3c973c7528a6\") " pod="openstack/keystone-db-create-jt7wn" Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.644507 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c5284078-0b06-4c22-bf9b-87b31d8f5e0f-operator-scripts\") pod \"placement-db-create-6799k\" (UID: \"c5284078-0b06-4c22-bf9b-87b31d8f5e0f\") " pod="openstack/placement-db-create-6799k" Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.644543 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vx5jp\" (UniqueName: \"kubernetes.io/projected/c5284078-0b06-4c22-bf9b-87b31d8f5e0f-kube-api-access-vx5jp\") pod \"placement-db-create-6799k\" (UID: \"c5284078-0b06-4c22-bf9b-87b31d8f5e0f\") " pod="openstack/placement-db-create-6799k" Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.644571 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xb8lt\" (UniqueName: \"kubernetes.io/projected/b2902281-e838-41cf-bc34-89850ed0cf83-kube-api-access-xb8lt\") pod \"keystone-9c9a-account-create-update-c26qq\" (UID: \"b2902281-e838-41cf-bc34-89850ed0cf83\") " pod="openstack/keystone-9c9a-account-create-update-c26qq" Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.645586 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6732bc1e-c4ad-4519-bcfd-3c973c7528a6-operator-scripts\") pod \"keystone-db-create-jt7wn\" (UID: \"6732bc1e-c4ad-4519-bcfd-3c973c7528a6\") " pod="openstack/keystone-db-create-jt7wn" Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.672603 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z78tl\" (UniqueName: \"kubernetes.io/projected/6732bc1e-c4ad-4519-bcfd-3c973c7528a6-kube-api-access-z78tl\") pod \"keystone-db-create-jt7wn\" (UID: \"6732bc1e-c4ad-4519-bcfd-3c973c7528a6\") " pod="openstack/keystone-db-create-jt7wn" Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.742688 4998 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-b3a0-account-create-update-2r7bm"] Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.743834 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-b3a0-account-create-update-2r7bm" Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.745854 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.745989 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c5284078-0b06-4c22-bf9b-87b31d8f5e0f-operator-scripts\") pod \"placement-db-create-6799k\" (UID: \"c5284078-0b06-4c22-bf9b-87b31d8f5e0f\") " pod="openstack/placement-db-create-6799k" Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.746059 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vx5jp\" (UniqueName: \"kubernetes.io/projected/c5284078-0b06-4c22-bf9b-87b31d8f5e0f-kube-api-access-vx5jp\") pod \"placement-db-create-6799k\" (UID: \"c5284078-0b06-4c22-bf9b-87b31d8f5e0f\") " pod="openstack/placement-db-create-6799k" Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.746104 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a1e61909-9629-4f0e-8393-508ce99aff63-operator-scripts\") pod \"placement-b3a0-account-create-update-2r7bm\" (UID: \"a1e61909-9629-4f0e-8393-508ce99aff63\") " pod="openstack/placement-b3a0-account-create-update-2r7bm" Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.746137 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hm6jq\" (UniqueName: \"kubernetes.io/projected/a1e61909-9629-4f0e-8393-508ce99aff63-kube-api-access-hm6jq\") pod \"placement-b3a0-account-create-update-2r7bm\" (UID: \"a1e61909-9629-4f0e-8393-508ce99aff63\") " pod="openstack/placement-b3a0-account-create-update-2r7bm" Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.746167 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xb8lt\" (UniqueName: \"kubernetes.io/projected/b2902281-e838-41cf-bc34-89850ed0cf83-kube-api-access-xb8lt\") pod \"keystone-9c9a-account-create-update-c26qq\" (UID: \"b2902281-e838-41cf-bc34-89850ed0cf83\") " pod="openstack/keystone-9c9a-account-create-update-c26qq" Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.746300 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b2902281-e838-41cf-bc34-89850ed0cf83-operator-scripts\") pod \"keystone-9c9a-account-create-update-c26qq\" (UID: \"b2902281-e838-41cf-bc34-89850ed0cf83\") " pod="openstack/keystone-9c9a-account-create-update-c26qq" Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.746636 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c5284078-0b06-4c22-bf9b-87b31d8f5e0f-operator-scripts\") pod \"placement-db-create-6799k\" (UID: \"c5284078-0b06-4c22-bf9b-87b31d8f5e0f\") " pod="openstack/placement-db-create-6799k" Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.747214 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/b2902281-e838-41cf-bc34-89850ed0cf83-operator-scripts\") pod \"keystone-9c9a-account-create-update-c26qq\" (UID: \"b2902281-e838-41cf-bc34-89850ed0cf83\") " pod="openstack/keystone-9c9a-account-create-update-c26qq" Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.752626 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-bwzlv" Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.753513 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-b3a0-account-create-update-2r7bm"] Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.766163 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vx5jp\" (UniqueName: \"kubernetes.io/projected/c5284078-0b06-4c22-bf9b-87b31d8f5e0f-kube-api-access-vx5jp\") pod \"placement-db-create-6799k\" (UID: \"c5284078-0b06-4c22-bf9b-87b31d8f5e0f\") " pod="openstack/placement-db-create-6799k" Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.770057 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xb8lt\" (UniqueName: \"kubernetes.io/projected/b2902281-e838-41cf-bc34-89850ed0cf83-kube-api-access-xb8lt\") pod \"keystone-9c9a-account-create-update-c26qq\" (UID: \"b2902281-e838-41cf-bc34-89850ed0cf83\") " pod="openstack/keystone-9c9a-account-create-update-c26qq" Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.778371 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9c9a-account-create-update-c26qq" Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.847460 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8g2s6\" (UniqueName: \"kubernetes.io/projected/f111db0d-d66c-4eb1-af19-56741a2d109f-kube-api-access-8g2s6\") pod \"f111db0d-d66c-4eb1-af19-56741a2d109f\" (UID: \"f111db0d-d66c-4eb1-af19-56741a2d109f\") " Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.847615 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f111db0d-d66c-4eb1-af19-56741a2d109f-operator-scripts\") pod \"f111db0d-d66c-4eb1-af19-56741a2d109f\" (UID: \"f111db0d-d66c-4eb1-af19-56741a2d109f\") " Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.847753 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hm6jq\" (UniqueName: \"kubernetes.io/projected/a1e61909-9629-4f0e-8393-508ce99aff63-kube-api-access-hm6jq\") pod \"placement-b3a0-account-create-update-2r7bm\" (UID: \"a1e61909-9629-4f0e-8393-508ce99aff63\") " pod="openstack/placement-b3a0-account-create-update-2r7bm" Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.847796 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a1e61909-9629-4f0e-8393-508ce99aff63-operator-scripts\") pod \"placement-b3a0-account-create-update-2r7bm\" (UID: \"a1e61909-9629-4f0e-8393-508ce99aff63\") " pod="openstack/placement-b3a0-account-create-update-2r7bm" Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.848806 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f111db0d-d66c-4eb1-af19-56741a2d109f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f111db0d-d66c-4eb1-af19-56741a2d109f" (UID: "f111db0d-d66c-4eb1-af19-56741a2d109f"). 
InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.848930 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a1e61909-9629-4f0e-8393-508ce99aff63-operator-scripts\") pod \"placement-b3a0-account-create-update-2r7bm\" (UID: \"a1e61909-9629-4f0e-8393-508ce99aff63\") " pod="openstack/placement-b3a0-account-create-update-2r7bm"
Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.853287 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f111db0d-d66c-4eb1-af19-56741a2d109f-kube-api-access-8g2s6" (OuterVolumeSpecName: "kube-api-access-8g2s6") pod "f111db0d-d66c-4eb1-af19-56741a2d109f" (UID: "f111db0d-d66c-4eb1-af19-56741a2d109f"). InnerVolumeSpecName "kube-api-access-8g2s6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.864728 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hm6jq\" (UniqueName: \"kubernetes.io/projected/a1e61909-9629-4f0e-8393-508ce99aff63-kube-api-access-hm6jq\") pod \"placement-b3a0-account-create-update-2r7bm\" (UID: \"a1e61909-9629-4f0e-8393-508ce99aff63\") " pod="openstack/placement-b3a0-account-create-update-2r7bm"
Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.945522 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-6799k"
Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.949029 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f111db0d-d66c-4eb1-af19-56741a2d109f-operator-scripts\") on node \"crc\" DevicePath \"\""
Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.949096 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8g2s6\" (UniqueName: \"kubernetes.io/projected/f111db0d-d66c-4eb1-af19-56741a2d109f-kube-api-access-8g2s6\") on node \"crc\" DevicePath \"\""
Feb 03 07:04:45 crc kubenswrapper[4998]: I0203 07:04:45.956511 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-jt7wn"
Feb 03 07:04:46 crc kubenswrapper[4998]: I0203 07:04:46.064959 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-b3a0-account-create-update-2r7bm"
Feb 03 07:04:46 crc kubenswrapper[4998]: I0203 07:04:46.162646 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-kvz5p"]
Feb 03 07:04:46 crc kubenswrapper[4998]: E0203 07:04:46.164526 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f111db0d-d66c-4eb1-af19-56741a2d109f" containerName="mariadb-account-create-update"
Feb 03 07:04:46 crc kubenswrapper[4998]: I0203 07:04:46.164543 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f111db0d-d66c-4eb1-af19-56741a2d109f" containerName="mariadb-account-create-update"
Feb 03 07:04:46 crc kubenswrapper[4998]: I0203 07:04:46.164731 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="f111db0d-d66c-4eb1-af19-56741a2d109f" containerName="mariadb-account-create-update"
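Two variants of the sandbox message recur throughout this window: util.go:30 ("No sandbox for pod can be found") for a pod that has never had a sandbox, as with the freshly added db-create and account-create jobs, and util.go:48 ("No ready sandbox for pod can be found") when a previous sandbox exists but is no longer ready, as with the dnsmasq pods whose containers just exited. Either way the sync path responds by creating a new sandbox. A tiny Go sketch of that decision (simplified, not the kuberuntime sources):

package main

import "fmt"

type sandbox struct{ ready bool }

// needsNewSandbox mirrors the two log messages: no sandbox at all,
// versus an existing sandbox that is not ready anymore.
func needsNewSandbox(s *sandbox) (bool, string) {
	switch {
	case s == nil:
		return true, "No sandbox for pod can be found. Need to start a new one"
	case !s.ready:
		return true, "No ready sandbox for pod can be found. Need to start a new one"
	default:
		return false, ""
	}
}

func main() {
	for _, s := range []*sandbox{nil, {ready: false}, {ready: true}} {
		create, msg := needsNewSandbox(s)
		fmt.Println(create, msg)
	}
}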
Feb 03 07:04:46 crc kubenswrapper[4998]: I0203 07:04:46.165260 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-kvz5p"
Feb 03 07:04:46 crc kubenswrapper[4998]: I0203 07:04:46.168465 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-w8hrx"
Feb 03 07:04:46 crc kubenswrapper[4998]: I0203 07:04:46.170259 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data"
Feb 03 07:04:46 crc kubenswrapper[4998]: I0203 07:04:46.184296 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-kvz5p"]
Feb 03 07:04:46 crc kubenswrapper[4998]: W0203 07:04:46.205003 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb2902281_e838_41cf_bc34_89850ed0cf83.slice/crio-34a205d8fc6ec19b4581f864e38c28e156ac69d7dcfbe48bb5faef9311a9da2c WatchSource:0}: Error finding container 34a205d8fc6ec19b4581f864e38c28e156ac69d7dcfbe48bb5faef9311a9da2c: Status 404 returned error can't find the container with id 34a205d8fc6ec19b4581f864e38c28e156ac69d7dcfbe48bb5faef9311a9da2c
Feb 03 07:04:46 crc kubenswrapper[4998]: I0203 07:04:46.209859 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-9c9a-account-create-update-c26qq"]
Feb 03 07:04:46 crc kubenswrapper[4998]: I0203 07:04:46.253127 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0c5507ca-3689-4044-8e7c-37627a2f2759-db-sync-config-data\") pod \"glance-db-sync-kvz5p\" (UID: \"0c5507ca-3689-4044-8e7c-37627a2f2759\") " pod="openstack/glance-db-sync-kvz5p"
Feb 03 07:04:46 crc kubenswrapper[4998]: I0203 07:04:46.253174 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tk5kj\" (UniqueName: \"kubernetes.io/projected/0c5507ca-3689-4044-8e7c-37627a2f2759-kube-api-access-tk5kj\") pod \"glance-db-sync-kvz5p\" (UID: \"0c5507ca-3689-4044-8e7c-37627a2f2759\") " pod="openstack/glance-db-sync-kvz5p"
Feb 03 07:04:46 crc kubenswrapper[4998]: I0203 07:04:46.253202 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c5507ca-3689-4044-8e7c-37627a2f2759-combined-ca-bundle\") pod \"glance-db-sync-kvz5p\" (UID: \"0c5507ca-3689-4044-8e7c-37627a2f2759\") " pod="openstack/glance-db-sync-kvz5p"
Feb 03 07:04:46 crc kubenswrapper[4998]: I0203 07:04:46.253392 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c5507ca-3689-4044-8e7c-37627a2f2759-config-data\") pod \"glance-db-sync-kvz5p\" (UID: \"0c5507ca-3689-4044-8e7c-37627a2f2759\") " pod="openstack/glance-db-sync-kvz5p"
Feb 03 07:04:46 crc kubenswrapper[4998]: I0203 07:04:46.354797 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c5507ca-3689-4044-8e7c-37627a2f2759-config-data\") pod \"glance-db-sync-kvz5p\" (UID: \"0c5507ca-3689-4044-8e7c-37627a2f2759\") " pod="openstack/glance-db-sync-kvz5p"
Feb 03 07:04:46 crc kubenswrapper[4998]: I0203 07:04:46.354911 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0c5507ca-3689-4044-8e7c-37627a2f2759-db-sync-config-data\") pod \"glance-db-sync-kvz5p\" (UID: 
\"0c5507ca-3689-4044-8e7c-37627a2f2759\") " pod="openstack/glance-db-sync-kvz5p" Feb 03 07:04:46 crc kubenswrapper[4998]: I0203 07:04:46.354955 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tk5kj\" (UniqueName: \"kubernetes.io/projected/0c5507ca-3689-4044-8e7c-37627a2f2759-kube-api-access-tk5kj\") pod \"glance-db-sync-kvz5p\" (UID: \"0c5507ca-3689-4044-8e7c-37627a2f2759\") " pod="openstack/glance-db-sync-kvz5p" Feb 03 07:04:46 crc kubenswrapper[4998]: I0203 07:04:46.354989 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c5507ca-3689-4044-8e7c-37627a2f2759-combined-ca-bundle\") pod \"glance-db-sync-kvz5p\" (UID: \"0c5507ca-3689-4044-8e7c-37627a2f2759\") " pod="openstack/glance-db-sync-kvz5p" Feb 03 07:04:46 crc kubenswrapper[4998]: I0203 07:04:46.362434 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c5507ca-3689-4044-8e7c-37627a2f2759-config-data\") pod \"glance-db-sync-kvz5p\" (UID: \"0c5507ca-3689-4044-8e7c-37627a2f2759\") " pod="openstack/glance-db-sync-kvz5p" Feb 03 07:04:46 crc kubenswrapper[4998]: I0203 07:04:46.364001 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0c5507ca-3689-4044-8e7c-37627a2f2759-db-sync-config-data\") pod \"glance-db-sync-kvz5p\" (UID: \"0c5507ca-3689-4044-8e7c-37627a2f2759\") " pod="openstack/glance-db-sync-kvz5p" Feb 03 07:04:46 crc kubenswrapper[4998]: I0203 07:04:46.364193 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c5507ca-3689-4044-8e7c-37627a2f2759-combined-ca-bundle\") pod \"glance-db-sync-kvz5p\" (UID: \"0c5507ca-3689-4044-8e7c-37627a2f2759\") " pod="openstack/glance-db-sync-kvz5p" Feb 03 07:04:46 crc kubenswrapper[4998]: I0203 07:04:46.369499 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-jt7wn"] Feb 03 07:04:46 crc kubenswrapper[4998]: I0203 07:04:46.375333 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tk5kj\" (UniqueName: \"kubernetes.io/projected/0c5507ca-3689-4044-8e7c-37627a2f2759-kube-api-access-tk5kj\") pod \"glance-db-sync-kvz5p\" (UID: \"0c5507ca-3689-4044-8e7c-37627a2f2759\") " pod="openstack/glance-db-sync-kvz5p" Feb 03 07:04:46 crc kubenswrapper[4998]: I0203 07:04:46.441696 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-bwzlv" Feb 03 07:04:46 crc kubenswrapper[4998]: I0203 07:04:46.446706 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-jt7wn" event={"ID":"6732bc1e-c4ad-4519-bcfd-3c973c7528a6","Type":"ContainerStarted","Data":"f6a67d6d03a4e0a7c4c55ab591b0dba1139ffc2ba4ffcdd5c1e3851947534b6a"} Feb 03 07:04:46 crc kubenswrapper[4998]: I0203 07:04:46.446770 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-bwzlv" event={"ID":"f111db0d-d66c-4eb1-af19-56741a2d109f","Type":"ContainerDied","Data":"f187030f2ec494b3801b254bba1c0a73ff17a58380e8de5286d620210bbe7a69"} Feb 03 07:04:46 crc kubenswrapper[4998]: I0203 07:04:46.446844 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f187030f2ec494b3801b254bba1c0a73ff17a58380e8de5286d620210bbe7a69" Feb 03 07:04:46 crc kubenswrapper[4998]: I0203 07:04:46.446853 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9c9a-account-create-update-c26qq" event={"ID":"b2902281-e838-41cf-bc34-89850ed0cf83","Type":"ContainerStarted","Data":"d1d6af522a783d7674ee61fcb657dee27565b766177bbd086e16a0647a84bd0d"} Feb 03 07:04:46 crc kubenswrapper[4998]: I0203 07:04:46.446875 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9c9a-account-create-update-c26qq" event={"ID":"b2902281-e838-41cf-bc34-89850ed0cf83","Type":"ContainerStarted","Data":"34a205d8fc6ec19b4581f864e38c28e156ac69d7dcfbe48bb5faef9311a9da2c"} Feb 03 07:04:46 crc kubenswrapper[4998]: I0203 07:04:46.469228 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-6799k"] Feb 03 07:04:46 crc kubenswrapper[4998]: I0203 07:04:46.469813 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-9c9a-account-create-update-c26qq" podStartSLOduration=1.469768529 podStartE2EDuration="1.469768529s" podCreationTimestamp="2026-02-03 07:04:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:04:46.461946675 +0000 UTC m=+1124.748640501" watchObservedRunningTime="2026-02-03 07:04:46.469768529 +0000 UTC m=+1124.756462335" Feb 03 07:04:46 crc kubenswrapper[4998]: W0203 07:04:46.480530 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc5284078_0b06_4c22_bf9b_87b31d8f5e0f.slice/crio-5726e185deba0a39f9baf1458bafc2dd3d6eb2c182a5f683aa9b13860d520af4 WatchSource:0}: Error finding container 5726e185deba0a39f9baf1458bafc2dd3d6eb2c182a5f683aa9b13860d520af4: Status 404 returned error can't find the container with id 5726e185deba0a39f9baf1458bafc2dd3d6eb2c182a5f683aa9b13860d520af4 Feb 03 07:04:46 crc kubenswrapper[4998]: I0203 07:04:46.499566 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-kvz5p" Feb 03 07:04:46 crc kubenswrapper[4998]: I0203 07:04:46.565639 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-b3a0-account-create-update-2r7bm"] Feb 03 07:04:46 crc kubenswrapper[4998]: W0203 07:04:46.576861 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda1e61909_9629_4f0e_8393_508ce99aff63.slice/crio-cef2868240f40e61628b6a3f508ef8c10dc67c953d9f642a6d9be2d221f04d8c WatchSource:0}: Error finding container cef2868240f40e61628b6a3f508ef8c10dc67c953d9f642a6d9be2d221f04d8c: Status 404 returned error can't find the container with id cef2868240f40e61628b6a3f508ef8c10dc67c953d9f642a6d9be2d221f04d8c Feb 03 07:04:46 crc kubenswrapper[4998]: E0203 07:04:46.949943 4998 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc5284078_0b06_4c22_bf9b_87b31d8f5e0f.slice/crio-3fe1b15374bfe70f0ac51f95cba99d06796b653c66388ab59d23f43ca14f0122.scope\": RecentStats: unable to find data in memory cache]" Feb 03 07:04:47 crc kubenswrapper[4998]: I0203 07:04:47.031589 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-kvz5p"] Feb 03 07:04:47 crc kubenswrapper[4998]: I0203 07:04:47.379950 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-56df986d9c-4vcgt" Feb 03 07:04:47 crc kubenswrapper[4998]: I0203 07:04:47.460964 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67ff45466c-fjqgp"] Feb 03 07:04:47 crc kubenswrapper[4998]: I0203 07:04:47.461220 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-67ff45466c-fjqgp" podUID="f2019872-2dfd-4470-883b-6dbbc86eb084" containerName="dnsmasq-dns" containerID="cri-o://12d7ca4c0757835ffcdddda93220b27f60de75e80e05bdecf4adbc50637ef284" gracePeriod=10 Feb 03 07:04:47 crc kubenswrapper[4998]: I0203 07:04:47.520286 4998 generic.go:334] "Generic (PLEG): container finished" podID="b2902281-e838-41cf-bc34-89850ed0cf83" containerID="d1d6af522a783d7674ee61fcb657dee27565b766177bbd086e16a0647a84bd0d" exitCode=0 Feb 03 07:04:47 crc kubenswrapper[4998]: I0203 07:04:47.520679 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9c9a-account-create-update-c26qq" event={"ID":"b2902281-e838-41cf-bc34-89850ed0cf83","Type":"ContainerDied","Data":"d1d6af522a783d7674ee61fcb657dee27565b766177bbd086e16a0647a84bd0d"} Feb 03 07:04:47 crc kubenswrapper[4998]: I0203 07:04:47.538671 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-66b577f8c-wf7kr"] Feb 03 07:04:47 crc kubenswrapper[4998]: I0203 07:04:47.541195 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-66b577f8c-wf7kr" Feb 03 07:04:47 crc kubenswrapper[4998]: I0203 07:04:47.545041 4998 generic.go:334] "Generic (PLEG): container finished" podID="6732bc1e-c4ad-4519-bcfd-3c973c7528a6" containerID="d7fe5bc42fb67c153e920d4f68e75da5b8c4e36ef046433582efb38a391c6014" exitCode=0 Feb 03 07:04:47 crc kubenswrapper[4998]: I0203 07:04:47.545201 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-jt7wn" event={"ID":"6732bc1e-c4ad-4519-bcfd-3c973c7528a6","Type":"ContainerDied","Data":"d7fe5bc42fb67c153e920d4f68e75da5b8c4e36ef046433582efb38a391c6014"} Feb 03 07:04:47 crc kubenswrapper[4998]: I0203 07:04:47.560392 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-66b577f8c-wf7kr"] Feb 03 07:04:47 crc kubenswrapper[4998]: I0203 07:04:47.569450 4998 generic.go:334] "Generic (PLEG): container finished" podID="c5284078-0b06-4c22-bf9b-87b31d8f5e0f" containerID="3fe1b15374bfe70f0ac51f95cba99d06796b653c66388ab59d23f43ca14f0122" exitCode=0 Feb 03 07:04:47 crc kubenswrapper[4998]: I0203 07:04:47.569667 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-6799k" event={"ID":"c5284078-0b06-4c22-bf9b-87b31d8f5e0f","Type":"ContainerDied","Data":"3fe1b15374bfe70f0ac51f95cba99d06796b653c66388ab59d23f43ca14f0122"} Feb 03 07:04:47 crc kubenswrapper[4998]: I0203 07:04:47.569692 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-6799k" event={"ID":"c5284078-0b06-4c22-bf9b-87b31d8f5e0f","Type":"ContainerStarted","Data":"5726e185deba0a39f9baf1458bafc2dd3d6eb2c182a5f683aa9b13860d520af4"} Feb 03 07:04:47 crc kubenswrapper[4998]: I0203 07:04:47.598330 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-kvz5p" event={"ID":"0c5507ca-3689-4044-8e7c-37627a2f2759","Type":"ContainerStarted","Data":"2c10ac4a4935c7a0a4a12130685f0e7194593fb64878fab62f4d4188e66033fa"} Feb 03 07:04:47 crc kubenswrapper[4998]: I0203 07:04:47.617746 4998 generic.go:334] "Generic (PLEG): container finished" podID="a1e61909-9629-4f0e-8393-508ce99aff63" containerID="3a5719b9e841ae2459fb7dd3047f31ab6b4c642cf9f14d11a5ef0e3e58cedb5c" exitCode=0 Feb 03 07:04:47 crc kubenswrapper[4998]: I0203 07:04:47.617806 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-b3a0-account-create-update-2r7bm" event={"ID":"a1e61909-9629-4f0e-8393-508ce99aff63","Type":"ContainerDied","Data":"3a5719b9e841ae2459fb7dd3047f31ab6b4c642cf9f14d11a5ef0e3e58cedb5c"} Feb 03 07:04:47 crc kubenswrapper[4998]: I0203 07:04:47.617830 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-b3a0-account-create-update-2r7bm" event={"ID":"a1e61909-9629-4f0e-8393-508ce99aff63","Type":"ContainerStarted","Data":"cef2868240f40e61628b6a3f508ef8c10dc67c953d9f642a6d9be2d221f04d8c"} Feb 03 07:04:47 crc kubenswrapper[4998]: I0203 07:04:47.688044 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c394067f-a8c9-412f-b55b-cd70e1081299-config\") pod \"dnsmasq-dns-66b577f8c-wf7kr\" (UID: \"c394067f-a8c9-412f-b55b-cd70e1081299\") " pod="openstack/dnsmasq-dns-66b577f8c-wf7kr" Feb 03 07:04:47 crc kubenswrapper[4998]: I0203 07:04:47.688355 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c394067f-a8c9-412f-b55b-cd70e1081299-dns-svc\") pod 
\"dnsmasq-dns-66b577f8c-wf7kr\" (UID: \"c394067f-a8c9-412f-b55b-cd70e1081299\") " pod="openstack/dnsmasq-dns-66b577f8c-wf7kr" Feb 03 07:04:47 crc kubenswrapper[4998]: I0203 07:04:47.688440 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c394067f-a8c9-412f-b55b-cd70e1081299-ovsdbserver-nb\") pod \"dnsmasq-dns-66b577f8c-wf7kr\" (UID: \"c394067f-a8c9-412f-b55b-cd70e1081299\") " pod="openstack/dnsmasq-dns-66b577f8c-wf7kr" Feb 03 07:04:47 crc kubenswrapper[4998]: I0203 07:04:47.688532 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c394067f-a8c9-412f-b55b-cd70e1081299-ovsdbserver-sb\") pod \"dnsmasq-dns-66b577f8c-wf7kr\" (UID: \"c394067f-a8c9-412f-b55b-cd70e1081299\") " pod="openstack/dnsmasq-dns-66b577f8c-wf7kr" Feb 03 07:04:47 crc kubenswrapper[4998]: I0203 07:04:47.688624 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r684x\" (UniqueName: \"kubernetes.io/projected/c394067f-a8c9-412f-b55b-cd70e1081299-kube-api-access-r684x\") pod \"dnsmasq-dns-66b577f8c-wf7kr\" (UID: \"c394067f-a8c9-412f-b55b-cd70e1081299\") " pod="openstack/dnsmasq-dns-66b577f8c-wf7kr" Feb 03 07:04:47 crc kubenswrapper[4998]: I0203 07:04:47.790339 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c394067f-a8c9-412f-b55b-cd70e1081299-dns-svc\") pod \"dnsmasq-dns-66b577f8c-wf7kr\" (UID: \"c394067f-a8c9-412f-b55b-cd70e1081299\") " pod="openstack/dnsmasq-dns-66b577f8c-wf7kr" Feb 03 07:04:47 crc kubenswrapper[4998]: I0203 07:04:47.790377 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c394067f-a8c9-412f-b55b-cd70e1081299-ovsdbserver-nb\") pod \"dnsmasq-dns-66b577f8c-wf7kr\" (UID: \"c394067f-a8c9-412f-b55b-cd70e1081299\") " pod="openstack/dnsmasq-dns-66b577f8c-wf7kr" Feb 03 07:04:47 crc kubenswrapper[4998]: I0203 07:04:47.790414 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c394067f-a8c9-412f-b55b-cd70e1081299-ovsdbserver-sb\") pod \"dnsmasq-dns-66b577f8c-wf7kr\" (UID: \"c394067f-a8c9-412f-b55b-cd70e1081299\") " pod="openstack/dnsmasq-dns-66b577f8c-wf7kr" Feb 03 07:04:47 crc kubenswrapper[4998]: I0203 07:04:47.790454 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r684x\" (UniqueName: \"kubernetes.io/projected/c394067f-a8c9-412f-b55b-cd70e1081299-kube-api-access-r684x\") pod \"dnsmasq-dns-66b577f8c-wf7kr\" (UID: \"c394067f-a8c9-412f-b55b-cd70e1081299\") " pod="openstack/dnsmasq-dns-66b577f8c-wf7kr" Feb 03 07:04:47 crc kubenswrapper[4998]: I0203 07:04:47.790506 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c394067f-a8c9-412f-b55b-cd70e1081299-config\") pod \"dnsmasq-dns-66b577f8c-wf7kr\" (UID: \"c394067f-a8c9-412f-b55b-cd70e1081299\") " pod="openstack/dnsmasq-dns-66b577f8c-wf7kr" Feb 03 07:04:47 crc kubenswrapper[4998]: I0203 07:04:47.791532 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c394067f-a8c9-412f-b55b-cd70e1081299-ovsdbserver-nb\") pod 
\"dnsmasq-dns-66b577f8c-wf7kr\" (UID: \"c394067f-a8c9-412f-b55b-cd70e1081299\") " pod="openstack/dnsmasq-dns-66b577f8c-wf7kr" Feb 03 07:04:47 crc kubenswrapper[4998]: I0203 07:04:47.791736 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c394067f-a8c9-412f-b55b-cd70e1081299-dns-svc\") pod \"dnsmasq-dns-66b577f8c-wf7kr\" (UID: \"c394067f-a8c9-412f-b55b-cd70e1081299\") " pod="openstack/dnsmasq-dns-66b577f8c-wf7kr" Feb 03 07:04:47 crc kubenswrapper[4998]: I0203 07:04:47.791946 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c394067f-a8c9-412f-b55b-cd70e1081299-config\") pod \"dnsmasq-dns-66b577f8c-wf7kr\" (UID: \"c394067f-a8c9-412f-b55b-cd70e1081299\") " pod="openstack/dnsmasq-dns-66b577f8c-wf7kr" Feb 03 07:04:47 crc kubenswrapper[4998]: I0203 07:04:47.792190 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c394067f-a8c9-412f-b55b-cd70e1081299-ovsdbserver-sb\") pod \"dnsmasq-dns-66b577f8c-wf7kr\" (UID: \"c394067f-a8c9-412f-b55b-cd70e1081299\") " pod="openstack/dnsmasq-dns-66b577f8c-wf7kr" Feb 03 07:04:47 crc kubenswrapper[4998]: I0203 07:04:47.818294 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r684x\" (UniqueName: \"kubernetes.io/projected/c394067f-a8c9-412f-b55b-cd70e1081299-kube-api-access-r684x\") pod \"dnsmasq-dns-66b577f8c-wf7kr\" (UID: \"c394067f-a8c9-412f-b55b-cd70e1081299\") " pod="openstack/dnsmasq-dns-66b577f8c-wf7kr" Feb 03 07:04:47 crc kubenswrapper[4998]: I0203 07:04:47.955282 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-66b577f8c-wf7kr" Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.229515 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67ff45466c-fjqgp" Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.401437 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fzw76\" (UniqueName: \"kubernetes.io/projected/f2019872-2dfd-4470-883b-6dbbc86eb084-kube-api-access-fzw76\") pod \"f2019872-2dfd-4470-883b-6dbbc86eb084\" (UID: \"f2019872-2dfd-4470-883b-6dbbc86eb084\") " Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.401506 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f2019872-2dfd-4470-883b-6dbbc86eb084-dns-svc\") pod \"f2019872-2dfd-4470-883b-6dbbc86eb084\" (UID: \"f2019872-2dfd-4470-883b-6dbbc86eb084\") " Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.401546 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2019872-2dfd-4470-883b-6dbbc86eb084-config\") pod \"f2019872-2dfd-4470-883b-6dbbc86eb084\" (UID: \"f2019872-2dfd-4470-883b-6dbbc86eb084\") " Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.414173 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2019872-2dfd-4470-883b-6dbbc86eb084-kube-api-access-fzw76" (OuterVolumeSpecName: "kube-api-access-fzw76") pod "f2019872-2dfd-4470-883b-6dbbc86eb084" (UID: "f2019872-2dfd-4470-883b-6dbbc86eb084"). InnerVolumeSpecName "kube-api-access-fzw76". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.444270 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2019872-2dfd-4470-883b-6dbbc86eb084-config" (OuterVolumeSpecName: "config") pod "f2019872-2dfd-4470-883b-6dbbc86eb084" (UID: "f2019872-2dfd-4470-883b-6dbbc86eb084"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.466220 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2019872-2dfd-4470-883b-6dbbc86eb084-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f2019872-2dfd-4470-883b-6dbbc86eb084" (UID: "f2019872-2dfd-4470-883b-6dbbc86eb084"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.503936 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fzw76\" (UniqueName: \"kubernetes.io/projected/f2019872-2dfd-4470-883b-6dbbc86eb084-kube-api-access-fzw76\") on node \"crc\" DevicePath \"\"" Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.503970 4998 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f2019872-2dfd-4470-883b-6dbbc86eb084-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.503979 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2019872-2dfd-4470-883b-6dbbc86eb084-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:04:48 crc kubenswrapper[4998]: W0203 07:04:48.520642 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc394067f_a8c9_412f_b55b_cd70e1081299.slice/crio-731d1686e40519edd327df359dae4ea3b2fd2612b08abf6caf6932569e34a58a WatchSource:0}: Error finding container 731d1686e40519edd327df359dae4ea3b2fd2612b08abf6caf6932569e34a58a: Status 404 returned error can't find the container with id 731d1686e40519edd327df359dae4ea3b2fd2612b08abf6caf6932569e34a58a Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.527827 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-66b577f8c-wf7kr"] Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.628702 4998 generic.go:334] "Generic (PLEG): container finished" podID="f2019872-2dfd-4470-883b-6dbbc86eb084" containerID="12d7ca4c0757835ffcdddda93220b27f60de75e80e05bdecf4adbc50637ef284" exitCode=0 Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.628838 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67ff45466c-fjqgp" event={"ID":"f2019872-2dfd-4470-883b-6dbbc86eb084","Type":"ContainerDied","Data":"12d7ca4c0757835ffcdddda93220b27f60de75e80e05bdecf4adbc50637ef284"} Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.629102 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67ff45466c-fjqgp" event={"ID":"f2019872-2dfd-4470-883b-6dbbc86eb084","Type":"ContainerDied","Data":"c8e88d906cacbefac3e835df6afab736b5348a179a8bb6cef460d39dde2c79de"} Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.629129 4998 scope.go:117] "RemoveContainer" containerID="12d7ca4c0757835ffcdddda93220b27f60de75e80e05bdecf4adbc50637ef284" Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.629501 4998 util.go:48] "No ready sandbox for pod 
can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67ff45466c-fjqgp" Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.642948 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66b577f8c-wf7kr" event={"ID":"c394067f-a8c9-412f-b55b-cd70e1081299","Type":"ContainerStarted","Data":"731d1686e40519edd327df359dae4ea3b2fd2612b08abf6caf6932569e34a58a"} Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.674593 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Feb 03 07:04:48 crc kubenswrapper[4998]: E0203 07:04:48.674927 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2019872-2dfd-4470-883b-6dbbc86eb084" containerName="init" Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.674942 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2019872-2dfd-4470-883b-6dbbc86eb084" containerName="init" Feb 03 07:04:48 crc kubenswrapper[4998]: E0203 07:04:48.674960 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2019872-2dfd-4470-883b-6dbbc86eb084" containerName="dnsmasq-dns" Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.674966 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2019872-2dfd-4470-883b-6dbbc86eb084" containerName="dnsmasq-dns" Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.675138 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2019872-2dfd-4470-883b-6dbbc86eb084" containerName="dnsmasq-dns" Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.680046 4998 scope.go:117] "RemoveContainer" containerID="8f6945cd3ded327f2e84bb4df1e5c7a934800312ab231d8e7bb263792491fd09" Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.685133 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.689556 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67ff45466c-fjqgp"] Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.690005 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.690170 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.692038 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-85rzz" Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.696203 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.702226 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-67ff45466c-fjqgp"] Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.707806 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.746776 4998 scope.go:117] "RemoveContainer" containerID="12d7ca4c0757835ffcdddda93220b27f60de75e80e05bdecf4adbc50637ef284" Feb 03 07:04:48 crc kubenswrapper[4998]: E0203 07:04:48.750149 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"12d7ca4c0757835ffcdddda93220b27f60de75e80e05bdecf4adbc50637ef284\": container with ID starting with 12d7ca4c0757835ffcdddda93220b27f60de75e80e05bdecf4adbc50637ef284 not found: ID does not exist" containerID="12d7ca4c0757835ffcdddda93220b27f60de75e80e05bdecf4adbc50637ef284" Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.750187 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12d7ca4c0757835ffcdddda93220b27f60de75e80e05bdecf4adbc50637ef284"} err="failed to get container status \"12d7ca4c0757835ffcdddda93220b27f60de75e80e05bdecf4adbc50637ef284\": rpc error: code = NotFound desc = could not find container \"12d7ca4c0757835ffcdddda93220b27f60de75e80e05bdecf4adbc50637ef284\": container with ID starting with 12d7ca4c0757835ffcdddda93220b27f60de75e80e05bdecf4adbc50637ef284 not found: ID does not exist" Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.750210 4998 scope.go:117] "RemoveContainer" containerID="8f6945cd3ded327f2e84bb4df1e5c7a934800312ab231d8e7bb263792491fd09" Feb 03 07:04:48 crc kubenswrapper[4998]: E0203 07:04:48.751044 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f6945cd3ded327f2e84bb4df1e5c7a934800312ab231d8e7bb263792491fd09\": container with ID starting with 8f6945cd3ded327f2e84bb4df1e5c7a934800312ab231d8e7bb263792491fd09 not found: ID does not exist" containerID="8f6945cd3ded327f2e84bb4df1e5c7a934800312ab231d8e7bb263792491fd09" Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.751067 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f6945cd3ded327f2e84bb4df1e5c7a934800312ab231d8e7bb263792491fd09"} err="failed to get container status \"8f6945cd3ded327f2e84bb4df1e5c7a934800312ab231d8e7bb263792491fd09\": rpc error: code = NotFound desc = could not find container \"8f6945cd3ded327f2e84bb4df1e5c7a934800312ab231d8e7bb263792491fd09\": 
Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.809440 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"swift-storage-0\" (UID: \"09cd9158-f279-4ac0-b8fe-0121e85a1b20\") " pod="openstack/swift-storage-0"
Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.809530 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09cd9158-f279-4ac0-b8fe-0121e85a1b20-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"09cd9158-f279-4ac0-b8fe-0121e85a1b20\") " pod="openstack/swift-storage-0"
Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.809575 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/09cd9158-f279-4ac0-b8fe-0121e85a1b20-etc-swift\") pod \"swift-storage-0\" (UID: \"09cd9158-f279-4ac0-b8fe-0121e85a1b20\") " pod="openstack/swift-storage-0"
Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.809748 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/09cd9158-f279-4ac0-b8fe-0121e85a1b20-cache\") pod \"swift-storage-0\" (UID: \"09cd9158-f279-4ac0-b8fe-0121e85a1b20\") " pod="openstack/swift-storage-0"
Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.809801 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/09cd9158-f279-4ac0-b8fe-0121e85a1b20-lock\") pod \"swift-storage-0\" (UID: \"09cd9158-f279-4ac0-b8fe-0121e85a1b20\") " pod="openstack/swift-storage-0"
Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.809830 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7fpss\" (UniqueName: \"kubernetes.io/projected/09cd9158-f279-4ac0-b8fe-0121e85a1b20-kube-api-access-7fpss\") pod \"swift-storage-0\" (UID: \"09cd9158-f279-4ac0-b8fe-0121e85a1b20\") " pod="openstack/swift-storage-0"
Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.912851 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"swift-storage-0\" (UID: \"09cd9158-f279-4ac0-b8fe-0121e85a1b20\") " pod="openstack/swift-storage-0"
Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.913185 4998 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"swift-storage-0\" (UID: \"09cd9158-f279-4ac0-b8fe-0121e85a1b20\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/swift-storage-0"
Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.913322 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09cd9158-f279-4ac0-b8fe-0121e85a1b20-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"09cd9158-f279-4ac0-b8fe-0121e85a1b20\") " pod="openstack/swift-storage-0"
Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.913351 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/09cd9158-f279-4ac0-b8fe-0121e85a1b20-etc-swift\") pod \"swift-storage-0\" (UID: \"09cd9158-f279-4ac0-b8fe-0121e85a1b20\") " pod="openstack/swift-storage-0"
Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.913410 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/09cd9158-f279-4ac0-b8fe-0121e85a1b20-cache\") pod \"swift-storage-0\" (UID: \"09cd9158-f279-4ac0-b8fe-0121e85a1b20\") " pod="openstack/swift-storage-0"
Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.913435 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/09cd9158-f279-4ac0-b8fe-0121e85a1b20-lock\") pod \"swift-storage-0\" (UID: \"09cd9158-f279-4ac0-b8fe-0121e85a1b20\") " pod="openstack/swift-storage-0"
Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.913466 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7fpss\" (UniqueName: \"kubernetes.io/projected/09cd9158-f279-4ac0-b8fe-0121e85a1b20-kube-api-access-7fpss\") pod \"swift-storage-0\" (UID: \"09cd9158-f279-4ac0-b8fe-0121e85a1b20\") " pod="openstack/swift-storage-0"
Feb 03 07:04:48 crc kubenswrapper[4998]: E0203 07:04:48.913617 4998 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Feb 03 07:04:48 crc kubenswrapper[4998]: E0203 07:04:48.913679 4998 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Feb 03 07:04:48 crc kubenswrapper[4998]: E0203 07:04:48.913742 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/09cd9158-f279-4ac0-b8fe-0121e85a1b20-etc-swift podName:09cd9158-f279-4ac0-b8fe-0121e85a1b20 nodeName:}" failed. No retries permitted until 2026-02-03 07:04:49.413720594 +0000 UTC m=+1127.700414420 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/09cd9158-f279-4ac0-b8fe-0121e85a1b20-etc-swift") pod "swift-storage-0" (UID: "09cd9158-f279-4ac0-b8fe-0121e85a1b20") : configmap "swift-ring-files" not found
Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.914031 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/09cd9158-f279-4ac0-b8fe-0121e85a1b20-cache\") pod \"swift-storage-0\" (UID: \"09cd9158-f279-4ac0-b8fe-0121e85a1b20\") " pod="openstack/swift-storage-0"
Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.914252 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/09cd9158-f279-4ac0-b8fe-0121e85a1b20-lock\") pod \"swift-storage-0\" (UID: \"09cd9158-f279-4ac0-b8fe-0121e85a1b20\") " pod="openstack/swift-storage-0"
Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.927658 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09cd9158-f279-4ac0-b8fe-0121e85a1b20-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"09cd9158-f279-4ac0-b8fe-0121e85a1b20\") " pod="openstack/swift-storage-0"
Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.945190 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"swift-storage-0\" (UID: \"09cd9158-f279-4ac0-b8fe-0121e85a1b20\") " pod="openstack/swift-storage-0"
Feb 03 07:04:48 crc kubenswrapper[4998]: I0203 07:04:48.946803 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7fpss\" (UniqueName: \"kubernetes.io/projected/09cd9158-f279-4ac0-b8fe-0121e85a1b20-kube-api-access-7fpss\") pod \"swift-storage-0\" (UID: \"09cd9158-f279-4ac0-b8fe-0121e85a1b20\") " pod="openstack/swift-storage-0"
Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.059693 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-6799k"
Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.078463 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-bwzlv"]
Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.087707 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-bwzlv"]
Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.208261 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9c9a-account-create-update-c26qq"
Need to start a new one" pod="openstack/keystone-9c9a-account-create-update-c26qq" Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.217997 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c5284078-0b06-4c22-bf9b-87b31d8f5e0f-operator-scripts\") pod \"c5284078-0b06-4c22-bf9b-87b31d8f5e0f\" (UID: \"c5284078-0b06-4c22-bf9b-87b31d8f5e0f\") " Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.218385 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vx5jp\" (UniqueName: \"kubernetes.io/projected/c5284078-0b06-4c22-bf9b-87b31d8f5e0f-kube-api-access-vx5jp\") pod \"c5284078-0b06-4c22-bf9b-87b31d8f5e0f\" (UID: \"c5284078-0b06-4c22-bf9b-87b31d8f5e0f\") " Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.226750 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5284078-0b06-4c22-bf9b-87b31d8f5e0f-kube-api-access-vx5jp" (OuterVolumeSpecName: "kube-api-access-vx5jp") pod "c5284078-0b06-4c22-bf9b-87b31d8f5e0f" (UID: "c5284078-0b06-4c22-bf9b-87b31d8f5e0f"). InnerVolumeSpecName "kube-api-access-vx5jp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.230161 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5284078-0b06-4c22-bf9b-87b31d8f5e0f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c5284078-0b06-4c22-bf9b-87b31d8f5e0f" (UID: "c5284078-0b06-4c22-bf9b-87b31d8f5e0f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.238888 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-jt7wn" Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.240614 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-b3a0-account-create-update-2r7bm" Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.319267 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z78tl\" (UniqueName: \"kubernetes.io/projected/6732bc1e-c4ad-4519-bcfd-3c973c7528a6-kube-api-access-z78tl\") pod \"6732bc1e-c4ad-4519-bcfd-3c973c7528a6\" (UID: \"6732bc1e-c4ad-4519-bcfd-3c973c7528a6\") " Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.319343 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xb8lt\" (UniqueName: \"kubernetes.io/projected/b2902281-e838-41cf-bc34-89850ed0cf83-kube-api-access-xb8lt\") pod \"b2902281-e838-41cf-bc34-89850ed0cf83\" (UID: \"b2902281-e838-41cf-bc34-89850ed0cf83\") " Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.319460 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hm6jq\" (UniqueName: \"kubernetes.io/projected/a1e61909-9629-4f0e-8393-508ce99aff63-kube-api-access-hm6jq\") pod \"a1e61909-9629-4f0e-8393-508ce99aff63\" (UID: \"a1e61909-9629-4f0e-8393-508ce99aff63\") " Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.319487 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a1e61909-9629-4f0e-8393-508ce99aff63-operator-scripts\") pod \"a1e61909-9629-4f0e-8393-508ce99aff63\" (UID: \"a1e61909-9629-4f0e-8393-508ce99aff63\") " Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.319529 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6732bc1e-c4ad-4519-bcfd-3c973c7528a6-operator-scripts\") pod \"6732bc1e-c4ad-4519-bcfd-3c973c7528a6\" (UID: \"6732bc1e-c4ad-4519-bcfd-3c973c7528a6\") " Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.319606 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b2902281-e838-41cf-bc34-89850ed0cf83-operator-scripts\") pod \"b2902281-e838-41cf-bc34-89850ed0cf83\" (UID: \"b2902281-e838-41cf-bc34-89850ed0cf83\") " Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.319957 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vx5jp\" (UniqueName: \"kubernetes.io/projected/c5284078-0b06-4c22-bf9b-87b31d8f5e0f-kube-api-access-vx5jp\") on node \"crc\" DevicePath \"\"" Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.319972 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c5284078-0b06-4c22-bf9b-87b31d8f5e0f-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.320322 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b2902281-e838-41cf-bc34-89850ed0cf83-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b2902281-e838-41cf-bc34-89850ed0cf83" (UID: "b2902281-e838-41cf-bc34-89850ed0cf83"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.321363 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a1e61909-9629-4f0e-8393-508ce99aff63-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a1e61909-9629-4f0e-8393-508ce99aff63" (UID: "a1e61909-9629-4f0e-8393-508ce99aff63"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.321442 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6732bc1e-c4ad-4519-bcfd-3c973c7528a6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6732bc1e-c4ad-4519-bcfd-3c973c7528a6" (UID: "6732bc1e-c4ad-4519-bcfd-3c973c7528a6"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.323361 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1e61909-9629-4f0e-8393-508ce99aff63-kube-api-access-hm6jq" (OuterVolumeSpecName: "kube-api-access-hm6jq") pod "a1e61909-9629-4f0e-8393-508ce99aff63" (UID: "a1e61909-9629-4f0e-8393-508ce99aff63"). InnerVolumeSpecName "kube-api-access-hm6jq". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.323684 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6732bc1e-c4ad-4519-bcfd-3c973c7528a6-kube-api-access-z78tl" (OuterVolumeSpecName: "kube-api-access-z78tl") pod "6732bc1e-c4ad-4519-bcfd-3c973c7528a6" (UID: "6732bc1e-c4ad-4519-bcfd-3c973c7528a6"). InnerVolumeSpecName "kube-api-access-z78tl". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.323857 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2902281-e838-41cf-bc34-89850ed0cf83-kube-api-access-xb8lt" (OuterVolumeSpecName: "kube-api-access-xb8lt") pod "b2902281-e838-41cf-bc34-89850ed0cf83" (UID: "b2902281-e838-41cf-bc34-89850ed0cf83"). InnerVolumeSpecName "kube-api-access-xb8lt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.421355 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/09cd9158-f279-4ac0-b8fe-0121e85a1b20-etc-swift\") pod \"swift-storage-0\" (UID: \"09cd9158-f279-4ac0-b8fe-0121e85a1b20\") " pod="openstack/swift-storage-0" Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.422015 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xb8lt\" (UniqueName: \"kubernetes.io/projected/b2902281-e838-41cf-bc34-89850ed0cf83-kube-api-access-xb8lt\") on node \"crc\" DevicePath \"\"" Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.422031 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hm6jq\" (UniqueName: \"kubernetes.io/projected/a1e61909-9629-4f0e-8393-508ce99aff63-kube-api-access-hm6jq\") on node \"crc\" DevicePath \"\"" Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.422042 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a1e61909-9629-4f0e-8393-508ce99aff63-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.422051 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6732bc1e-c4ad-4519-bcfd-3c973c7528a6-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.422080 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b2902281-e838-41cf-bc34-89850ed0cf83-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.422089 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z78tl\" (UniqueName: \"kubernetes.io/projected/6732bc1e-c4ad-4519-bcfd-3c973c7528a6-kube-api-access-z78tl\") on node \"crc\" DevicePath \"\"" Feb 03 07:04:49 crc kubenswrapper[4998]: E0203 07:04:49.421733 4998 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Feb 03 07:04:49 crc kubenswrapper[4998]: E0203 07:04:49.422116 4998 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Feb 03 07:04:49 crc kubenswrapper[4998]: E0203 07:04:49.422164 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/09cd9158-f279-4ac0-b8fe-0121e85a1b20-etc-swift podName:09cd9158-f279-4ac0-b8fe-0121e85a1b20 nodeName:}" failed. No retries permitted until 2026-02-03 07:04:50.422147994 +0000 UTC m=+1128.708841790 (durationBeforeRetry 1s). 
Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.653333 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-6799k" event={"ID":"c5284078-0b06-4c22-bf9b-87b31d8f5e0f","Type":"ContainerDied","Data":"5726e185deba0a39f9baf1458bafc2dd3d6eb2c182a5f683aa9b13860d520af4"}
Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.653379 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5726e185deba0a39f9baf1458bafc2dd3d6eb2c182a5f683aa9b13860d520af4"
Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.653352 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-6799k"
Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.655162 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-b3a0-account-create-update-2r7bm"
Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.655205 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-b3a0-account-create-update-2r7bm" event={"ID":"a1e61909-9629-4f0e-8393-508ce99aff63","Type":"ContainerDied","Data":"cef2868240f40e61628b6a3f508ef8c10dc67c953d9f642a6d9be2d221f04d8c"}
Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.655253 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cef2868240f40e61628b6a3f508ef8c10dc67c953d9f642a6d9be2d221f04d8c"
Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.656555 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9c9a-account-create-update-c26qq"
Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.656561 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9c9a-account-create-update-c26qq" event={"ID":"b2902281-e838-41cf-bc34-89850ed0cf83","Type":"ContainerDied","Data":"34a205d8fc6ec19b4581f864e38c28e156ac69d7dcfbe48bb5faef9311a9da2c"}
Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.656658 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="34a205d8fc6ec19b4581f864e38c28e156ac69d7dcfbe48bb5faef9311a9da2c"
Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.661094 4998 generic.go:334] "Generic (PLEG): container finished" podID="c394067f-a8c9-412f-b55b-cd70e1081299" containerID="bfc537a7f6aad47ea27513107fa72a04e9180b10fc20db7547861380a59a5351" exitCode=0
Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.661155 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66b577f8c-wf7kr" event={"ID":"c394067f-a8c9-412f-b55b-cd70e1081299","Type":"ContainerDied","Data":"bfc537a7f6aad47ea27513107fa72a04e9180b10fc20db7547861380a59a5351"}
Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.668596 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-jt7wn" event={"ID":"6732bc1e-c4ad-4519-bcfd-3c973c7528a6","Type":"ContainerDied","Data":"f6a67d6d03a4e0a7c4c55ab591b0dba1139ffc2ba4ffcdd5c1e3851947534b6a"}
Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.668645 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f6a67d6d03a4e0a7c4c55ab591b0dba1139ffc2ba4ffcdd5c1e3851947534b6a"
Feb 03 07:04:49 crc kubenswrapper[4998]: I0203 07:04:49.668655 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-jt7wn"
Feb 03 07:04:50 crc kubenswrapper[4998]: I0203 07:04:50.440405 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f111db0d-d66c-4eb1-af19-56741a2d109f" path="/var/lib/kubelet/pods/f111db0d-d66c-4eb1-af19-56741a2d109f/volumes"
Feb 03 07:04:50 crc kubenswrapper[4998]: I0203 07:04:50.441233 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2019872-2dfd-4470-883b-6dbbc86eb084" path="/var/lib/kubelet/pods/f2019872-2dfd-4470-883b-6dbbc86eb084/volumes"
Feb 03 07:04:50 crc kubenswrapper[4998]: I0203 07:04:50.459269 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/09cd9158-f279-4ac0-b8fe-0121e85a1b20-etc-swift\") pod \"swift-storage-0\" (UID: \"09cd9158-f279-4ac0-b8fe-0121e85a1b20\") " pod="openstack/swift-storage-0"
Feb 03 07:04:50 crc kubenswrapper[4998]: E0203 07:04:50.459507 4998 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Feb 03 07:04:50 crc kubenswrapper[4998]: E0203 07:04:50.459542 4998 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Feb 03 07:04:50 crc kubenswrapper[4998]: E0203 07:04:50.459607 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/09cd9158-f279-4ac0-b8fe-0121e85a1b20-etc-swift podName:09cd9158-f279-4ac0-b8fe-0121e85a1b20 nodeName:}" failed. No retries permitted until 2026-02-03 07:04:52.459584088 +0000 UTC m=+1130.746277914 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/09cd9158-f279-4ac0-b8fe-0121e85a1b20-etc-swift") pod "swift-storage-0" (UID: "09cd9158-f279-4ac0-b8fe-0121e85a1b20") : configmap "swift-ring-files" not found
Feb 03 07:04:50 crc kubenswrapper[4998]: I0203 07:04:50.685664 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66b577f8c-wf7kr" event={"ID":"c394067f-a8c9-412f-b55b-cd70e1081299","Type":"ContainerStarted","Data":"7ca4ebc50b4c6320863dbfcf759eda98708bcd113f8b880b8e8f248e221525a8"}
Feb 03 07:04:50 crc kubenswrapper[4998]: I0203 07:04:50.687172 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-66b577f8c-wf7kr"
Feb 03 07:04:50 crc kubenswrapper[4998]: I0203 07:04:50.705659 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-66b577f8c-wf7kr" podStartSLOduration=3.70563763 podStartE2EDuration="3.70563763s" podCreationTimestamp="2026-02-03 07:04:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:04:50.703196 +0000 UTC m=+1128.989889816" watchObservedRunningTime="2026-02-03 07:04:50.70563763 +0000 UTC m=+1128.992331456"
Feb 03 07:04:52 crc kubenswrapper[4998]: I0203 07:04:52.507117 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/09cd9158-f279-4ac0-b8fe-0121e85a1b20-etc-swift\") pod \"swift-storage-0\" (UID: \"09cd9158-f279-4ac0-b8fe-0121e85a1b20\") " pod="openstack/swift-storage-0"
Feb 03 07:04:52 crc kubenswrapper[4998]: E0203 07:04:52.507353 4998 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Feb 03 07:04:52 crc kubenswrapper[4998]: E0203 07:04:52.507370 4998 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Feb 03 07:04:52 crc kubenswrapper[4998]: E0203 07:04:52.507424 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/09cd9158-f279-4ac0-b8fe-0121e85a1b20-etc-swift podName:09cd9158-f279-4ac0-b8fe-0121e85a1b20 nodeName:}" failed. No retries permitted until 2026-02-03 07:04:56.507404189 +0000 UTC m=+1134.794098005 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/09cd9158-f279-4ac0-b8fe-0121e85a1b20-etc-swift") pod "swift-storage-0" (UID: "09cd9158-f279-4ac0-b8fe-0121e85a1b20") : configmap "swift-ring-files" not found
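[editor's note] Across the four etc-swift attempts so far, durationBeforeRetry doubles: 500ms, 1s, 2s, and now 4s. That is the per-operation exponential backoff nestedpendingoperations.go applies when a volume operation keeps failing. The sketch below reproduces only that spacing; the doubling and the 500ms base are taken from the log, while the cap is an illustrative assumption (the kubelet does cap the delay, but the ceiling is not visible here).

```go
package main

import (
	"fmt"
	"time"
)

// Reproduces the retry spacing visible above: 500ms, 1s, 2s, 4s, ...
func main() {
	maxDelay := 2 * time.Minute // assumed cap for illustration only
	delay := 500 * time.Millisecond
	for attempt := 1; attempt <= 6; attempt++ {
		fmt.Printf("attempt %d: durationBeforeRetry %v\n", attempt, delay)
		delay *= 2
		if delay > maxDelay {
			delay = maxDelay
		}
	}
}
```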
Feb 03 07:04:52 crc kubenswrapper[4998]: I0203 07:04:52.536408 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-nkj98"]
Feb 03 07:04:52 crc kubenswrapper[4998]: E0203 07:04:52.537356 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1e61909-9629-4f0e-8393-508ce99aff63" containerName="mariadb-account-create-update"
Feb 03 07:04:52 crc kubenswrapper[4998]: I0203 07:04:52.537378 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1e61909-9629-4f0e-8393-508ce99aff63" containerName="mariadb-account-create-update"
Feb 03 07:04:52 crc kubenswrapper[4998]: E0203 07:04:52.537391 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2902281-e838-41cf-bc34-89850ed0cf83" containerName="mariadb-account-create-update"
Feb 03 07:04:52 crc kubenswrapper[4998]: I0203 07:04:52.537400 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2902281-e838-41cf-bc34-89850ed0cf83" containerName="mariadb-account-create-update"
Feb 03 07:04:52 crc kubenswrapper[4998]: E0203 07:04:52.537416 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6732bc1e-c4ad-4519-bcfd-3c973c7528a6" containerName="mariadb-database-create"
Feb 03 07:04:52 crc kubenswrapper[4998]: I0203 07:04:52.537429 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="6732bc1e-c4ad-4519-bcfd-3c973c7528a6" containerName="mariadb-database-create"
Feb 03 07:04:52 crc kubenswrapper[4998]: E0203 07:04:52.537444 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5284078-0b06-4c22-bf9b-87b31d8f5e0f" containerName="mariadb-database-create"
Feb 03 07:04:52 crc kubenswrapper[4998]: I0203 07:04:52.537452 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5284078-0b06-4c22-bf9b-87b31d8f5e0f" containerName="mariadb-database-create"
Feb 03 07:04:52 crc kubenswrapper[4998]: I0203 07:04:52.537644 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1e61909-9629-4f0e-8393-508ce99aff63" containerName="mariadb-account-create-update"
Feb 03 07:04:52 crc kubenswrapper[4998]: I0203 07:04:52.537661 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2902281-e838-41cf-bc34-89850ed0cf83" containerName="mariadb-account-create-update"
Feb 03 07:04:52 crc kubenswrapper[4998]: I0203 07:04:52.537676 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5284078-0b06-4c22-bf9b-87b31d8f5e0f" containerName="mariadb-database-create"
Feb 03 07:04:52 crc kubenswrapper[4998]: I0203 07:04:52.537693 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="6732bc1e-c4ad-4519-bcfd-3c973c7528a6" containerName="mariadb-database-create"
Feb 03 07:04:52 crc kubenswrapper[4998]: I0203 07:04:52.538530 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-nkj98"
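[editor's note] The RemoveStaleState burst above is housekeeping: once the four database-create/account-create pods are gone, the CPU and memory managers drop any per-container assignments still keyed by those pod UIDs (the E lines are emitted as each stale entry is found). The pattern is a keyed-state sweep; the sketch below uses illustrative types, not the kubelet's actual state structures.

```go
package main

import "fmt"

// Sketch of a RemoveStaleState-style sweep: drop per-container resource
// assignments whose pod no longer exists on the node.
func removeStaleState(assignments map[string]map[string]string, activePods map[string]bool) {
	for podUID, containers := range assignments {
		if activePods[podUID] {
			continue
		}
		for containerName := range containers {
			fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n",
				podUID, containerName)
		}
		delete(assignments, podUID)
	}
}

func main() {
	assignments := map[string]map[string]string{
		"a1e61909-9629-4f0e-8393-508ce99aff63": {"mariadb-account-create-update": "cpuset:0-1"},
	}
	removeStaleState(assignments, map[string]bool{}) // no active pods: entry is swept
}
```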
Need to start a new one" pod="openstack/swift-ring-rebalance-nkj98" Feb 03 07:04:52 crc kubenswrapper[4998]: I0203 07:04:52.540301 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Feb 03 07:04:52 crc kubenswrapper[4998]: I0203 07:04:52.548178 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Feb 03 07:04:52 crc kubenswrapper[4998]: I0203 07:04:52.548303 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Feb 03 07:04:52 crc kubenswrapper[4998]: I0203 07:04:52.552354 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-nkj98"] Feb 03 07:04:52 crc kubenswrapper[4998]: I0203 07:04:52.608317 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baf70899-28c3-49dd-93b9-5645b847eb91-combined-ca-bundle\") pod \"swift-ring-rebalance-nkj98\" (UID: \"baf70899-28c3-49dd-93b9-5645b847eb91\") " pod="openstack/swift-ring-rebalance-nkj98" Feb 03 07:04:52 crc kubenswrapper[4998]: I0203 07:04:52.608374 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/baf70899-28c3-49dd-93b9-5645b847eb91-dispersionconf\") pod \"swift-ring-rebalance-nkj98\" (UID: \"baf70899-28c3-49dd-93b9-5645b847eb91\") " pod="openstack/swift-ring-rebalance-nkj98" Feb 03 07:04:52 crc kubenswrapper[4998]: I0203 07:04:52.608425 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/baf70899-28c3-49dd-93b9-5645b847eb91-swiftconf\") pod \"swift-ring-rebalance-nkj98\" (UID: \"baf70899-28c3-49dd-93b9-5645b847eb91\") " pod="openstack/swift-ring-rebalance-nkj98" Feb 03 07:04:52 crc kubenswrapper[4998]: I0203 07:04:52.608457 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/baf70899-28c3-49dd-93b9-5645b847eb91-ring-data-devices\") pod \"swift-ring-rebalance-nkj98\" (UID: \"baf70899-28c3-49dd-93b9-5645b847eb91\") " pod="openstack/swift-ring-rebalance-nkj98" Feb 03 07:04:52 crc kubenswrapper[4998]: I0203 07:04:52.608552 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/baf70899-28c3-49dd-93b9-5645b847eb91-scripts\") pod \"swift-ring-rebalance-nkj98\" (UID: \"baf70899-28c3-49dd-93b9-5645b847eb91\") " pod="openstack/swift-ring-rebalance-nkj98" Feb 03 07:04:52 crc kubenswrapper[4998]: I0203 07:04:52.608596 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4psbb\" (UniqueName: \"kubernetes.io/projected/baf70899-28c3-49dd-93b9-5645b847eb91-kube-api-access-4psbb\") pod \"swift-ring-rebalance-nkj98\" (UID: \"baf70899-28c3-49dd-93b9-5645b847eb91\") " pod="openstack/swift-ring-rebalance-nkj98" Feb 03 07:04:52 crc kubenswrapper[4998]: I0203 07:04:52.608628 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/baf70899-28c3-49dd-93b9-5645b847eb91-etc-swift\") pod \"swift-ring-rebalance-nkj98\" (UID: \"baf70899-28c3-49dd-93b9-5645b847eb91\") " pod="openstack/swift-ring-rebalance-nkj98" Feb 03 
07:04:52 crc kubenswrapper[4998]: I0203 07:04:52.709632 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/baf70899-28c3-49dd-93b9-5645b847eb91-ring-data-devices\") pod \"swift-ring-rebalance-nkj98\" (UID: \"baf70899-28c3-49dd-93b9-5645b847eb91\") " pod="openstack/swift-ring-rebalance-nkj98" Feb 03 07:04:52 crc kubenswrapper[4998]: I0203 07:04:52.709732 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/baf70899-28c3-49dd-93b9-5645b847eb91-scripts\") pod \"swift-ring-rebalance-nkj98\" (UID: \"baf70899-28c3-49dd-93b9-5645b847eb91\") " pod="openstack/swift-ring-rebalance-nkj98" Feb 03 07:04:52 crc kubenswrapper[4998]: I0203 07:04:52.709796 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4psbb\" (UniqueName: \"kubernetes.io/projected/baf70899-28c3-49dd-93b9-5645b847eb91-kube-api-access-4psbb\") pod \"swift-ring-rebalance-nkj98\" (UID: \"baf70899-28c3-49dd-93b9-5645b847eb91\") " pod="openstack/swift-ring-rebalance-nkj98" Feb 03 07:04:52 crc kubenswrapper[4998]: I0203 07:04:52.709828 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/baf70899-28c3-49dd-93b9-5645b847eb91-etc-swift\") pod \"swift-ring-rebalance-nkj98\" (UID: \"baf70899-28c3-49dd-93b9-5645b847eb91\") " pod="openstack/swift-ring-rebalance-nkj98" Feb 03 07:04:52 crc kubenswrapper[4998]: I0203 07:04:52.710000 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baf70899-28c3-49dd-93b9-5645b847eb91-combined-ca-bundle\") pod \"swift-ring-rebalance-nkj98\" (UID: \"baf70899-28c3-49dd-93b9-5645b847eb91\") " pod="openstack/swift-ring-rebalance-nkj98" Feb 03 07:04:52 crc kubenswrapper[4998]: I0203 07:04:52.710040 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/baf70899-28c3-49dd-93b9-5645b847eb91-dispersionconf\") pod \"swift-ring-rebalance-nkj98\" (UID: \"baf70899-28c3-49dd-93b9-5645b847eb91\") " pod="openstack/swift-ring-rebalance-nkj98" Feb 03 07:04:52 crc kubenswrapper[4998]: I0203 07:04:52.710080 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/baf70899-28c3-49dd-93b9-5645b847eb91-swiftconf\") pod \"swift-ring-rebalance-nkj98\" (UID: \"baf70899-28c3-49dd-93b9-5645b847eb91\") " pod="openstack/swift-ring-rebalance-nkj98" Feb 03 07:04:52 crc kubenswrapper[4998]: I0203 07:04:52.710364 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/baf70899-28c3-49dd-93b9-5645b847eb91-ring-data-devices\") pod \"swift-ring-rebalance-nkj98\" (UID: \"baf70899-28c3-49dd-93b9-5645b847eb91\") " pod="openstack/swift-ring-rebalance-nkj98" Feb 03 07:04:52 crc kubenswrapper[4998]: I0203 07:04:52.711475 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/baf70899-28c3-49dd-93b9-5645b847eb91-etc-swift\") pod \"swift-ring-rebalance-nkj98\" (UID: \"baf70899-28c3-49dd-93b9-5645b847eb91\") " pod="openstack/swift-ring-rebalance-nkj98" Feb 03 07:04:52 crc kubenswrapper[4998]: I0203 07:04:52.712060 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/baf70899-28c3-49dd-93b9-5645b847eb91-scripts\") pod \"swift-ring-rebalance-nkj98\" (UID: \"baf70899-28c3-49dd-93b9-5645b847eb91\") " pod="openstack/swift-ring-rebalance-nkj98" Feb 03 07:04:52 crc kubenswrapper[4998]: I0203 07:04:52.716843 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/baf70899-28c3-49dd-93b9-5645b847eb91-dispersionconf\") pod \"swift-ring-rebalance-nkj98\" (UID: \"baf70899-28c3-49dd-93b9-5645b847eb91\") " pod="openstack/swift-ring-rebalance-nkj98" Feb 03 07:04:52 crc kubenswrapper[4998]: I0203 07:04:52.716920 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baf70899-28c3-49dd-93b9-5645b847eb91-combined-ca-bundle\") pod \"swift-ring-rebalance-nkj98\" (UID: \"baf70899-28c3-49dd-93b9-5645b847eb91\") " pod="openstack/swift-ring-rebalance-nkj98" Feb 03 07:04:52 crc kubenswrapper[4998]: I0203 07:04:52.725199 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/baf70899-28c3-49dd-93b9-5645b847eb91-swiftconf\") pod \"swift-ring-rebalance-nkj98\" (UID: \"baf70899-28c3-49dd-93b9-5645b847eb91\") " pod="openstack/swift-ring-rebalance-nkj98" Feb 03 07:04:52 crc kubenswrapper[4998]: I0203 07:04:52.728006 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4psbb\" (UniqueName: \"kubernetes.io/projected/baf70899-28c3-49dd-93b9-5645b847eb91-kube-api-access-4psbb\") pod \"swift-ring-rebalance-nkj98\" (UID: \"baf70899-28c3-49dd-93b9-5645b847eb91\") " pod="openstack/swift-ring-rebalance-nkj98" Feb 03 07:04:52 crc kubenswrapper[4998]: I0203 07:04:52.860455 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-nkj98" Feb 03 07:04:54 crc kubenswrapper[4998]: I0203 07:04:54.087662 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-6pdsl"] Feb 03 07:04:54 crc kubenswrapper[4998]: I0203 07:04:54.089921 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-6pdsl" Feb 03 07:04:54 crc kubenswrapper[4998]: I0203 07:04:54.091766 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-mariadb-root-db-secret" Feb 03 07:04:54 crc kubenswrapper[4998]: I0203 07:04:54.097820 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-6pdsl"] Feb 03 07:04:54 crc kubenswrapper[4998]: I0203 07:04:54.130257 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltqzp\" (UniqueName: \"kubernetes.io/projected/e4c4e482-8eb9-4fa6-8074-7fd0bea98618-kube-api-access-ltqzp\") pod \"root-account-create-update-6pdsl\" (UID: \"e4c4e482-8eb9-4fa6-8074-7fd0bea98618\") " pod="openstack/root-account-create-update-6pdsl" Feb 03 07:04:54 crc kubenswrapper[4998]: I0203 07:04:54.130398 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e4c4e482-8eb9-4fa6-8074-7fd0bea98618-operator-scripts\") pod \"root-account-create-update-6pdsl\" (UID: \"e4c4e482-8eb9-4fa6-8074-7fd0bea98618\") " pod="openstack/root-account-create-update-6pdsl" Feb 03 07:04:54 crc kubenswrapper[4998]: I0203 07:04:54.232925 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltqzp\" (UniqueName: \"kubernetes.io/projected/e4c4e482-8eb9-4fa6-8074-7fd0bea98618-kube-api-access-ltqzp\") pod \"root-account-create-update-6pdsl\" (UID: \"e4c4e482-8eb9-4fa6-8074-7fd0bea98618\") " pod="openstack/root-account-create-update-6pdsl" Feb 03 07:04:54 crc kubenswrapper[4998]: I0203 07:04:54.233105 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e4c4e482-8eb9-4fa6-8074-7fd0bea98618-operator-scripts\") pod \"root-account-create-update-6pdsl\" (UID: \"e4c4e482-8eb9-4fa6-8074-7fd0bea98618\") " pod="openstack/root-account-create-update-6pdsl" Feb 03 07:04:54 crc kubenswrapper[4998]: I0203 07:04:54.234281 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e4c4e482-8eb9-4fa6-8074-7fd0bea98618-operator-scripts\") pod \"root-account-create-update-6pdsl\" (UID: \"e4c4e482-8eb9-4fa6-8074-7fd0bea98618\") " pod="openstack/root-account-create-update-6pdsl" Feb 03 07:04:54 crc kubenswrapper[4998]: I0203 07:04:54.255361 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltqzp\" (UniqueName: \"kubernetes.io/projected/e4c4e482-8eb9-4fa6-8074-7fd0bea98618-kube-api-access-ltqzp\") pod \"root-account-create-update-6pdsl\" (UID: \"e4c4e482-8eb9-4fa6-8074-7fd0bea98618\") " pod="openstack/root-account-create-update-6pdsl" Feb 03 07:04:54 crc kubenswrapper[4998]: I0203 07:04:54.423960 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-6pdsl" Feb 03 07:04:56 crc kubenswrapper[4998]: I0203 07:04:56.571131 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/09cd9158-f279-4ac0-b8fe-0121e85a1b20-etc-swift\") pod \"swift-storage-0\" (UID: \"09cd9158-f279-4ac0-b8fe-0121e85a1b20\") " pod="openstack/swift-storage-0" Feb 03 07:04:56 crc kubenswrapper[4998]: E0203 07:04:56.571329 4998 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Feb 03 07:04:56 crc kubenswrapper[4998]: E0203 07:04:56.571441 4998 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Feb 03 07:04:56 crc kubenswrapper[4998]: E0203 07:04:56.571502 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/09cd9158-f279-4ac0-b8fe-0121e85a1b20-etc-swift podName:09cd9158-f279-4ac0-b8fe-0121e85a1b20 nodeName:}" failed. No retries permitted until 2026-02-03 07:05:04.571487585 +0000 UTC m=+1142.858181391 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/09cd9158-f279-4ac0-b8fe-0121e85a1b20-etc-swift") pod "swift-storage-0" (UID: "09cd9158-f279-4ac0-b8fe-0121e85a1b20") : configmap "swift-ring-files" not found Feb 03 07:04:57 crc kubenswrapper[4998]: I0203 07:04:57.957022 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-66b577f8c-wf7kr" Feb 03 07:04:58 crc kubenswrapper[4998]: I0203 07:04:58.021617 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56df986d9c-4vcgt"] Feb 03 07:04:58 crc kubenswrapper[4998]: I0203 07:04:58.021927 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-56df986d9c-4vcgt" podUID="ed67f460-eadc-4355-a6b2-bb5f46615481" containerName="dnsmasq-dns" containerID="cri-o://fbb376643c7409727027c1db0cc8550d9e85bc6669d8e93133191151135aa55e" gracePeriod=10 Feb 03 07:04:58 crc kubenswrapper[4998]: I0203 07:04:58.043155 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Feb 03 07:04:58 crc kubenswrapper[4998]: I0203 07:04:58.757277 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-56df986d9c-4vcgt" Feb 03 07:04:58 crc kubenswrapper[4998]: I0203 07:04:58.764694 4998 generic.go:334] "Generic (PLEG): container finished" podID="cc9d5160-2c51-474c-aca1-1af693753ee8" containerID="ce1c3d4a471ef44ed9a8e887a5f8fba20bb369dda1c3bbb7370146268725d19a" exitCode=0 Feb 03 07:04:58 crc kubenswrapper[4998]: I0203 07:04:58.764768 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"cc9d5160-2c51-474c-aca1-1af693753ee8","Type":"ContainerDied","Data":"ce1c3d4a471ef44ed9a8e887a5f8fba20bb369dda1c3bbb7370146268725d19a"} Feb 03 07:04:58 crc kubenswrapper[4998]: I0203 07:04:58.781923 4998 generic.go:334] "Generic (PLEG): container finished" podID="ed67f460-eadc-4355-a6b2-bb5f46615481" containerID="fbb376643c7409727027c1db0cc8550d9e85bc6669d8e93133191151135aa55e" exitCode=0 Feb 03 07:04:58 crc kubenswrapper[4998]: I0203 07:04:58.781973 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df986d9c-4vcgt" event={"ID":"ed67f460-eadc-4355-a6b2-bb5f46615481","Type":"ContainerDied","Data":"fbb376643c7409727027c1db0cc8550d9e85bc6669d8e93133191151135aa55e"} Feb 03 07:04:58 crc kubenswrapper[4998]: I0203 07:04:58.782002 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df986d9c-4vcgt" event={"ID":"ed67f460-eadc-4355-a6b2-bb5f46615481","Type":"ContainerDied","Data":"d7d85b4aa4ae6033d8da49cf785f2c704ac8ab1d6b5cb36417f4b6535359d23c"} Feb 03 07:04:58 crc kubenswrapper[4998]: I0203 07:04:58.782017 4998 scope.go:117] "RemoveContainer" containerID="fbb376643c7409727027c1db0cc8550d9e85bc6669d8e93133191151135aa55e" Feb 03 07:04:58 crc kubenswrapper[4998]: I0203 07:04:58.782164 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-56df986d9c-4vcgt" Feb 03 07:04:58 crc kubenswrapper[4998]: I0203 07:04:58.836931 4998 scope.go:117] "RemoveContainer" containerID="fb202ab9f9716c29b2c5589e45c6be6291adb213792510fab03eeb19e57c43df" Feb 03 07:04:58 crc kubenswrapper[4998]: I0203 07:04:58.861460 4998 scope.go:117] "RemoveContainer" containerID="fbb376643c7409727027c1db0cc8550d9e85bc6669d8e93133191151135aa55e" Feb 03 07:04:58 crc kubenswrapper[4998]: E0203 07:04:58.861820 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fbb376643c7409727027c1db0cc8550d9e85bc6669d8e93133191151135aa55e\": container with ID starting with fbb376643c7409727027c1db0cc8550d9e85bc6669d8e93133191151135aa55e not found: ID does not exist" containerID="fbb376643c7409727027c1db0cc8550d9e85bc6669d8e93133191151135aa55e" Feb 03 07:04:58 crc kubenswrapper[4998]: I0203 07:04:58.861857 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fbb376643c7409727027c1db0cc8550d9e85bc6669d8e93133191151135aa55e"} err="failed to get container status \"fbb376643c7409727027c1db0cc8550d9e85bc6669d8e93133191151135aa55e\": rpc error: code = NotFound desc = could not find container \"fbb376643c7409727027c1db0cc8550d9e85bc6669d8e93133191151135aa55e\": container with ID starting with fbb376643c7409727027c1db0cc8550d9e85bc6669d8e93133191151135aa55e not found: ID does not exist" Feb 03 07:04:58 crc kubenswrapper[4998]: I0203 07:04:58.861884 4998 scope.go:117] "RemoveContainer" containerID="fb202ab9f9716c29b2c5589e45c6be6291adb213792510fab03eeb19e57c43df" Feb 03 07:04:58 crc kubenswrapper[4998]: E0203 07:04:58.862167 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb202ab9f9716c29b2c5589e45c6be6291adb213792510fab03eeb19e57c43df\": container with ID starting with fb202ab9f9716c29b2c5589e45c6be6291adb213792510fab03eeb19e57c43df not found: ID does not exist" containerID="fb202ab9f9716c29b2c5589e45c6be6291adb213792510fab03eeb19e57c43df" Feb 03 07:04:58 crc kubenswrapper[4998]: I0203 07:04:58.862216 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb202ab9f9716c29b2c5589e45c6be6291adb213792510fab03eeb19e57c43df"} err="failed to get container status \"fb202ab9f9716c29b2c5589e45c6be6291adb213792510fab03eeb19e57c43df\": rpc error: code = NotFound desc = could not find container \"fb202ab9f9716c29b2c5589e45c6be6291adb213792510fab03eeb19e57c43df\": container with ID starting with fb202ab9f9716c29b2c5589e45c6be6291adb213792510fab03eeb19e57c43df not found: ID does not exist" Feb 03 07:04:58 crc kubenswrapper[4998]: I0203 07:04:58.905338 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-6pdsl"] Feb 03 07:04:58 crc kubenswrapper[4998]: I0203 07:04:58.906938 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ed67f460-eadc-4355-a6b2-bb5f46615481-dns-svc\") pod \"ed67f460-eadc-4355-a6b2-bb5f46615481\" (UID: \"ed67f460-eadc-4355-a6b2-bb5f46615481\") " Feb 03 07:04:58 crc kubenswrapper[4998]: I0203 07:04:58.906999 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ed67f460-eadc-4355-a6b2-bb5f46615481-ovsdbserver-sb\") pod \"ed67f460-eadc-4355-a6b2-bb5f46615481\" (UID: 
\"ed67f460-eadc-4355-a6b2-bb5f46615481\") " Feb 03 07:04:58 crc kubenswrapper[4998]: I0203 07:04:58.907133 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed67f460-eadc-4355-a6b2-bb5f46615481-config\") pod \"ed67f460-eadc-4355-a6b2-bb5f46615481\" (UID: \"ed67f460-eadc-4355-a6b2-bb5f46615481\") " Feb 03 07:04:58 crc kubenswrapper[4998]: I0203 07:04:58.907162 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9w5fs\" (UniqueName: \"kubernetes.io/projected/ed67f460-eadc-4355-a6b2-bb5f46615481-kube-api-access-9w5fs\") pod \"ed67f460-eadc-4355-a6b2-bb5f46615481\" (UID: \"ed67f460-eadc-4355-a6b2-bb5f46615481\") " Feb 03 07:04:58 crc kubenswrapper[4998]: I0203 07:04:58.907208 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ed67f460-eadc-4355-a6b2-bb5f46615481-ovsdbserver-nb\") pod \"ed67f460-eadc-4355-a6b2-bb5f46615481\" (UID: \"ed67f460-eadc-4355-a6b2-bb5f46615481\") " Feb 03 07:04:58 crc kubenswrapper[4998]: I0203 07:04:58.910712 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed67f460-eadc-4355-a6b2-bb5f46615481-kube-api-access-9w5fs" (OuterVolumeSpecName: "kube-api-access-9w5fs") pod "ed67f460-eadc-4355-a6b2-bb5f46615481" (UID: "ed67f460-eadc-4355-a6b2-bb5f46615481"). InnerVolumeSpecName "kube-api-access-9w5fs". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:04:58 crc kubenswrapper[4998]: I0203 07:04:58.914752 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-nkj98"] Feb 03 07:04:58 crc kubenswrapper[4998]: I0203 07:04:58.971818 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed67f460-eadc-4355-a6b2-bb5f46615481-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ed67f460-eadc-4355-a6b2-bb5f46615481" (UID: "ed67f460-eadc-4355-a6b2-bb5f46615481"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:04:58 crc kubenswrapper[4998]: I0203 07:04:58.985019 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed67f460-eadc-4355-a6b2-bb5f46615481-config" (OuterVolumeSpecName: "config") pod "ed67f460-eadc-4355-a6b2-bb5f46615481" (UID: "ed67f460-eadc-4355-a6b2-bb5f46615481"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:04:58 crc kubenswrapper[4998]: I0203 07:04:58.988493 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed67f460-eadc-4355-a6b2-bb5f46615481-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ed67f460-eadc-4355-a6b2-bb5f46615481" (UID: "ed67f460-eadc-4355-a6b2-bb5f46615481"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:04:58 crc kubenswrapper[4998]: I0203 07:04:58.991459 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed67f460-eadc-4355-a6b2-bb5f46615481-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ed67f460-eadc-4355-a6b2-bb5f46615481" (UID: "ed67f460-eadc-4355-a6b2-bb5f46615481"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:04:59 crc kubenswrapper[4998]: I0203 07:04:59.009373 4998 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ed67f460-eadc-4355-a6b2-bb5f46615481-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 07:04:59 crc kubenswrapper[4998]: I0203 07:04:59.009401 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ed67f460-eadc-4355-a6b2-bb5f46615481-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 03 07:04:59 crc kubenswrapper[4998]: I0203 07:04:59.009413 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed67f460-eadc-4355-a6b2-bb5f46615481-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:04:59 crc kubenswrapper[4998]: I0203 07:04:59.009422 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9w5fs\" (UniqueName: \"kubernetes.io/projected/ed67f460-eadc-4355-a6b2-bb5f46615481-kube-api-access-9w5fs\") on node \"crc\" DevicePath \"\"" Feb 03 07:04:59 crc kubenswrapper[4998]: I0203 07:04:59.009431 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ed67f460-eadc-4355-a6b2-bb5f46615481-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 03 07:04:59 crc kubenswrapper[4998]: I0203 07:04:59.118540 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56df986d9c-4vcgt"] Feb 03 07:04:59 crc kubenswrapper[4998]: I0203 07:04:59.126048 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-56df986d9c-4vcgt"] Feb 03 07:04:59 crc kubenswrapper[4998]: I0203 07:04:59.795517 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"cc9d5160-2c51-474c-aca1-1af693753ee8","Type":"ContainerStarted","Data":"0e9dcff5c5f1e34e7f9a045f44319200346178396df08e6468976e6cb812d2c2"} Feb 03 07:04:59 crc kubenswrapper[4998]: I0203 07:04:59.796056 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:04:59 crc kubenswrapper[4998]: I0203 07:04:59.799090 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-kvz5p" event={"ID":"0c5507ca-3689-4044-8e7c-37627a2f2759","Type":"ContainerStarted","Data":"e7a97b7a54e21576462742ef1d8dc1f7c269e1841dc59cd469d2f08c927fcbf6"} Feb 03 07:04:59 crc kubenswrapper[4998]: I0203 07:04:59.800585 4998 generic.go:334] "Generic (PLEG): container finished" podID="e4c4e482-8eb9-4fa6-8074-7fd0bea98618" containerID="ccf47ac913228d1d870b36da0922a4284dd8fff26b107cf29c51bc32838a015b" exitCode=0 Feb 03 07:04:59 crc kubenswrapper[4998]: I0203 07:04:59.800652 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-6pdsl" event={"ID":"e4c4e482-8eb9-4fa6-8074-7fd0bea98618","Type":"ContainerDied","Data":"ccf47ac913228d1d870b36da0922a4284dd8fff26b107cf29c51bc32838a015b"} Feb 03 07:04:59 crc kubenswrapper[4998]: I0203 07:04:59.800676 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-6pdsl" event={"ID":"e4c4e482-8eb9-4fa6-8074-7fd0bea98618","Type":"ContainerStarted","Data":"557ed641657ca168c4204ffb4f19be26b292147b4c21824509044c0bc003f7f8"} Feb 03 07:04:59 crc kubenswrapper[4998]: I0203 07:04:59.812267 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-nkj98" 
event={"ID":"baf70899-28c3-49dd-93b9-5645b847eb91","Type":"ContainerStarted","Data":"27a6929c8b6dbf68bcf017c9b09f09bd221cb9a47810a6598c94cc074fdd406b"} Feb 03 07:04:59 crc kubenswrapper[4998]: I0203 07:04:59.813927 4998 generic.go:334] "Generic (PLEG): container finished" podID="59f5a5d7-787a-4941-a2d3-2fe8db65cb31" containerID="76b83e5ba7d49d4a25c729bc84136855824cc063c69e5eb9aff88e90d282666d" exitCode=0 Feb 03 07:04:59 crc kubenswrapper[4998]: I0203 07:04:59.813967 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"59f5a5d7-787a-4941-a2d3-2fe8db65cb31","Type":"ContainerDied","Data":"76b83e5ba7d49d4a25c729bc84136855824cc063c69e5eb9aff88e90d282666d"} Feb 03 07:04:59 crc kubenswrapper[4998]: I0203 07:04:59.821164 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.965463695 podStartE2EDuration="58.82114936s" podCreationTimestamp="2026-02-03 07:04:01 +0000 UTC" firstStartedPulling="2026-02-03 07:04:03.520564032 +0000 UTC m=+1081.807257848" lastFinishedPulling="2026-02-03 07:04:25.376249707 +0000 UTC m=+1103.662943513" observedRunningTime="2026-02-03 07:04:59.817267719 +0000 UTC m=+1138.103961545" watchObservedRunningTime="2026-02-03 07:04:59.82114936 +0000 UTC m=+1138.107843176" Feb 03 07:04:59 crc kubenswrapper[4998]: I0203 07:04:59.871977 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-kvz5p" podStartSLOduration=2.3897544059999998 podStartE2EDuration="13.871955816s" podCreationTimestamp="2026-02-03 07:04:46 +0000 UTC" firstStartedPulling="2026-02-03 07:04:47.034506795 +0000 UTC m=+1125.321200601" lastFinishedPulling="2026-02-03 07:04:58.516708195 +0000 UTC m=+1136.803402011" observedRunningTime="2026-02-03 07:04:59.864002978 +0000 UTC m=+1138.150696784" watchObservedRunningTime="2026-02-03 07:04:59.871955816 +0000 UTC m=+1138.158649642" Feb 03 07:05:00 crc kubenswrapper[4998]: I0203 07:05:00.439819 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed67f460-eadc-4355-a6b2-bb5f46615481" path="/var/lib/kubelet/pods/ed67f460-eadc-4355-a6b2-bb5f46615481/volumes" Feb 03 07:05:00 crc kubenswrapper[4998]: I0203 07:05:00.826643 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"59f5a5d7-787a-4941-a2d3-2fe8db65cb31","Type":"ContainerStarted","Data":"098c0a6d87cc827e487059fcc544a85b758f02d99c658f66e5fbf5c96ee6f34c"} Feb 03 07:05:00 crc kubenswrapper[4998]: I0203 07:05:00.827991 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Feb 03 07:05:00 crc kubenswrapper[4998]: I0203 07:05:00.868974 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.992714262 podStartE2EDuration="59.868947731s" podCreationTimestamp="2026-02-03 07:04:01 +0000 UTC" firstStartedPulling="2026-02-03 07:04:03.479054547 +0000 UTC m=+1081.765748353" lastFinishedPulling="2026-02-03 07:04:25.355288016 +0000 UTC m=+1103.641981822" observedRunningTime="2026-02-03 07:05:00.851233303 +0000 UTC m=+1139.137927119" watchObservedRunningTime="2026-02-03 07:05:00.868947731 +0000 UTC m=+1139.155641537" Feb 03 07:05:01 crc kubenswrapper[4998]: I0203 07:05:01.564450 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-sm6db" podUID="5a08e2bf-f0a7-4812-8137-c305d886f174" containerName="ovn-controller" probeResult="failure" 
Feb 03 07:05:03 crc kubenswrapper[4998]: I0203 07:05:03.209644 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-6pdsl"
Feb 03 07:05:03 crc kubenswrapper[4998]: I0203 07:05:03.275227 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e4c4e482-8eb9-4fa6-8074-7fd0bea98618-operator-scripts\") pod \"e4c4e482-8eb9-4fa6-8074-7fd0bea98618\" (UID: \"e4c4e482-8eb9-4fa6-8074-7fd0bea98618\") "
Feb 03 07:05:03 crc kubenswrapper[4998]: I0203 07:05:03.275293 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ltqzp\" (UniqueName: \"kubernetes.io/projected/e4c4e482-8eb9-4fa6-8074-7fd0bea98618-kube-api-access-ltqzp\") pod \"e4c4e482-8eb9-4fa6-8074-7fd0bea98618\" (UID: \"e4c4e482-8eb9-4fa6-8074-7fd0bea98618\") "
Feb 03 07:05:03 crc kubenswrapper[4998]: I0203 07:05:03.276123 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4c4e482-8eb9-4fa6-8074-7fd0bea98618-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e4c4e482-8eb9-4fa6-8074-7fd0bea98618" (UID: "e4c4e482-8eb9-4fa6-8074-7fd0bea98618"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 03 07:05:03 crc kubenswrapper[4998]: I0203 07:05:03.280122 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4c4e482-8eb9-4fa6-8074-7fd0bea98618-kube-api-access-ltqzp" (OuterVolumeSpecName: "kube-api-access-ltqzp") pod "e4c4e482-8eb9-4fa6-8074-7fd0bea98618" (UID: "e4c4e482-8eb9-4fa6-8074-7fd0bea98618"). InnerVolumeSpecName "kube-api-access-ltqzp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 07:05:03 crc kubenswrapper[4998]: I0203 07:05:03.377736 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e4c4e482-8eb9-4fa6-8074-7fd0bea98618-operator-scripts\") on node \"crc\" DevicePath \"\""
Feb 03 07:05:03 crc kubenswrapper[4998]: I0203 07:05:03.377793 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ltqzp\" (UniqueName: \"kubernetes.io/projected/e4c4e482-8eb9-4fa6-8074-7fd0bea98618-kube-api-access-ltqzp\") on node \"crc\" DevicePath \"\""
Feb 03 07:05:03 crc kubenswrapper[4998]: I0203 07:05:03.853580 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-nkj98" event={"ID":"baf70899-28c3-49dd-93b9-5645b847eb91","Type":"ContainerStarted","Data":"1344285d68105646b73eef86c4c93645fecb1cf62976d14576dbc6aeeb280daf"}
Feb 03 07:05:03 crc kubenswrapper[4998]: I0203 07:05:03.857077 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-6pdsl" event={"ID":"e4c4e482-8eb9-4fa6-8074-7fd0bea98618","Type":"ContainerDied","Data":"557ed641657ca168c4204ffb4f19be26b292147b4c21824509044c0bc003f7f8"}
Feb 03 07:05:03 crc kubenswrapper[4998]: I0203 07:05:03.857137 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="557ed641657ca168c4204ffb4f19be26b292147b4c21824509044c0bc003f7f8"
Feb 03 07:05:03 crc kubenswrapper[4998]: I0203 07:05:03.857191 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-6pdsl"
Need to start a new one" pod="openstack/root-account-create-update-6pdsl" Feb 03 07:05:03 crc kubenswrapper[4998]: I0203 07:05:03.879839 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-nkj98" podStartSLOduration=7.718092546 podStartE2EDuration="11.879821112s" podCreationTimestamp="2026-02-03 07:04:52 +0000 UTC" firstStartedPulling="2026-02-03 07:04:58.942085287 +0000 UTC m=+1137.228779103" lastFinishedPulling="2026-02-03 07:05:03.103813863 +0000 UTC m=+1141.390507669" observedRunningTime="2026-02-03 07:05:03.878449733 +0000 UTC m=+1142.165143559" watchObservedRunningTime="2026-02-03 07:05:03.879821112 +0000 UTC m=+1142.166514918" Feb 03 07:05:04 crc kubenswrapper[4998]: I0203 07:05:04.597653 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/09cd9158-f279-4ac0-b8fe-0121e85a1b20-etc-swift\") pod \"swift-storage-0\" (UID: \"09cd9158-f279-4ac0-b8fe-0121e85a1b20\") " pod="openstack/swift-storage-0" Feb 03 07:05:04 crc kubenswrapper[4998]: E0203 07:05:04.597846 4998 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Feb 03 07:05:04 crc kubenswrapper[4998]: E0203 07:05:04.597865 4998 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Feb 03 07:05:04 crc kubenswrapper[4998]: E0203 07:05:04.597914 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/09cd9158-f279-4ac0-b8fe-0121e85a1b20-etc-swift podName:09cd9158-f279-4ac0-b8fe-0121e85a1b20 nodeName:}" failed. No retries permitted until 2026-02-03 07:05:20.597900163 +0000 UTC m=+1158.884593959 (durationBeforeRetry 16s). 
Feb 03 07:05:06 crc kubenswrapper[4998]: I0203 07:05:06.560398 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-sm6db" podUID="5a08e2bf-f0a7-4812-8137-c305d886f174" containerName="ovn-controller" probeResult="failure" output=<
Feb 03 07:05:06 crc kubenswrapper[4998]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status
Feb 03 07:05:06 crc kubenswrapper[4998]: >
Feb 03 07:05:06 crc kubenswrapper[4998]: I0203 07:05:06.582678 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-t4p58"
Feb 03 07:05:06 crc kubenswrapper[4998]: I0203 07:05:06.587618 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-t4p58"
Feb 03 07:05:06 crc kubenswrapper[4998]: I0203 07:05:06.806521 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-sm6db-config-d7pv5"]
Feb 03 07:05:06 crc kubenswrapper[4998]: E0203 07:05:06.806949 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4c4e482-8eb9-4fa6-8074-7fd0bea98618" containerName="mariadb-account-create-update"
Feb 03 07:05:06 crc kubenswrapper[4998]: I0203 07:05:06.806980 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4c4e482-8eb9-4fa6-8074-7fd0bea98618" containerName="mariadb-account-create-update"
Feb 03 07:05:06 crc kubenswrapper[4998]: E0203 07:05:06.806996 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed67f460-eadc-4355-a6b2-bb5f46615481" containerName="init"
Feb 03 07:05:06 crc kubenswrapper[4998]: I0203 07:05:06.807003 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed67f460-eadc-4355-a6b2-bb5f46615481" containerName="init"
Feb 03 07:05:06 crc kubenswrapper[4998]: E0203 07:05:06.807018 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed67f460-eadc-4355-a6b2-bb5f46615481" containerName="dnsmasq-dns"
Feb 03 07:05:06 crc kubenswrapper[4998]: I0203 07:05:06.807026 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed67f460-eadc-4355-a6b2-bb5f46615481" containerName="dnsmasq-dns"
Feb 03 07:05:06 crc kubenswrapper[4998]: I0203 07:05:06.807231 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed67f460-eadc-4355-a6b2-bb5f46615481" containerName="dnsmasq-dns"
Feb 03 07:05:06 crc kubenswrapper[4998]: I0203 07:05:06.807244 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4c4e482-8eb9-4fa6-8074-7fd0bea98618" containerName="mariadb-account-create-update"
Feb 03 07:05:06 crc kubenswrapper[4998]: I0203 07:05:06.807890 4998 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/ovn-controller-sm6db-config-d7pv5" Feb 03 07:05:06 crc kubenswrapper[4998]: I0203 07:05:06.810145 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Feb 03 07:05:06 crc kubenswrapper[4998]: I0203 07:05:06.826477 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-sm6db-config-d7pv5"] Feb 03 07:05:06 crc kubenswrapper[4998]: I0203 07:05:06.831740 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jshsr\" (UniqueName: \"kubernetes.io/projected/ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf-kube-api-access-jshsr\") pod \"ovn-controller-sm6db-config-d7pv5\" (UID: \"ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf\") " pod="openstack/ovn-controller-sm6db-config-d7pv5" Feb 03 07:05:06 crc kubenswrapper[4998]: I0203 07:05:06.831808 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf-var-run\") pod \"ovn-controller-sm6db-config-d7pv5\" (UID: \"ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf\") " pod="openstack/ovn-controller-sm6db-config-d7pv5" Feb 03 07:05:06 crc kubenswrapper[4998]: I0203 07:05:06.831859 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf-var-log-ovn\") pod \"ovn-controller-sm6db-config-d7pv5\" (UID: \"ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf\") " pod="openstack/ovn-controller-sm6db-config-d7pv5" Feb 03 07:05:06 crc kubenswrapper[4998]: I0203 07:05:06.832001 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf-additional-scripts\") pod \"ovn-controller-sm6db-config-d7pv5\" (UID: \"ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf\") " pod="openstack/ovn-controller-sm6db-config-d7pv5" Feb 03 07:05:06 crc kubenswrapper[4998]: I0203 07:05:06.832060 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf-scripts\") pod \"ovn-controller-sm6db-config-d7pv5\" (UID: \"ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf\") " pod="openstack/ovn-controller-sm6db-config-d7pv5" Feb 03 07:05:06 crc kubenswrapper[4998]: I0203 07:05:06.832338 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf-var-run-ovn\") pod \"ovn-controller-sm6db-config-d7pv5\" (UID: \"ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf\") " pod="openstack/ovn-controller-sm6db-config-d7pv5" Feb 03 07:05:06 crc kubenswrapper[4998]: I0203 07:05:06.934389 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf-var-run-ovn\") pod \"ovn-controller-sm6db-config-d7pv5\" (UID: \"ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf\") " pod="openstack/ovn-controller-sm6db-config-d7pv5" Feb 03 07:05:06 crc kubenswrapper[4998]: I0203 07:05:06.934508 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jshsr\" (UniqueName: 
\"kubernetes.io/projected/ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf-kube-api-access-jshsr\") pod \"ovn-controller-sm6db-config-d7pv5\" (UID: \"ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf\") " pod="openstack/ovn-controller-sm6db-config-d7pv5" Feb 03 07:05:06 crc kubenswrapper[4998]: I0203 07:05:06.934537 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf-var-run\") pod \"ovn-controller-sm6db-config-d7pv5\" (UID: \"ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf\") " pod="openstack/ovn-controller-sm6db-config-d7pv5" Feb 03 07:05:06 crc kubenswrapper[4998]: I0203 07:05:06.934558 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf-var-log-ovn\") pod \"ovn-controller-sm6db-config-d7pv5\" (UID: \"ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf\") " pod="openstack/ovn-controller-sm6db-config-d7pv5" Feb 03 07:05:06 crc kubenswrapper[4998]: I0203 07:05:06.934634 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf-additional-scripts\") pod \"ovn-controller-sm6db-config-d7pv5\" (UID: \"ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf\") " pod="openstack/ovn-controller-sm6db-config-d7pv5" Feb 03 07:05:06 crc kubenswrapper[4998]: I0203 07:05:06.934670 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf-scripts\") pod \"ovn-controller-sm6db-config-d7pv5\" (UID: \"ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf\") " pod="openstack/ovn-controller-sm6db-config-d7pv5" Feb 03 07:05:06 crc kubenswrapper[4998]: I0203 07:05:06.934891 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf-var-run\") pod \"ovn-controller-sm6db-config-d7pv5\" (UID: \"ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf\") " pod="openstack/ovn-controller-sm6db-config-d7pv5" Feb 03 07:05:06 crc kubenswrapper[4998]: I0203 07:05:06.934937 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf-var-log-ovn\") pod \"ovn-controller-sm6db-config-d7pv5\" (UID: \"ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf\") " pod="openstack/ovn-controller-sm6db-config-d7pv5" Feb 03 07:05:06 crc kubenswrapper[4998]: I0203 07:05:06.935012 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf-var-run-ovn\") pod \"ovn-controller-sm6db-config-d7pv5\" (UID: \"ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf\") " pod="openstack/ovn-controller-sm6db-config-d7pv5" Feb 03 07:05:06 crc kubenswrapper[4998]: I0203 07:05:06.935570 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf-additional-scripts\") pod \"ovn-controller-sm6db-config-d7pv5\" (UID: \"ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf\") " pod="openstack/ovn-controller-sm6db-config-d7pv5" Feb 03 07:05:06 crc kubenswrapper[4998]: I0203 07:05:06.937243 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf-scripts\") pod \"ovn-controller-sm6db-config-d7pv5\" (UID: \"ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf\") " pod="openstack/ovn-controller-sm6db-config-d7pv5" Feb 03 07:05:06 crc kubenswrapper[4998]: I0203 07:05:06.957053 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jshsr\" (UniqueName: \"kubernetes.io/projected/ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf-kube-api-access-jshsr\") pod \"ovn-controller-sm6db-config-d7pv5\" (UID: \"ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf\") " pod="openstack/ovn-controller-sm6db-config-d7pv5" Feb 03 07:05:07 crc kubenswrapper[4998]: I0203 07:05:07.126433 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-sm6db-config-d7pv5" Feb 03 07:05:07 crc kubenswrapper[4998]: I0203 07:05:07.609210 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-sm6db-config-d7pv5"] Feb 03 07:05:07 crc kubenswrapper[4998]: I0203 07:05:07.885865 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-sm6db-config-d7pv5" event={"ID":"ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf","Type":"ContainerStarted","Data":"82843c1a576a724caee817065f9cae98473eb180553c58cec0aac92ce16eb2e0"} Feb 03 07:05:08 crc kubenswrapper[4998]: I0203 07:05:08.895155 4998 generic.go:334] "Generic (PLEG): container finished" podID="0c5507ca-3689-4044-8e7c-37627a2f2759" containerID="e7a97b7a54e21576462742ef1d8dc1f7c269e1841dc59cd469d2f08c927fcbf6" exitCode=0 Feb 03 07:05:08 crc kubenswrapper[4998]: I0203 07:05:08.895237 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-kvz5p" event={"ID":"0c5507ca-3689-4044-8e7c-37627a2f2759","Type":"ContainerDied","Data":"e7a97b7a54e21576462742ef1d8dc1f7c269e1841dc59cd469d2f08c927fcbf6"} Feb 03 07:05:08 crc kubenswrapper[4998]: I0203 07:05:08.896961 4998 generic.go:334] "Generic (PLEG): container finished" podID="ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf" containerID="ed451cc8d54143288975cbbac680ed008712b7ec6322d8711d81f0f4eeb8175d" exitCode=0 Feb 03 07:05:08 crc kubenswrapper[4998]: I0203 07:05:08.896998 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-sm6db-config-d7pv5" event={"ID":"ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf","Type":"ContainerDied","Data":"ed451cc8d54143288975cbbac680ed008712b7ec6322d8711d81f0f4eeb8175d"} Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.322799 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-sm6db-config-d7pv5" Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.335029 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-kvz5p" Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.393971 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c5507ca-3689-4044-8e7c-37627a2f2759-config-data\") pod \"0c5507ca-3689-4044-8e7c-37627a2f2759\" (UID: \"0c5507ca-3689-4044-8e7c-37627a2f2759\") " Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.394041 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0c5507ca-3689-4044-8e7c-37627a2f2759-db-sync-config-data\") pod \"0c5507ca-3689-4044-8e7c-37627a2f2759\" (UID: \"0c5507ca-3689-4044-8e7c-37627a2f2759\") " Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.394078 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c5507ca-3689-4044-8e7c-37627a2f2759-combined-ca-bundle\") pod \"0c5507ca-3689-4044-8e7c-37627a2f2759\" (UID: \"0c5507ca-3689-4044-8e7c-37627a2f2759\") " Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.394167 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk5kj\" (UniqueName: \"kubernetes.io/projected/0c5507ca-3689-4044-8e7c-37627a2f2759-kube-api-access-tk5kj\") pod \"0c5507ca-3689-4044-8e7c-37627a2f2759\" (UID: \"0c5507ca-3689-4044-8e7c-37627a2f2759\") " Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.394191 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf-var-log-ovn\") pod \"ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf\" (UID: \"ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf\") " Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.394233 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf-var-run-ovn\") pod \"ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf\" (UID: \"ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf\") " Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.394294 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jshsr\" (UniqueName: \"kubernetes.io/projected/ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf-kube-api-access-jshsr\") pod \"ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf\" (UID: \"ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf\") " Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.394314 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf-var-run\") pod \"ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf\" (UID: \"ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf\") " Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.394335 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf-additional-scripts\") pod \"ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf\" (UID: \"ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf\") " Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.394359 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf-scripts\") pod \"ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf\" 
(UID: \"ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf\") " Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.395316 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf" (UID: "ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.395451 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf-scripts" (OuterVolumeSpecName: "scripts") pod "ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf" (UID: "ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.395798 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf-var-run" (OuterVolumeSpecName: "var-run") pod "ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf" (UID: "ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.395842 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf" (UID: "ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.396635 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf" (UID: "ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.399803 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c5507ca-3689-4044-8e7c-37627a2f2759-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "0c5507ca-3689-4044-8e7c-37627a2f2759" (UID: "0c5507ca-3689-4044-8e7c-37627a2f2759"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.400121 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c5507ca-3689-4044-8e7c-37627a2f2759-kube-api-access-tk5kj" (OuterVolumeSpecName: "kube-api-access-tk5kj") pod "0c5507ca-3689-4044-8e7c-37627a2f2759" (UID: "0c5507ca-3689-4044-8e7c-37627a2f2759"). InnerVolumeSpecName "kube-api-access-tk5kj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.400194 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf-kube-api-access-jshsr" (OuterVolumeSpecName: "kube-api-access-jshsr") pod "ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf" (UID: "ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf"). InnerVolumeSpecName "kube-api-access-jshsr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.422432 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c5507ca-3689-4044-8e7c-37627a2f2759-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0c5507ca-3689-4044-8e7c-37627a2f2759" (UID: "0c5507ca-3689-4044-8e7c-37627a2f2759"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.434816 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c5507ca-3689-4044-8e7c-37627a2f2759-config-data" (OuterVolumeSpecName: "config-data") pod "0c5507ca-3689-4044-8e7c-37627a2f2759" (UID: "0c5507ca-3689-4044-8e7c-37627a2f2759"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.497167 4998 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf-var-run-ovn\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.497198 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jshsr\" (UniqueName: \"kubernetes.io/projected/ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf-kube-api-access-jshsr\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.497209 4998 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf-var-run\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.497217 4998 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf-additional-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.497226 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.497234 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c5507ca-3689-4044-8e7c-37627a2f2759-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.497243 4998 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0c5507ca-3689-4044-8e7c-37627a2f2759-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.497251 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c5507ca-3689-4044-8e7c-37627a2f2759-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.497262 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk5kj\" (UniqueName: \"kubernetes.io/projected/0c5507ca-3689-4044-8e7c-37627a2f2759-kube-api-access-tk5kj\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.497269 4998 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: 
\"kubernetes.io/host-path/ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf-var-log-ovn\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.911653 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-sm6db-config-d7pv5" event={"ID":"ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf","Type":"ContainerDied","Data":"82843c1a576a724caee817065f9cae98473eb180553c58cec0aac92ce16eb2e0"} Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.911705 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="82843c1a576a724caee817065f9cae98473eb180553c58cec0aac92ce16eb2e0" Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.911678 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-sm6db-config-d7pv5" Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.913843 4998 generic.go:334] "Generic (PLEG): container finished" podID="baf70899-28c3-49dd-93b9-5645b847eb91" containerID="1344285d68105646b73eef86c4c93645fecb1cf62976d14576dbc6aeeb280daf" exitCode=0 Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.913906 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-nkj98" event={"ID":"baf70899-28c3-49dd-93b9-5645b847eb91","Type":"ContainerDied","Data":"1344285d68105646b73eef86c4c93645fecb1cf62976d14576dbc6aeeb280daf"} Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.917083 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-kvz5p" Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.917075 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-kvz5p" event={"ID":"0c5507ca-3689-4044-8e7c-37627a2f2759","Type":"ContainerDied","Data":"2c10ac4a4935c7a0a4a12130685f0e7194593fb64878fab62f4d4188e66033fa"} Feb 03 07:05:10 crc kubenswrapper[4998]: I0203 07:05:10.917258 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2c10ac4a4935c7a0a4a12130685f0e7194593fb64878fab62f4d4188e66033fa" Feb 03 07:05:11 crc kubenswrapper[4998]: I0203 07:05:11.355505 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-d489f5d97-pntcj"] Feb 03 07:05:11 crc kubenswrapper[4998]: E0203 07:05:11.362650 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c5507ca-3689-4044-8e7c-37627a2f2759" containerName="glance-db-sync" Feb 03 07:05:11 crc kubenswrapper[4998]: I0203 07:05:11.362688 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c5507ca-3689-4044-8e7c-37627a2f2759" containerName="glance-db-sync" Feb 03 07:05:11 crc kubenswrapper[4998]: E0203 07:05:11.362705 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf" containerName="ovn-config" Feb 03 07:05:11 crc kubenswrapper[4998]: I0203 07:05:11.362711 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf" containerName="ovn-config" Feb 03 07:05:11 crc kubenswrapper[4998]: I0203 07:05:11.363003 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c5507ca-3689-4044-8e7c-37627a2f2759" containerName="glance-db-sync" Feb 03 07:05:11 crc kubenswrapper[4998]: I0203 07:05:11.363020 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf" containerName="ovn-config" Feb 03 07:05:11 crc kubenswrapper[4998]: I0203 07:05:11.363887 4998 util.go:30] "No sandbox for pod 
Feb 03 07:05:11 crc kubenswrapper[4998]: I0203 07:05:11.404038 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d489f5d97-pntcj"]
Feb 03 07:05:11 crc kubenswrapper[4998]: I0203 07:05:11.417256 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cf2f7b9f-8064-4194-bff3-481d1e9b4221-ovsdbserver-sb\") pod \"dnsmasq-dns-d489f5d97-pntcj\" (UID: \"cf2f7b9f-8064-4194-bff3-481d1e9b4221\") " pod="openstack/dnsmasq-dns-d489f5d97-pntcj"
Feb 03 07:05:11 crc kubenswrapper[4998]: I0203 07:05:11.417511 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf2f7b9f-8064-4194-bff3-481d1e9b4221-config\") pod \"dnsmasq-dns-d489f5d97-pntcj\" (UID: \"cf2f7b9f-8064-4194-bff3-481d1e9b4221\") " pod="openstack/dnsmasq-dns-d489f5d97-pntcj"
Feb 03 07:05:11 crc kubenswrapper[4998]: I0203 07:05:11.417631 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cf2f7b9f-8064-4194-bff3-481d1e9b4221-ovsdbserver-nb\") pod \"dnsmasq-dns-d489f5d97-pntcj\" (UID: \"cf2f7b9f-8064-4194-bff3-481d1e9b4221\") " pod="openstack/dnsmasq-dns-d489f5d97-pntcj"
Feb 03 07:05:11 crc kubenswrapper[4998]: I0203 07:05:11.417945 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cf2f7b9f-8064-4194-bff3-481d1e9b4221-dns-svc\") pod \"dnsmasq-dns-d489f5d97-pntcj\" (UID: \"cf2f7b9f-8064-4194-bff3-481d1e9b4221\") " pod="openstack/dnsmasq-dns-d489f5d97-pntcj"
Feb 03 07:05:11 crc kubenswrapper[4998]: I0203 07:05:11.418078 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t7bqs\" (UniqueName: \"kubernetes.io/projected/cf2f7b9f-8064-4194-bff3-481d1e9b4221-kube-api-access-t7bqs\") pod \"dnsmasq-dns-d489f5d97-pntcj\" (UID: \"cf2f7b9f-8064-4194-bff3-481d1e9b4221\") " pod="openstack/dnsmasq-dns-d489f5d97-pntcj"
Feb 03 07:05:11 crc kubenswrapper[4998]: I0203 07:05:11.502449 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-sm6db-config-d7pv5"]
Feb 03 07:05:11 crc kubenswrapper[4998]: I0203 07:05:11.511632 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-sm6db-config-d7pv5"]
Feb 03 07:05:11 crc kubenswrapper[4998]: I0203 07:05:11.519402 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cf2f7b9f-8064-4194-bff3-481d1e9b4221-dns-svc\") pod \"dnsmasq-dns-d489f5d97-pntcj\" (UID: \"cf2f7b9f-8064-4194-bff3-481d1e9b4221\") " pod="openstack/dnsmasq-dns-d489f5d97-pntcj"
Feb 03 07:05:11 crc kubenswrapper[4998]: I0203 07:05:11.519464 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t7bqs\" (UniqueName: \"kubernetes.io/projected/cf2f7b9f-8064-4194-bff3-481d1e9b4221-kube-api-access-t7bqs\") pod \"dnsmasq-dns-d489f5d97-pntcj\" (UID: \"cf2f7b9f-8064-4194-bff3-481d1e9b4221\") " pod="openstack/dnsmasq-dns-d489f5d97-pntcj"
Feb 03 07:05:11 crc kubenswrapper[4998]: I0203 07:05:11.519531 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cf2f7b9f-8064-4194-bff3-481d1e9b4221-ovsdbserver-sb\") pod \"dnsmasq-dns-d489f5d97-pntcj\" (UID: \"cf2f7b9f-8064-4194-bff3-481d1e9b4221\") " pod="openstack/dnsmasq-dns-d489f5d97-pntcj"
Feb 03 07:05:11 crc kubenswrapper[4998]: I0203 07:05:11.519554 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf2f7b9f-8064-4194-bff3-481d1e9b4221-config\") pod \"dnsmasq-dns-d489f5d97-pntcj\" (UID: \"cf2f7b9f-8064-4194-bff3-481d1e9b4221\") " pod="openstack/dnsmasq-dns-d489f5d97-pntcj"
Feb 03 07:05:11 crc kubenswrapper[4998]: I0203 07:05:11.519571 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cf2f7b9f-8064-4194-bff3-481d1e9b4221-ovsdbserver-nb\") pod \"dnsmasq-dns-d489f5d97-pntcj\" (UID: \"cf2f7b9f-8064-4194-bff3-481d1e9b4221\") " pod="openstack/dnsmasq-dns-d489f5d97-pntcj"
Feb 03 07:05:11 crc kubenswrapper[4998]: I0203 07:05:11.520665 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf2f7b9f-8064-4194-bff3-481d1e9b4221-config\") pod \"dnsmasq-dns-d489f5d97-pntcj\" (UID: \"cf2f7b9f-8064-4194-bff3-481d1e9b4221\") " pod="openstack/dnsmasq-dns-d489f5d97-pntcj"
Feb 03 07:05:11 crc kubenswrapper[4998]: I0203 07:05:11.520697 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cf2f7b9f-8064-4194-bff3-481d1e9b4221-dns-svc\") pod \"dnsmasq-dns-d489f5d97-pntcj\" (UID: \"cf2f7b9f-8064-4194-bff3-481d1e9b4221\") " pod="openstack/dnsmasq-dns-d489f5d97-pntcj"
Feb 03 07:05:11 crc kubenswrapper[4998]: I0203 07:05:11.520767 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cf2f7b9f-8064-4194-bff3-481d1e9b4221-ovsdbserver-nb\") pod \"dnsmasq-dns-d489f5d97-pntcj\" (UID: \"cf2f7b9f-8064-4194-bff3-481d1e9b4221\") " pod="openstack/dnsmasq-dns-d489f5d97-pntcj"
Feb 03 07:05:11 crc kubenswrapper[4998]: I0203 07:05:11.520762 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cf2f7b9f-8064-4194-bff3-481d1e9b4221-ovsdbserver-sb\") pod \"dnsmasq-dns-d489f5d97-pntcj\" (UID: \"cf2f7b9f-8064-4194-bff3-481d1e9b4221\") " pod="openstack/dnsmasq-dns-d489f5d97-pntcj"
Feb 03 07:05:11 crc kubenswrapper[4998]: I0203 07:05:11.566047 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t7bqs\" (UniqueName: \"kubernetes.io/projected/cf2f7b9f-8064-4194-bff3-481d1e9b4221-kube-api-access-t7bqs\") pod \"dnsmasq-dns-d489f5d97-pntcj\" (UID: \"cf2f7b9f-8064-4194-bff3-481d1e9b4221\") " pod="openstack/dnsmasq-dns-d489f5d97-pntcj"
Feb 03 07:05:11 crc kubenswrapper[4998]: I0203 07:05:11.663561 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-sm6db"
Feb 03 07:05:11 crc kubenswrapper[4998]: I0203 07:05:11.680642 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d489f5d97-pntcj"
Feb 03 07:05:12 crc kubenswrapper[4998]: I0203 07:05:12.136117 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d489f5d97-pntcj"]
Feb 03 07:05:12 crc kubenswrapper[4998]: I0203 07:05:12.213183 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-nkj98"
Need to start a new one" pod="openstack/swift-ring-rebalance-nkj98" Feb 03 07:05:12 crc kubenswrapper[4998]: I0203 07:05:12.330104 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/baf70899-28c3-49dd-93b9-5645b847eb91-scripts\") pod \"baf70899-28c3-49dd-93b9-5645b847eb91\" (UID: \"baf70899-28c3-49dd-93b9-5645b847eb91\") " Feb 03 07:05:12 crc kubenswrapper[4998]: I0203 07:05:12.330161 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baf70899-28c3-49dd-93b9-5645b847eb91-combined-ca-bundle\") pod \"baf70899-28c3-49dd-93b9-5645b847eb91\" (UID: \"baf70899-28c3-49dd-93b9-5645b847eb91\") " Feb 03 07:05:12 crc kubenswrapper[4998]: I0203 07:05:12.330289 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/baf70899-28c3-49dd-93b9-5645b847eb91-swiftconf\") pod \"baf70899-28c3-49dd-93b9-5645b847eb91\" (UID: \"baf70899-28c3-49dd-93b9-5645b847eb91\") " Feb 03 07:05:12 crc kubenswrapper[4998]: I0203 07:05:12.330320 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4psbb\" (UniqueName: \"kubernetes.io/projected/baf70899-28c3-49dd-93b9-5645b847eb91-kube-api-access-4psbb\") pod \"baf70899-28c3-49dd-93b9-5645b847eb91\" (UID: \"baf70899-28c3-49dd-93b9-5645b847eb91\") " Feb 03 07:05:12 crc kubenswrapper[4998]: I0203 07:05:12.330367 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/baf70899-28c3-49dd-93b9-5645b847eb91-dispersionconf\") pod \"baf70899-28c3-49dd-93b9-5645b847eb91\" (UID: \"baf70899-28c3-49dd-93b9-5645b847eb91\") " Feb 03 07:05:12 crc kubenswrapper[4998]: I0203 07:05:12.330389 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/baf70899-28c3-49dd-93b9-5645b847eb91-ring-data-devices\") pod \"baf70899-28c3-49dd-93b9-5645b847eb91\" (UID: \"baf70899-28c3-49dd-93b9-5645b847eb91\") " Feb 03 07:05:12 crc kubenswrapper[4998]: I0203 07:05:12.330478 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/baf70899-28c3-49dd-93b9-5645b847eb91-etc-swift\") pod \"baf70899-28c3-49dd-93b9-5645b847eb91\" (UID: \"baf70899-28c3-49dd-93b9-5645b847eb91\") " Feb 03 07:05:12 crc kubenswrapper[4998]: I0203 07:05:12.331945 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/baf70899-28c3-49dd-93b9-5645b847eb91-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "baf70899-28c3-49dd-93b9-5645b847eb91" (UID: "baf70899-28c3-49dd-93b9-5645b847eb91"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:05:12 crc kubenswrapper[4998]: I0203 07:05:12.332963 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/baf70899-28c3-49dd-93b9-5645b847eb91-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "baf70899-28c3-49dd-93b9-5645b847eb91" (UID: "baf70899-28c3-49dd-93b9-5645b847eb91"). InnerVolumeSpecName "ring-data-devices". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:05:12 crc kubenswrapper[4998]: I0203 07:05:12.334842 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/baf70899-28c3-49dd-93b9-5645b847eb91-kube-api-access-4psbb" (OuterVolumeSpecName: "kube-api-access-4psbb") pod "baf70899-28c3-49dd-93b9-5645b847eb91" (UID: "baf70899-28c3-49dd-93b9-5645b847eb91"). InnerVolumeSpecName "kube-api-access-4psbb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:05:12 crc kubenswrapper[4998]: I0203 07:05:12.340081 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/baf70899-28c3-49dd-93b9-5645b847eb91-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "baf70899-28c3-49dd-93b9-5645b847eb91" (UID: "baf70899-28c3-49dd-93b9-5645b847eb91"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:05:12 crc kubenswrapper[4998]: I0203 07:05:12.366022 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/baf70899-28c3-49dd-93b9-5645b847eb91-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "baf70899-28c3-49dd-93b9-5645b847eb91" (UID: "baf70899-28c3-49dd-93b9-5645b847eb91"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:05:12 crc kubenswrapper[4998]: I0203 07:05:12.368435 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/baf70899-28c3-49dd-93b9-5645b847eb91-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "baf70899-28c3-49dd-93b9-5645b847eb91" (UID: "baf70899-28c3-49dd-93b9-5645b847eb91"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:05:12 crc kubenswrapper[4998]: I0203 07:05:12.369478 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/baf70899-28c3-49dd-93b9-5645b847eb91-scripts" (OuterVolumeSpecName: "scripts") pod "baf70899-28c3-49dd-93b9-5645b847eb91" (UID: "baf70899-28c3-49dd-93b9-5645b847eb91"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:05:12 crc kubenswrapper[4998]: I0203 07:05:12.432313 4998 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/baf70899-28c3-49dd-93b9-5645b847eb91-swiftconf\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:12 crc kubenswrapper[4998]: I0203 07:05:12.432345 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4psbb\" (UniqueName: \"kubernetes.io/projected/baf70899-28c3-49dd-93b9-5645b847eb91-kube-api-access-4psbb\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:12 crc kubenswrapper[4998]: I0203 07:05:12.432353 4998 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/baf70899-28c3-49dd-93b9-5645b847eb91-dispersionconf\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:12 crc kubenswrapper[4998]: I0203 07:05:12.432362 4998 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/baf70899-28c3-49dd-93b9-5645b847eb91-ring-data-devices\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:12 crc kubenswrapper[4998]: I0203 07:05:12.432370 4998 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/baf70899-28c3-49dd-93b9-5645b847eb91-etc-swift\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:12 crc kubenswrapper[4998]: I0203 07:05:12.432377 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/baf70899-28c3-49dd-93b9-5645b847eb91-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:12 crc kubenswrapper[4998]: I0203 07:05:12.432385 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baf70899-28c3-49dd-93b9-5645b847eb91-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:12 crc kubenswrapper[4998]: I0203 07:05:12.444555 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf" path="/var/lib/kubelet/pods/ebea0f6e-b9ea-48b9-9bb0-233f1bc57fcf/volumes" Feb 03 07:05:12 crc kubenswrapper[4998]: I0203 07:05:12.754042 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:05:12 crc kubenswrapper[4998]: I0203 07:05:12.754098 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:05:12 crc kubenswrapper[4998]: I0203 07:05:12.816954 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Feb 03 07:05:12 crc kubenswrapper[4998]: I0203 07:05:12.902994 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:05:12 crc kubenswrapper[4998]: I0203 07:05:12.939820 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-nkj98" 
event={"ID":"baf70899-28c3-49dd-93b9-5645b847eb91","Type":"ContainerDied","Data":"27a6929c8b6dbf68bcf017c9b09f09bd221cb9a47810a6598c94cc074fdd406b"} Feb 03 07:05:12 crc kubenswrapper[4998]: I0203 07:05:12.940178 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="27a6929c8b6dbf68bcf017c9b09f09bd221cb9a47810a6598c94cc074fdd406b" Feb 03 07:05:12 crc kubenswrapper[4998]: I0203 07:05:12.939833 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-nkj98" Feb 03 07:05:12 crc kubenswrapper[4998]: I0203 07:05:12.942281 4998 generic.go:334] "Generic (PLEG): container finished" podID="cf2f7b9f-8064-4194-bff3-481d1e9b4221" containerID="4437c1cd8a0173e6e62c2f463d5924573836fa7c3fdbb56d6f1aa151344acd9d" exitCode=0 Feb 03 07:05:12 crc kubenswrapper[4998]: I0203 07:05:12.942326 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d489f5d97-pntcj" event={"ID":"cf2f7b9f-8064-4194-bff3-481d1e9b4221","Type":"ContainerDied","Data":"4437c1cd8a0173e6e62c2f463d5924573836fa7c3fdbb56d6f1aa151344acd9d"} Feb 03 07:05:12 crc kubenswrapper[4998]: I0203 07:05:12.942363 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d489f5d97-pntcj" event={"ID":"cf2f7b9f-8064-4194-bff3-481d1e9b4221","Type":"ContainerStarted","Data":"7f3eea7af726d074d9cd42bdb3d70a0b4b4226ce9ccf6bbef5e7483f48c9f2da"} Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.188847 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-mq5f7"] Feb 03 07:05:13 crc kubenswrapper[4998]: E0203 07:05:13.189277 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="baf70899-28c3-49dd-93b9-5645b847eb91" containerName="swift-ring-rebalance" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.189304 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="baf70899-28c3-49dd-93b9-5645b847eb91" containerName="swift-ring-rebalance" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.189711 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="baf70899-28c3-49dd-93b9-5645b847eb91" containerName="swift-ring-rebalance" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.190358 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-mq5f7" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.228552 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-mq5f7"] Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.314526 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-vf44v"] Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.316048 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-vf44v" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.333121 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-vf44v"] Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.366313 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hh9d4\" (UniqueName: \"kubernetes.io/projected/21b5727f-2631-4151-acf1-9b77772d57ca-kube-api-access-hh9d4\") pod \"cinder-db-create-mq5f7\" (UID: \"21b5727f-2631-4151-acf1-9b77772d57ca\") " pod="openstack/cinder-db-create-mq5f7" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.367547 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/21b5727f-2631-4151-acf1-9b77772d57ca-operator-scripts\") pod \"cinder-db-create-mq5f7\" (UID: \"21b5727f-2631-4151-acf1-9b77772d57ca\") " pod="openstack/cinder-db-create-mq5f7" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.376598 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-e892-account-create-update-2vvtl"] Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.379796 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-e892-account-create-update-2vvtl" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.385684 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.403610 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-e892-account-create-update-2vvtl"] Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.463198 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-qkqjf"] Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.464697 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-qkqjf" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.470296 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-f31a-account-create-update-g2z99"] Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.471331 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-f31a-account-create-update-g2z99" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.472506 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/21b5727f-2631-4151-acf1-9b77772d57ca-operator-scripts\") pod \"cinder-db-create-mq5f7\" (UID: \"21b5727f-2631-4151-acf1-9b77772d57ca\") " pod="openstack/cinder-db-create-mq5f7" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.472642 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0f02cdb5-1d56-4077-9630-7fc6eaa53bc9-operator-scripts\") pod \"barbican-e892-account-create-update-2vvtl\" (UID: \"0f02cdb5-1d56-4077-9630-7fc6eaa53bc9\") " pod="openstack/barbican-e892-account-create-update-2vvtl" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.472809 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7gx9\" (UniqueName: \"kubernetes.io/projected/0f02cdb5-1d56-4077-9630-7fc6eaa53bc9-kube-api-access-r7gx9\") pod \"barbican-e892-account-create-update-2vvtl\" (UID: \"0f02cdb5-1d56-4077-9630-7fc6eaa53bc9\") " pod="openstack/barbican-e892-account-create-update-2vvtl" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.473201 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-stjzd\" (UniqueName: \"kubernetes.io/projected/f25504d8-c4cd-467e-989f-cefa0f7f6607-kube-api-access-stjzd\") pod \"barbican-db-create-vf44v\" (UID: \"f25504d8-c4cd-467e-989f-cefa0f7f6607\") " pod="openstack/barbican-db-create-vf44v" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.473386 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/21b5727f-2631-4151-acf1-9b77772d57ca-operator-scripts\") pod \"cinder-db-create-mq5f7\" (UID: \"21b5727f-2631-4151-acf1-9b77772d57ca\") " pod="openstack/cinder-db-create-mq5f7" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.473502 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f25504d8-c4cd-467e-989f-cefa0f7f6607-operator-scripts\") pod \"barbican-db-create-vf44v\" (UID: \"f25504d8-c4cd-467e-989f-cefa0f7f6607\") " pod="openstack/barbican-db-create-vf44v" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.473664 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hh9d4\" (UniqueName: \"kubernetes.io/projected/21b5727f-2631-4151-acf1-9b77772d57ca-kube-api-access-hh9d4\") pod \"cinder-db-create-mq5f7\" (UID: \"21b5727f-2631-4151-acf1-9b77772d57ca\") " pod="openstack/cinder-db-create-mq5f7" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.475769 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.484365 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-f31a-account-create-update-g2z99"] Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.503962 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hh9d4\" (UniqueName: \"kubernetes.io/projected/21b5727f-2631-4151-acf1-9b77772d57ca-kube-api-access-hh9d4\") pod 
\"cinder-db-create-mq5f7\" (UID: \"21b5727f-2631-4151-acf1-9b77772d57ca\") " pod="openstack/cinder-db-create-mq5f7" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.515735 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-qkqjf"] Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.525259 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-mq5f7" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.574858 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bb875d5f-c265-49c5-a25d-76999819149b-operator-scripts\") pod \"cinder-f31a-account-create-update-g2z99\" (UID: \"bb875d5f-c265-49c5-a25d-76999819149b\") " pod="openstack/cinder-f31a-account-create-update-g2z99" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.574983 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0f02cdb5-1d56-4077-9630-7fc6eaa53bc9-operator-scripts\") pod \"barbican-e892-account-create-update-2vvtl\" (UID: \"0f02cdb5-1d56-4077-9630-7fc6eaa53bc9\") " pod="openstack/barbican-e892-account-create-update-2vvtl" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.575017 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7gx9\" (UniqueName: \"kubernetes.io/projected/0f02cdb5-1d56-4077-9630-7fc6eaa53bc9-kube-api-access-r7gx9\") pod \"barbican-e892-account-create-update-2vvtl\" (UID: \"0f02cdb5-1d56-4077-9630-7fc6eaa53bc9\") " pod="openstack/barbican-e892-account-create-update-2vvtl" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.575050 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrjgd\" (UniqueName: \"kubernetes.io/projected/bb875d5f-c265-49c5-a25d-76999819149b-kube-api-access-jrjgd\") pod \"cinder-f31a-account-create-update-g2z99\" (UID: \"bb875d5f-c265-49c5-a25d-76999819149b\") " pod="openstack/cinder-f31a-account-create-update-g2z99" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.575082 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-stjzd\" (UniqueName: \"kubernetes.io/projected/f25504d8-c4cd-467e-989f-cefa0f7f6607-kube-api-access-stjzd\") pod \"barbican-db-create-vf44v\" (UID: \"f25504d8-c4cd-467e-989f-cefa0f7f6607\") " pod="openstack/barbican-db-create-vf44v" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.575164 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f25504d8-c4cd-467e-989f-cefa0f7f6607-operator-scripts\") pod \"barbican-db-create-vf44v\" (UID: \"f25504d8-c4cd-467e-989f-cefa0f7f6607\") " pod="openstack/barbican-db-create-vf44v" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.575238 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a31c817-df72-4207-976b-1caee9f68a7c-operator-scripts\") pod \"neutron-db-create-qkqjf\" (UID: \"2a31c817-df72-4207-976b-1caee9f68a7c\") " pod="openstack/neutron-db-create-qkqjf" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.575266 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqdrt\" 
(UniqueName: \"kubernetes.io/projected/2a31c817-df72-4207-976b-1caee9f68a7c-kube-api-access-tqdrt\") pod \"neutron-db-create-qkqjf\" (UID: \"2a31c817-df72-4207-976b-1caee9f68a7c\") " pod="openstack/neutron-db-create-qkqjf" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.576138 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0f02cdb5-1d56-4077-9630-7fc6eaa53bc9-operator-scripts\") pod \"barbican-e892-account-create-update-2vvtl\" (UID: \"0f02cdb5-1d56-4077-9630-7fc6eaa53bc9\") " pod="openstack/barbican-e892-account-create-update-2vvtl" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.576390 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f25504d8-c4cd-467e-989f-cefa0f7f6607-operator-scripts\") pod \"barbican-db-create-vf44v\" (UID: \"f25504d8-c4cd-467e-989f-cefa0f7f6607\") " pod="openstack/barbican-db-create-vf44v" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.596085 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-stjzd\" (UniqueName: \"kubernetes.io/projected/f25504d8-c4cd-467e-989f-cefa0f7f6607-kube-api-access-stjzd\") pod \"barbican-db-create-vf44v\" (UID: \"f25504d8-c4cd-467e-989f-cefa0f7f6607\") " pod="openstack/barbican-db-create-vf44v" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.597803 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7gx9\" (UniqueName: \"kubernetes.io/projected/0f02cdb5-1d56-4077-9630-7fc6eaa53bc9-kube-api-access-r7gx9\") pod \"barbican-e892-account-create-update-2vvtl\" (UID: \"0f02cdb5-1d56-4077-9630-7fc6eaa53bc9\") " pod="openstack/barbican-e892-account-create-update-2vvtl" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.638467 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-vf44v" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.654523 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-4h259"] Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.655870 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-4h259" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.677005 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-k4sf9" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.677201 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.677330 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.687716 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.688207 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23f901f4-5696-4545-99e5-f8b1583b7431-config-data\") pod \"keystone-db-sync-4h259\" (UID: \"23f901f4-5696-4545-99e5-f8b1583b7431\") " pod="openstack/keystone-db-sync-4h259" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.688303 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f86qt\" (UniqueName: \"kubernetes.io/projected/23f901f4-5696-4545-99e5-f8b1583b7431-kube-api-access-f86qt\") pod \"keystone-db-sync-4h259\" (UID: \"23f901f4-5696-4545-99e5-f8b1583b7431\") " pod="openstack/keystone-db-sync-4h259" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.688335 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a31c817-df72-4207-976b-1caee9f68a7c-operator-scripts\") pod \"neutron-db-create-qkqjf\" (UID: \"2a31c817-df72-4207-976b-1caee9f68a7c\") " pod="openstack/neutron-db-create-qkqjf" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.688355 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqdrt\" (UniqueName: \"kubernetes.io/projected/2a31c817-df72-4207-976b-1caee9f68a7c-kube-api-access-tqdrt\") pod \"neutron-db-create-qkqjf\" (UID: \"2a31c817-df72-4207-976b-1caee9f68a7c\") " pod="openstack/neutron-db-create-qkqjf" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.688394 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bb875d5f-c265-49c5-a25d-76999819149b-operator-scripts\") pod \"cinder-f31a-account-create-update-g2z99\" (UID: \"bb875d5f-c265-49c5-a25d-76999819149b\") " pod="openstack/cinder-f31a-account-create-update-g2z99" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.688483 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23f901f4-5696-4545-99e5-f8b1583b7431-combined-ca-bundle\") pod \"keystone-db-sync-4h259\" (UID: \"23f901f4-5696-4545-99e5-f8b1583b7431\") " pod="openstack/keystone-db-sync-4h259" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.688530 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrjgd\" (UniqueName: \"kubernetes.io/projected/bb875d5f-c265-49c5-a25d-76999819149b-kube-api-access-jrjgd\") pod \"cinder-f31a-account-create-update-g2z99\" (UID: \"bb875d5f-c265-49c5-a25d-76999819149b\") " pod="openstack/cinder-f31a-account-create-update-g2z99" Feb 03 07:05:13 crc 
kubenswrapper[4998]: I0203 07:05:13.689621 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bb875d5f-c265-49c5-a25d-76999819149b-operator-scripts\") pod \"cinder-f31a-account-create-update-g2z99\" (UID: \"bb875d5f-c265-49c5-a25d-76999819149b\") " pod="openstack/cinder-f31a-account-create-update-g2z99" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.700461 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a31c817-df72-4207-976b-1caee9f68a7c-operator-scripts\") pod \"neutron-db-create-qkqjf\" (UID: \"2a31c817-df72-4207-976b-1caee9f68a7c\") " pod="openstack/neutron-db-create-qkqjf" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.704153 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-e892-account-create-update-2vvtl" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.704855 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-4h259"] Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.717767 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqdrt\" (UniqueName: \"kubernetes.io/projected/2a31c817-df72-4207-976b-1caee9f68a7c-kube-api-access-tqdrt\") pod \"neutron-db-create-qkqjf\" (UID: \"2a31c817-df72-4207-976b-1caee9f68a7c\") " pod="openstack/neutron-db-create-qkqjf" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.718245 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrjgd\" (UniqueName: \"kubernetes.io/projected/bb875d5f-c265-49c5-a25d-76999819149b-kube-api-access-jrjgd\") pod \"cinder-f31a-account-create-update-g2z99\" (UID: \"bb875d5f-c265-49c5-a25d-76999819149b\") " pod="openstack/cinder-f31a-account-create-update-g2z99" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.784255 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-qkqjf" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.790707 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23f901f4-5696-4545-99e5-f8b1583b7431-config-data\") pod \"keystone-db-sync-4h259\" (UID: \"23f901f4-5696-4545-99e5-f8b1583b7431\") " pod="openstack/keystone-db-sync-4h259" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.790767 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f86qt\" (UniqueName: \"kubernetes.io/projected/23f901f4-5696-4545-99e5-f8b1583b7431-kube-api-access-f86qt\") pod \"keystone-db-sync-4h259\" (UID: \"23f901f4-5696-4545-99e5-f8b1583b7431\") " pod="openstack/keystone-db-sync-4h259" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.790845 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23f901f4-5696-4545-99e5-f8b1583b7431-combined-ca-bundle\") pod \"keystone-db-sync-4h259\" (UID: \"23f901f4-5696-4545-99e5-f8b1583b7431\") " pod="openstack/keystone-db-sync-4h259" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.794813 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-f31a-account-create-update-g2z99" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.795628 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23f901f4-5696-4545-99e5-f8b1583b7431-combined-ca-bundle\") pod \"keystone-db-sync-4h259\" (UID: \"23f901f4-5696-4545-99e5-f8b1583b7431\") " pod="openstack/keystone-db-sync-4h259" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.797445 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23f901f4-5696-4545-99e5-f8b1583b7431-config-data\") pod \"keystone-db-sync-4h259\" (UID: \"23f901f4-5696-4545-99e5-f8b1583b7431\") " pod="openstack/keystone-db-sync-4h259" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.815864 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-f8d7-account-create-update-gctw4"] Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.816817 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-f8d7-account-create-update-gctw4" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.819444 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.821406 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f86qt\" (UniqueName: \"kubernetes.io/projected/23f901f4-5696-4545-99e5-f8b1583b7431-kube-api-access-f86qt\") pod \"keystone-db-sync-4h259\" (UID: \"23f901f4-5696-4545-99e5-f8b1583b7431\") " pod="openstack/keystone-db-sync-4h259" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.835559 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-f8d7-account-create-update-gctw4"] Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.965822 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d489f5d97-pntcj" event={"ID":"cf2f7b9f-8064-4194-bff3-481d1e9b4221","Type":"ContainerStarted","Data":"70411b347a9e959e62d523e1073fa00e328f7777da526db88ffb79ecb94830d9"} Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.966905 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-d489f5d97-pntcj" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.995866 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7zn6j\" (UniqueName: \"kubernetes.io/projected/dac702ac-33bf-46f4-966d-41573aaf1c28-kube-api-access-7zn6j\") pod \"neutron-f8d7-account-create-update-gctw4\" (UID: \"dac702ac-33bf-46f4-966d-41573aaf1c28\") " pod="openstack/neutron-f8d7-account-create-update-gctw4" Feb 03 07:05:13 crc kubenswrapper[4998]: I0203 07:05:13.995997 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dac702ac-33bf-46f4-966d-41573aaf1c28-operator-scripts\") pod \"neutron-f8d7-account-create-update-gctw4\" (UID: \"dac702ac-33bf-46f4-966d-41573aaf1c28\") " pod="openstack/neutron-f8d7-account-create-update-gctw4" Feb 03 07:05:14 crc kubenswrapper[4998]: I0203 07:05:14.097191 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7zn6j\" (UniqueName: \"kubernetes.io/projected/dac702ac-33bf-46f4-966d-41573aaf1c28-kube-api-access-7zn6j\") pod 
\"neutron-f8d7-account-create-update-gctw4\" (UID: \"dac702ac-33bf-46f4-966d-41573aaf1c28\") " pod="openstack/neutron-f8d7-account-create-update-gctw4" Feb 03 07:05:14 crc kubenswrapper[4998]: I0203 07:05:14.097302 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dac702ac-33bf-46f4-966d-41573aaf1c28-operator-scripts\") pod \"neutron-f8d7-account-create-update-gctw4\" (UID: \"dac702ac-33bf-46f4-966d-41573aaf1c28\") " pod="openstack/neutron-f8d7-account-create-update-gctw4" Feb 03 07:05:14 crc kubenswrapper[4998]: I0203 07:05:14.098079 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dac702ac-33bf-46f4-966d-41573aaf1c28-operator-scripts\") pod \"neutron-f8d7-account-create-update-gctw4\" (UID: \"dac702ac-33bf-46f4-966d-41573aaf1c28\") " pod="openstack/neutron-f8d7-account-create-update-gctw4" Feb 03 07:05:14 crc kubenswrapper[4998]: I0203 07:05:14.104532 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-4h259" Feb 03 07:05:14 crc kubenswrapper[4998]: I0203 07:05:14.111689 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-d489f5d97-pntcj" podStartSLOduration=3.111664517 podStartE2EDuration="3.111664517s" podCreationTimestamp="2026-02-03 07:05:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:05:14.010380165 +0000 UTC m=+1152.297073971" watchObservedRunningTime="2026-02-03 07:05:14.111664517 +0000 UTC m=+1152.398358323" Feb 03 07:05:14 crc kubenswrapper[4998]: I0203 07:05:14.114664 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-mq5f7"] Feb 03 07:05:14 crc kubenswrapper[4998]: I0203 07:05:14.144808 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7zn6j\" (UniqueName: \"kubernetes.io/projected/dac702ac-33bf-46f4-966d-41573aaf1c28-kube-api-access-7zn6j\") pod \"neutron-f8d7-account-create-update-gctw4\" (UID: \"dac702ac-33bf-46f4-966d-41573aaf1c28\") " pod="openstack/neutron-f8d7-account-create-update-gctw4" Feb 03 07:05:14 crc kubenswrapper[4998]: I0203 07:05:14.167870 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-f8d7-account-create-update-gctw4" Feb 03 07:05:14 crc kubenswrapper[4998]: I0203 07:05:14.171836 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-vf44v"] Feb 03 07:05:14 crc kubenswrapper[4998]: I0203 07:05:14.527702 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-qkqjf"] Feb 03 07:05:14 crc kubenswrapper[4998]: I0203 07:05:14.548060 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-e892-account-create-update-2vvtl"] Feb 03 07:05:14 crc kubenswrapper[4998]: I0203 07:05:14.566195 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-f31a-account-create-update-g2z99"] Feb 03 07:05:14 crc kubenswrapper[4998]: I0203 07:05:14.615388 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-4h259"] Feb 03 07:05:14 crc kubenswrapper[4998]: W0203 07:05:14.636179 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod23f901f4_5696_4545_99e5_f8b1583b7431.slice/crio-b965d0cdcc29169c93efdc4382e3ab83a05c530e46f7593bed42a3666842ff6c WatchSource:0}: Error finding container b965d0cdcc29169c93efdc4382e3ab83a05c530e46f7593bed42a3666842ff6c: Status 404 returned error can't find the container with id b965d0cdcc29169c93efdc4382e3ab83a05c530e46f7593bed42a3666842ff6c Feb 03 07:05:14 crc kubenswrapper[4998]: I0203 07:05:14.655189 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-f8d7-account-create-update-gctw4"] Feb 03 07:05:14 crc kubenswrapper[4998]: I0203 07:05:14.978338 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-qkqjf" event={"ID":"2a31c817-df72-4207-976b-1caee9f68a7c","Type":"ContainerStarted","Data":"ed0ca0d8ce5e7ade500c4d4aa341d39acc024b8e9236b0f5df87b93ddafb5fda"} Feb 03 07:05:14 crc kubenswrapper[4998]: I0203 07:05:14.983973 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-4h259" event={"ID":"23f901f4-5696-4545-99e5-f8b1583b7431","Type":"ContainerStarted","Data":"b965d0cdcc29169c93efdc4382e3ab83a05c530e46f7593bed42a3666842ff6c"} Feb 03 07:05:14 crc kubenswrapper[4998]: I0203 07:05:14.986353 4998 generic.go:334] "Generic (PLEG): container finished" podID="21b5727f-2631-4151-acf1-9b77772d57ca" containerID="7b34f3b359b15d4babf364e4c5ad6f27c9f05f151f44d5e2d29ae10254e3e288" exitCode=0 Feb 03 07:05:14 crc kubenswrapper[4998]: I0203 07:05:14.986457 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-mq5f7" event={"ID":"21b5727f-2631-4151-acf1-9b77772d57ca","Type":"ContainerDied","Data":"7b34f3b359b15d4babf364e4c5ad6f27c9f05f151f44d5e2d29ae10254e3e288"} Feb 03 07:05:14 crc kubenswrapper[4998]: I0203 07:05:14.986541 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-mq5f7" event={"ID":"21b5727f-2631-4151-acf1-9b77772d57ca","Type":"ContainerStarted","Data":"ebf697667107594f53459d10ae2c8b2b40760bd574bef2cd1ab8b60fc8abd18d"} Feb 03 07:05:14 crc kubenswrapper[4998]: I0203 07:05:14.988411 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-e892-account-create-update-2vvtl" event={"ID":"0f02cdb5-1d56-4077-9630-7fc6eaa53bc9","Type":"ContainerStarted","Data":"747dee0abf2bce7f81d4f7f68353485508ea05f40e7d2df58e6d6bf6cc21b237"} Feb 03 07:05:14 crc kubenswrapper[4998]: I0203 07:05:14.993209 4998 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/cinder-f31a-account-create-update-g2z99" event={"ID":"bb875d5f-c265-49c5-a25d-76999819149b","Type":"ContainerStarted","Data":"4f1694b43d6ece91b8c5a5c30decd4aa8e22a02b86e8c288030c793f3ef52294"} Feb 03 07:05:14 crc kubenswrapper[4998]: I0203 07:05:14.995064 4998 generic.go:334] "Generic (PLEG): container finished" podID="f25504d8-c4cd-467e-989f-cefa0f7f6607" containerID="9839f10fdf00322e85b9f0ff6cf51326745d8c4dee0f46e8b387490e0332e53e" exitCode=0 Feb 03 07:05:14 crc kubenswrapper[4998]: I0203 07:05:14.995202 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-vf44v" event={"ID":"f25504d8-c4cd-467e-989f-cefa0f7f6607","Type":"ContainerDied","Data":"9839f10fdf00322e85b9f0ff6cf51326745d8c4dee0f46e8b387490e0332e53e"} Feb 03 07:05:14 crc kubenswrapper[4998]: I0203 07:05:14.995222 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-vf44v" event={"ID":"f25504d8-c4cd-467e-989f-cefa0f7f6607","Type":"ContainerStarted","Data":"8505443bdb3ecd73cbea91ad9c8a1ee4c3578e1f5c29c9f231e0d48372b17112"} Feb 03 07:05:14 crc kubenswrapper[4998]: I0203 07:05:14.998424 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-f8d7-account-create-update-gctw4" event={"ID":"dac702ac-33bf-46f4-966d-41573aaf1c28","Type":"ContainerStarted","Data":"b90f3e93f1a2083535b02105b53e8f5ad1158a6a328460f8e47711bc7b4e7966"} Feb 03 07:05:16 crc kubenswrapper[4998]: I0203 07:05:16.008660 4998 generic.go:334] "Generic (PLEG): container finished" podID="dac702ac-33bf-46f4-966d-41573aaf1c28" containerID="dddd8ed5fabdd6e303ec79235c5f2936c1faede50d32945f56a100075cd79738" exitCode=0 Feb 03 07:05:16 crc kubenswrapper[4998]: I0203 07:05:16.009039 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-f8d7-account-create-update-gctw4" event={"ID":"dac702ac-33bf-46f4-966d-41573aaf1c28","Type":"ContainerDied","Data":"dddd8ed5fabdd6e303ec79235c5f2936c1faede50d32945f56a100075cd79738"} Feb 03 07:05:16 crc kubenswrapper[4998]: I0203 07:05:16.012702 4998 generic.go:334] "Generic (PLEG): container finished" podID="2a31c817-df72-4207-976b-1caee9f68a7c" containerID="98e79ca3fe1ad81caf41a89c459a6d4ecbc7ebae3b0750029e6181c1ec7c5830" exitCode=0 Feb 03 07:05:16 crc kubenswrapper[4998]: I0203 07:05:16.012754 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-qkqjf" event={"ID":"2a31c817-df72-4207-976b-1caee9f68a7c","Type":"ContainerDied","Data":"98e79ca3fe1ad81caf41a89c459a6d4ecbc7ebae3b0750029e6181c1ec7c5830"} Feb 03 07:05:16 crc kubenswrapper[4998]: I0203 07:05:16.014622 4998 generic.go:334] "Generic (PLEG): container finished" podID="0f02cdb5-1d56-4077-9630-7fc6eaa53bc9" containerID="682e5a4bbf584c44a4069c9d5968a1ba452e90f8f5c4261070ad2e41451a7311" exitCode=0 Feb 03 07:05:16 crc kubenswrapper[4998]: I0203 07:05:16.014678 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-e892-account-create-update-2vvtl" event={"ID":"0f02cdb5-1d56-4077-9630-7fc6eaa53bc9","Type":"ContainerDied","Data":"682e5a4bbf584c44a4069c9d5968a1ba452e90f8f5c4261070ad2e41451a7311"} Feb 03 07:05:16 crc kubenswrapper[4998]: I0203 07:05:16.016519 4998 generic.go:334] "Generic (PLEG): container finished" podID="bb875d5f-c265-49c5-a25d-76999819149b" containerID="ef295711b2e046d68735b381acaf0905d33919b33ca611ce4e5c1b3c06dbea10" exitCode=0 Feb 03 07:05:16 crc kubenswrapper[4998]: I0203 07:05:16.017405 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/cinder-f31a-account-create-update-g2z99" event={"ID":"bb875d5f-c265-49c5-a25d-76999819149b","Type":"ContainerDied","Data":"ef295711b2e046d68735b381acaf0905d33919b33ca611ce4e5c1b3c06dbea10"} Feb 03 07:05:16 crc kubenswrapper[4998]: I0203 07:05:16.459398 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-vf44v" Feb 03 07:05:16 crc kubenswrapper[4998]: I0203 07:05:16.471481 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-stjzd\" (UniqueName: \"kubernetes.io/projected/f25504d8-c4cd-467e-989f-cefa0f7f6607-kube-api-access-stjzd\") pod \"f25504d8-c4cd-467e-989f-cefa0f7f6607\" (UID: \"f25504d8-c4cd-467e-989f-cefa0f7f6607\") " Feb 03 07:05:16 crc kubenswrapper[4998]: I0203 07:05:16.471633 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f25504d8-c4cd-467e-989f-cefa0f7f6607-operator-scripts\") pod \"f25504d8-c4cd-467e-989f-cefa0f7f6607\" (UID: \"f25504d8-c4cd-467e-989f-cefa0f7f6607\") " Feb 03 07:05:16 crc kubenswrapper[4998]: I0203 07:05:16.474322 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f25504d8-c4cd-467e-989f-cefa0f7f6607-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f25504d8-c4cd-467e-989f-cefa0f7f6607" (UID: "f25504d8-c4cd-467e-989f-cefa0f7f6607"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:05:16 crc kubenswrapper[4998]: I0203 07:05:16.477753 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f25504d8-c4cd-467e-989f-cefa0f7f6607-kube-api-access-stjzd" (OuterVolumeSpecName: "kube-api-access-stjzd") pod "f25504d8-c4cd-467e-989f-cefa0f7f6607" (UID: "f25504d8-c4cd-467e-989f-cefa0f7f6607"). InnerVolumeSpecName "kube-api-access-stjzd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:05:16 crc kubenswrapper[4998]: I0203 07:05:16.574858 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f25504d8-c4cd-467e-989f-cefa0f7f6607-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:16 crc kubenswrapper[4998]: I0203 07:05:16.574883 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-stjzd\" (UniqueName: \"kubernetes.io/projected/f25504d8-c4cd-467e-989f-cefa0f7f6607-kube-api-access-stjzd\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:17 crc kubenswrapper[4998]: I0203 07:05:17.026597 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-vf44v" event={"ID":"f25504d8-c4cd-467e-989f-cefa0f7f6607","Type":"ContainerDied","Data":"8505443bdb3ecd73cbea91ad9c8a1ee4c3578e1f5c29c9f231e0d48372b17112"} Feb 03 07:05:17 crc kubenswrapper[4998]: I0203 07:05:17.026646 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8505443bdb3ecd73cbea91ad9c8a1ee4c3578e1f5c29c9f231e0d48372b17112" Feb 03 07:05:17 crc kubenswrapper[4998]: I0203 07:05:17.026655 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-vf44v" Feb 03 07:05:19 crc kubenswrapper[4998]: I0203 07:05:19.431380 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-f8d7-account-create-update-gctw4" Feb 03 07:05:19 crc kubenswrapper[4998]: I0203 07:05:19.447018 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-qkqjf" Feb 03 07:05:19 crc kubenswrapper[4998]: I0203 07:05:19.470745 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-f31a-account-create-update-g2z99" Feb 03 07:05:19 crc kubenswrapper[4998]: I0203 07:05:19.495746 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-e892-account-create-update-2vvtl" Feb 03 07:05:19 crc kubenswrapper[4998]: I0203 07:05:19.497769 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-mq5f7" Feb 03 07:05:19 crc kubenswrapper[4998]: I0203 07:05:19.641712 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0f02cdb5-1d56-4077-9630-7fc6eaa53bc9-operator-scripts\") pod \"0f02cdb5-1d56-4077-9630-7fc6eaa53bc9\" (UID: \"0f02cdb5-1d56-4077-9630-7fc6eaa53bc9\") " Feb 03 07:05:19 crc kubenswrapper[4998]: I0203 07:05:19.641835 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hh9d4\" (UniqueName: \"kubernetes.io/projected/21b5727f-2631-4151-acf1-9b77772d57ca-kube-api-access-hh9d4\") pod \"21b5727f-2631-4151-acf1-9b77772d57ca\" (UID: \"21b5727f-2631-4151-acf1-9b77772d57ca\") " Feb 03 07:05:19 crc kubenswrapper[4998]: I0203 07:05:19.641942 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bb875d5f-c265-49c5-a25d-76999819149b-operator-scripts\") pod \"bb875d5f-c265-49c5-a25d-76999819149b\" (UID: \"bb875d5f-c265-49c5-a25d-76999819149b\") " Feb 03 07:05:19 crc kubenswrapper[4998]: I0203 07:05:19.642019 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jrjgd\" (UniqueName: \"kubernetes.io/projected/bb875d5f-c265-49c5-a25d-76999819149b-kube-api-access-jrjgd\") pod \"bb875d5f-c265-49c5-a25d-76999819149b\" (UID: \"bb875d5f-c265-49c5-a25d-76999819149b\") " Feb 03 07:05:19 crc kubenswrapper[4998]: I0203 07:05:19.642100 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tqdrt\" (UniqueName: \"kubernetes.io/projected/2a31c817-df72-4207-976b-1caee9f68a7c-kube-api-access-tqdrt\") pod \"2a31c817-df72-4207-976b-1caee9f68a7c\" (UID: \"2a31c817-df72-4207-976b-1caee9f68a7c\") " Feb 03 07:05:19 crc kubenswrapper[4998]: I0203 07:05:19.642134 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7zn6j\" (UniqueName: \"kubernetes.io/projected/dac702ac-33bf-46f4-966d-41573aaf1c28-kube-api-access-7zn6j\") pod \"dac702ac-33bf-46f4-966d-41573aaf1c28\" (UID: \"dac702ac-33bf-46f4-966d-41573aaf1c28\") " Feb 03 07:05:19 crc kubenswrapper[4998]: I0203 07:05:19.642184 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r7gx9\" (UniqueName: \"kubernetes.io/projected/0f02cdb5-1d56-4077-9630-7fc6eaa53bc9-kube-api-access-r7gx9\") pod \"0f02cdb5-1d56-4077-9630-7fc6eaa53bc9\" (UID: \"0f02cdb5-1d56-4077-9630-7fc6eaa53bc9\") " Feb 03 07:05:19 crc kubenswrapper[4998]: I0203 07:05:19.642210 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/21b5727f-2631-4151-acf1-9b77772d57ca-operator-scripts\") pod \"21b5727f-2631-4151-acf1-9b77772d57ca\" (UID: \"21b5727f-2631-4151-acf1-9b77772d57ca\") " Feb 03 07:05:19 crc kubenswrapper[4998]: I0203 07:05:19.642244 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dac702ac-33bf-46f4-966d-41573aaf1c28-operator-scripts\") pod \"dac702ac-33bf-46f4-966d-41573aaf1c28\" (UID: \"dac702ac-33bf-46f4-966d-41573aaf1c28\") " Feb 03 07:05:19 crc kubenswrapper[4998]: I0203 07:05:19.642286 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a31c817-df72-4207-976b-1caee9f68a7c-operator-scripts\") pod \"2a31c817-df72-4207-976b-1caee9f68a7c\" (UID: \"2a31c817-df72-4207-976b-1caee9f68a7c\") " Feb 03 07:05:19 crc kubenswrapper[4998]: I0203 07:05:19.644888 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/21b5727f-2631-4151-acf1-9b77772d57ca-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "21b5727f-2631-4151-acf1-9b77772d57ca" (UID: "21b5727f-2631-4151-acf1-9b77772d57ca"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:05:19 crc kubenswrapper[4998]: I0203 07:05:19.644893 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bb875d5f-c265-49c5-a25d-76999819149b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "bb875d5f-c265-49c5-a25d-76999819149b" (UID: "bb875d5f-c265-49c5-a25d-76999819149b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:05:19 crc kubenswrapper[4998]: I0203 07:05:19.645960 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2a31c817-df72-4207-976b-1caee9f68a7c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2a31c817-df72-4207-976b-1caee9f68a7c" (UID: "2a31c817-df72-4207-976b-1caee9f68a7c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:05:19 crc kubenswrapper[4998]: I0203 07:05:19.646012 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dac702ac-33bf-46f4-966d-41573aaf1c28-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "dac702ac-33bf-46f4-966d-41573aaf1c28" (UID: "dac702ac-33bf-46f4-966d-41573aaf1c28"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:05:19 crc kubenswrapper[4998]: I0203 07:05:19.647384 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f02cdb5-1d56-4077-9630-7fc6eaa53bc9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0f02cdb5-1d56-4077-9630-7fc6eaa53bc9" (UID: "0f02cdb5-1d56-4077-9630-7fc6eaa53bc9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:05:19 crc kubenswrapper[4998]: I0203 07:05:19.647903 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21b5727f-2631-4151-acf1-9b77772d57ca-kube-api-access-hh9d4" (OuterVolumeSpecName: "kube-api-access-hh9d4") pod "21b5727f-2631-4151-acf1-9b77772d57ca" (UID: "21b5727f-2631-4151-acf1-9b77772d57ca"). 
InnerVolumeSpecName "kube-api-access-hh9d4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:05:19 crc kubenswrapper[4998]: I0203 07:05:19.649000 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f02cdb5-1d56-4077-9630-7fc6eaa53bc9-kube-api-access-r7gx9" (OuterVolumeSpecName: "kube-api-access-r7gx9") pod "0f02cdb5-1d56-4077-9630-7fc6eaa53bc9" (UID: "0f02cdb5-1d56-4077-9630-7fc6eaa53bc9"). InnerVolumeSpecName "kube-api-access-r7gx9". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:05:19 crc kubenswrapper[4998]: I0203 07:05:19.649713 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a31c817-df72-4207-976b-1caee9f68a7c-kube-api-access-tqdrt" (OuterVolumeSpecName: "kube-api-access-tqdrt") pod "2a31c817-df72-4207-976b-1caee9f68a7c" (UID: "2a31c817-df72-4207-976b-1caee9f68a7c"). InnerVolumeSpecName "kube-api-access-tqdrt". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:05:19 crc kubenswrapper[4998]: I0203 07:05:19.649885 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb875d5f-c265-49c5-a25d-76999819149b-kube-api-access-jrjgd" (OuterVolumeSpecName: "kube-api-access-jrjgd") pod "bb875d5f-c265-49c5-a25d-76999819149b" (UID: "bb875d5f-c265-49c5-a25d-76999819149b"). InnerVolumeSpecName "kube-api-access-jrjgd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:05:19 crc kubenswrapper[4998]: I0203 07:05:19.651077 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dac702ac-33bf-46f4-966d-41573aaf1c28-kube-api-access-7zn6j" (OuterVolumeSpecName: "kube-api-access-7zn6j") pod "dac702ac-33bf-46f4-966d-41573aaf1c28" (UID: "dac702ac-33bf-46f4-966d-41573aaf1c28"). InnerVolumeSpecName "kube-api-access-7zn6j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:05:19 crc kubenswrapper[4998]: I0203 07:05:19.744385 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tqdrt\" (UniqueName: \"kubernetes.io/projected/2a31c817-df72-4207-976b-1caee9f68a7c-kube-api-access-tqdrt\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:19 crc kubenswrapper[4998]: I0203 07:05:19.744441 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7zn6j\" (UniqueName: \"kubernetes.io/projected/dac702ac-33bf-46f4-966d-41573aaf1c28-kube-api-access-7zn6j\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:19 crc kubenswrapper[4998]: I0203 07:05:19.744457 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/21b5727f-2631-4151-acf1-9b77772d57ca-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:19 crc kubenswrapper[4998]: I0203 07:05:19.744471 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r7gx9\" (UniqueName: \"kubernetes.io/projected/0f02cdb5-1d56-4077-9630-7fc6eaa53bc9-kube-api-access-r7gx9\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:19 crc kubenswrapper[4998]: I0203 07:05:19.744486 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dac702ac-33bf-46f4-966d-41573aaf1c28-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:19 crc kubenswrapper[4998]: I0203 07:05:19.744500 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a31c817-df72-4207-976b-1caee9f68a7c-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:19 crc kubenswrapper[4998]: I0203 07:05:19.744513 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0f02cdb5-1d56-4077-9630-7fc6eaa53bc9-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:19 crc kubenswrapper[4998]: I0203 07:05:19.744525 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hh9d4\" (UniqueName: \"kubernetes.io/projected/21b5727f-2631-4151-acf1-9b77772d57ca-kube-api-access-hh9d4\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:19 crc kubenswrapper[4998]: I0203 07:05:19.744539 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bb875d5f-c265-49c5-a25d-76999819149b-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:19 crc kubenswrapper[4998]: I0203 07:05:19.744551 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jrjgd\" (UniqueName: \"kubernetes.io/projected/bb875d5f-c265-49c5-a25d-76999819149b-kube-api-access-jrjgd\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:20 crc kubenswrapper[4998]: I0203 07:05:20.057446 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-mq5f7" event={"ID":"21b5727f-2631-4151-acf1-9b77772d57ca","Type":"ContainerDied","Data":"ebf697667107594f53459d10ae2c8b2b40760bd574bef2cd1ab8b60fc8abd18d"} Feb 03 07:05:20 crc kubenswrapper[4998]: I0203 07:05:20.057482 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-mq5f7" Feb 03 07:05:20 crc kubenswrapper[4998]: I0203 07:05:20.057487 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ebf697667107594f53459d10ae2c8b2b40760bd574bef2cd1ab8b60fc8abd18d" Feb 03 07:05:20 crc kubenswrapper[4998]: I0203 07:05:20.059994 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-e892-account-create-update-2vvtl" Feb 03 07:05:20 crc kubenswrapper[4998]: I0203 07:05:20.059991 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-e892-account-create-update-2vvtl" event={"ID":"0f02cdb5-1d56-4077-9630-7fc6eaa53bc9","Type":"ContainerDied","Data":"747dee0abf2bce7f81d4f7f68353485508ea05f40e7d2df58e6d6bf6cc21b237"} Feb 03 07:05:20 crc kubenswrapper[4998]: I0203 07:05:20.060103 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="747dee0abf2bce7f81d4f7f68353485508ea05f40e7d2df58e6d6bf6cc21b237" Feb 03 07:05:20 crc kubenswrapper[4998]: I0203 07:05:20.062657 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-f31a-account-create-update-g2z99" event={"ID":"bb875d5f-c265-49c5-a25d-76999819149b","Type":"ContainerDied","Data":"4f1694b43d6ece91b8c5a5c30decd4aa8e22a02b86e8c288030c793f3ef52294"} Feb 03 07:05:20 crc kubenswrapper[4998]: I0203 07:05:20.062696 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4f1694b43d6ece91b8c5a5c30decd4aa8e22a02b86e8c288030c793f3ef52294" Feb 03 07:05:20 crc kubenswrapper[4998]: I0203 07:05:20.062763 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-f31a-account-create-update-g2z99" Feb 03 07:05:20 crc kubenswrapper[4998]: I0203 07:05:20.079013 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-f8d7-account-create-update-gctw4" event={"ID":"dac702ac-33bf-46f4-966d-41573aaf1c28","Type":"ContainerDied","Data":"b90f3e93f1a2083535b02105b53e8f5ad1158a6a328460f8e47711bc7b4e7966"} Feb 03 07:05:20 crc kubenswrapper[4998]: I0203 07:05:20.079070 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-f8d7-account-create-update-gctw4" Feb 03 07:05:20 crc kubenswrapper[4998]: I0203 07:05:20.079083 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b90f3e93f1a2083535b02105b53e8f5ad1158a6a328460f8e47711bc7b4e7966" Feb 03 07:05:20 crc kubenswrapper[4998]: I0203 07:05:20.083566 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-4h259" event={"ID":"23f901f4-5696-4545-99e5-f8b1583b7431","Type":"ContainerStarted","Data":"838afd8286ed26e4ad4d792c8d67e2582c1b7b33d6d12d95ec79703f142f40cc"} Feb 03 07:05:20 crc kubenswrapper[4998]: I0203 07:05:20.085979 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-qkqjf" event={"ID":"2a31c817-df72-4207-976b-1caee9f68a7c","Type":"ContainerDied","Data":"ed0ca0d8ce5e7ade500c4d4aa341d39acc024b8e9236b0f5df87b93ddafb5fda"} Feb 03 07:05:20 crc kubenswrapper[4998]: I0203 07:05:20.086020 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ed0ca0d8ce5e7ade500c4d4aa341d39acc024b8e9236b0f5df87b93ddafb5fda" Feb 03 07:05:20 crc kubenswrapper[4998]: I0203 07:05:20.086071 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-qkqjf" Feb 03 07:05:20 crc kubenswrapper[4998]: I0203 07:05:20.115683 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-4h259" podStartSLOduration=2.463186312 podStartE2EDuration="7.115666602s" podCreationTimestamp="2026-02-03 07:05:13 +0000 UTC" firstStartedPulling="2026-02-03 07:05:14.644286552 +0000 UTC m=+1152.930980358" lastFinishedPulling="2026-02-03 07:05:19.296766832 +0000 UTC m=+1157.583460648" observedRunningTime="2026-02-03 07:05:20.110924896 +0000 UTC m=+1158.397618752" watchObservedRunningTime="2026-02-03 07:05:20.115666602 +0000 UTC m=+1158.402360408" Feb 03 07:05:20 crc kubenswrapper[4998]: I0203 07:05:20.663928 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/09cd9158-f279-4ac0-b8fe-0121e85a1b20-etc-swift\") pod \"swift-storage-0\" (UID: \"09cd9158-f279-4ac0-b8fe-0121e85a1b20\") " pod="openstack/swift-storage-0" Feb 03 07:05:20 crc kubenswrapper[4998]: I0203 07:05:20.669960 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/09cd9158-f279-4ac0-b8fe-0121e85a1b20-etc-swift\") pod \"swift-storage-0\" (UID: \"09cd9158-f279-4ac0-b8fe-0121e85a1b20\") " pod="openstack/swift-storage-0" Feb 03 07:05:20 crc kubenswrapper[4998]: I0203 07:05:20.824906 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Feb 03 07:05:21 crc kubenswrapper[4998]: W0203 07:05:21.365802 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod09cd9158_f279_4ac0_b8fe_0121e85a1b20.slice/crio-d81c875fc5e7c5ea4b49f164f1c6719b557ebd7638ddc825f96d9a7ced3cb36b WatchSource:0}: Error finding container d81c875fc5e7c5ea4b49f164f1c6719b557ebd7638ddc825f96d9a7ced3cb36b: Status 404 returned error can't find the container with id d81c875fc5e7c5ea4b49f164f1c6719b557ebd7638ddc825f96d9a7ced3cb36b Feb 03 07:05:21 crc kubenswrapper[4998]: I0203 07:05:21.368595 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Feb 03 07:05:21 crc kubenswrapper[4998]: I0203 07:05:21.682028 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-d489f5d97-pntcj" Feb 03 07:05:21 crc kubenswrapper[4998]: I0203 07:05:21.757278 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-66b577f8c-wf7kr"] Feb 03 07:05:21 crc kubenswrapper[4998]: I0203 07:05:21.757707 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-66b577f8c-wf7kr" podUID="c394067f-a8c9-412f-b55b-cd70e1081299" containerName="dnsmasq-dns" containerID="cri-o://7ca4ebc50b4c6320863dbfcf759eda98708bcd113f8b880b8e8f248e221525a8" gracePeriod=10 Feb 03 07:05:22 crc kubenswrapper[4998]: I0203 07:05:22.101449 4998 generic.go:334] "Generic (PLEG): container finished" podID="c394067f-a8c9-412f-b55b-cd70e1081299" containerID="7ca4ebc50b4c6320863dbfcf759eda98708bcd113f8b880b8e8f248e221525a8" exitCode=0 Feb 03 07:05:22 crc kubenswrapper[4998]: I0203 07:05:22.101529 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66b577f8c-wf7kr" event={"ID":"c394067f-a8c9-412f-b55b-cd70e1081299","Type":"ContainerDied","Data":"7ca4ebc50b4c6320863dbfcf759eda98708bcd113f8b880b8e8f248e221525a8"} Feb 03 07:05:22 crc 
kubenswrapper[4998]: I0203 07:05:22.102942 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"09cd9158-f279-4ac0-b8fe-0121e85a1b20","Type":"ContainerStarted","Data":"d81c875fc5e7c5ea4b49f164f1c6719b557ebd7638ddc825f96d9a7ced3cb36b"} Feb 03 07:05:22 crc kubenswrapper[4998]: I0203 07:05:22.292240 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-66b577f8c-wf7kr" Feb 03 07:05:22 crc kubenswrapper[4998]: I0203 07:05:22.398019 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c394067f-a8c9-412f-b55b-cd70e1081299-ovsdbserver-sb\") pod \"c394067f-a8c9-412f-b55b-cd70e1081299\" (UID: \"c394067f-a8c9-412f-b55b-cd70e1081299\") " Feb 03 07:05:22 crc kubenswrapper[4998]: I0203 07:05:22.398137 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c394067f-a8c9-412f-b55b-cd70e1081299-config\") pod \"c394067f-a8c9-412f-b55b-cd70e1081299\" (UID: \"c394067f-a8c9-412f-b55b-cd70e1081299\") " Feb 03 07:05:22 crc kubenswrapper[4998]: I0203 07:05:22.398163 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c394067f-a8c9-412f-b55b-cd70e1081299-dns-svc\") pod \"c394067f-a8c9-412f-b55b-cd70e1081299\" (UID: \"c394067f-a8c9-412f-b55b-cd70e1081299\") " Feb 03 07:05:22 crc kubenswrapper[4998]: I0203 07:05:22.398189 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c394067f-a8c9-412f-b55b-cd70e1081299-ovsdbserver-nb\") pod \"c394067f-a8c9-412f-b55b-cd70e1081299\" (UID: \"c394067f-a8c9-412f-b55b-cd70e1081299\") " Feb 03 07:05:22 crc kubenswrapper[4998]: I0203 07:05:22.398265 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r684x\" (UniqueName: \"kubernetes.io/projected/c394067f-a8c9-412f-b55b-cd70e1081299-kube-api-access-r684x\") pod \"c394067f-a8c9-412f-b55b-cd70e1081299\" (UID: \"c394067f-a8c9-412f-b55b-cd70e1081299\") " Feb 03 07:05:22 crc kubenswrapper[4998]: I0203 07:05:22.411238 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c394067f-a8c9-412f-b55b-cd70e1081299-kube-api-access-r684x" (OuterVolumeSpecName: "kube-api-access-r684x") pod "c394067f-a8c9-412f-b55b-cd70e1081299" (UID: "c394067f-a8c9-412f-b55b-cd70e1081299"). InnerVolumeSpecName "kube-api-access-r684x". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:05:22 crc kubenswrapper[4998]: I0203 07:05:22.442605 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c394067f-a8c9-412f-b55b-cd70e1081299-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c394067f-a8c9-412f-b55b-cd70e1081299" (UID: "c394067f-a8c9-412f-b55b-cd70e1081299"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:05:22 crc kubenswrapper[4998]: I0203 07:05:22.446418 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c394067f-a8c9-412f-b55b-cd70e1081299-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c394067f-a8c9-412f-b55b-cd70e1081299" (UID: "c394067f-a8c9-412f-b55b-cd70e1081299"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:05:22 crc kubenswrapper[4998]: I0203 07:05:22.447111 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c394067f-a8c9-412f-b55b-cd70e1081299-config" (OuterVolumeSpecName: "config") pod "c394067f-a8c9-412f-b55b-cd70e1081299" (UID: "c394067f-a8c9-412f-b55b-cd70e1081299"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:05:22 crc kubenswrapper[4998]: I0203 07:05:22.454987 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c394067f-a8c9-412f-b55b-cd70e1081299-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c394067f-a8c9-412f-b55b-cd70e1081299" (UID: "c394067f-a8c9-412f-b55b-cd70e1081299"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:05:22 crc kubenswrapper[4998]: I0203 07:05:22.502081 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r684x\" (UniqueName: \"kubernetes.io/projected/c394067f-a8c9-412f-b55b-cd70e1081299-kube-api-access-r684x\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:22 crc kubenswrapper[4998]: I0203 07:05:22.502376 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c394067f-a8c9-412f-b55b-cd70e1081299-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:22 crc kubenswrapper[4998]: I0203 07:05:22.502477 4998 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c394067f-a8c9-412f-b55b-cd70e1081299-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:22 crc kubenswrapper[4998]: I0203 07:05:22.502551 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c394067f-a8c9-412f-b55b-cd70e1081299-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:22 crc kubenswrapper[4998]: I0203 07:05:22.502652 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c394067f-a8c9-412f-b55b-cd70e1081299-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:23 crc kubenswrapper[4998]: I0203 07:05:23.112593 4998 generic.go:334] "Generic (PLEG): container finished" podID="23f901f4-5696-4545-99e5-f8b1583b7431" containerID="838afd8286ed26e4ad4d792c8d67e2582c1b7b33d6d12d95ec79703f142f40cc" exitCode=0 Feb 03 07:05:23 crc kubenswrapper[4998]: I0203 07:05:23.112684 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-4h259" event={"ID":"23f901f4-5696-4545-99e5-f8b1583b7431","Type":"ContainerDied","Data":"838afd8286ed26e4ad4d792c8d67e2582c1b7b33d6d12d95ec79703f142f40cc"} Feb 03 07:05:23 crc kubenswrapper[4998]: I0203 07:05:23.117259 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66b577f8c-wf7kr" event={"ID":"c394067f-a8c9-412f-b55b-cd70e1081299","Type":"ContainerDied","Data":"731d1686e40519edd327df359dae4ea3b2fd2612b08abf6caf6932569e34a58a"} Feb 03 07:05:23 crc kubenswrapper[4998]: I0203 07:05:23.117295 4998 scope.go:117] "RemoveContainer" containerID="7ca4ebc50b4c6320863dbfcf759eda98708bcd113f8b880b8e8f248e221525a8" Feb 03 07:05:23 crc kubenswrapper[4998]: I0203 07:05:23.117332 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-66b577f8c-wf7kr" Feb 03 07:05:23 crc kubenswrapper[4998]: I0203 07:05:23.134950 4998 scope.go:117] "RemoveContainer" containerID="bfc537a7f6aad47ea27513107fa72a04e9180b10fc20db7547861380a59a5351" Feb 03 07:05:23 crc kubenswrapper[4998]: I0203 07:05:23.156499 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-66b577f8c-wf7kr"] Feb 03 07:05:23 crc kubenswrapper[4998]: I0203 07:05:23.162620 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-66b577f8c-wf7kr"] Feb 03 07:05:24 crc kubenswrapper[4998]: I0203 07:05:24.129015 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"09cd9158-f279-4ac0-b8fe-0121e85a1b20","Type":"ContainerStarted","Data":"1f16d4c637461caae324eec51a6c3d5da587838e38267ba1cef416d450d84f75"} Feb 03 07:05:24 crc kubenswrapper[4998]: I0203 07:05:24.129369 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"09cd9158-f279-4ac0-b8fe-0121e85a1b20","Type":"ContainerStarted","Data":"3eb1016c4bf7e03e9ac5f399eb54ed4c74b192d6e526a912f85055ec2a7696de"} Feb 03 07:05:24 crc kubenswrapper[4998]: I0203 07:05:24.129384 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"09cd9158-f279-4ac0-b8fe-0121e85a1b20","Type":"ContainerStarted","Data":"3a7581f975cd3425a6842b0b3e0cc3c6d6eb2078e3a460284ad126263371957f"} Feb 03 07:05:24 crc kubenswrapper[4998]: I0203 07:05:24.129396 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"09cd9158-f279-4ac0-b8fe-0121e85a1b20","Type":"ContainerStarted","Data":"0f784b826f4672ca3856308b4f90c8ceacf08cf471c4dba8d28ef78a91d7985f"} Feb 03 07:05:24 crc kubenswrapper[4998]: I0203 07:05:24.438832 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c394067f-a8c9-412f-b55b-cd70e1081299" path="/var/lib/kubelet/pods/c394067f-a8c9-412f-b55b-cd70e1081299/volumes" Feb 03 07:05:24 crc kubenswrapper[4998]: I0203 07:05:24.507055 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-4h259" Feb 03 07:05:24 crc kubenswrapper[4998]: I0203 07:05:24.635450 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23f901f4-5696-4545-99e5-f8b1583b7431-config-data\") pod \"23f901f4-5696-4545-99e5-f8b1583b7431\" (UID: \"23f901f4-5696-4545-99e5-f8b1583b7431\") " Feb 03 07:05:24 crc kubenswrapper[4998]: I0203 07:05:24.635497 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23f901f4-5696-4545-99e5-f8b1583b7431-combined-ca-bundle\") pod \"23f901f4-5696-4545-99e5-f8b1583b7431\" (UID: \"23f901f4-5696-4545-99e5-f8b1583b7431\") " Feb 03 07:05:24 crc kubenswrapper[4998]: I0203 07:05:24.635522 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f86qt\" (UniqueName: \"kubernetes.io/projected/23f901f4-5696-4545-99e5-f8b1583b7431-kube-api-access-f86qt\") pod \"23f901f4-5696-4545-99e5-f8b1583b7431\" (UID: \"23f901f4-5696-4545-99e5-f8b1583b7431\") " Feb 03 07:05:24 crc kubenswrapper[4998]: I0203 07:05:24.647941 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23f901f4-5696-4545-99e5-f8b1583b7431-kube-api-access-f86qt" (OuterVolumeSpecName: "kube-api-access-f86qt") pod "23f901f4-5696-4545-99e5-f8b1583b7431" (UID: "23f901f4-5696-4545-99e5-f8b1583b7431"). InnerVolumeSpecName "kube-api-access-f86qt". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:05:24 crc kubenswrapper[4998]: I0203 07:05:24.655316 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23f901f4-5696-4545-99e5-f8b1583b7431-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "23f901f4-5696-4545-99e5-f8b1583b7431" (UID: "23f901f4-5696-4545-99e5-f8b1583b7431"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:05:24 crc kubenswrapper[4998]: I0203 07:05:24.680907 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23f901f4-5696-4545-99e5-f8b1583b7431-config-data" (OuterVolumeSpecName: "config-data") pod "23f901f4-5696-4545-99e5-f8b1583b7431" (UID: "23f901f4-5696-4545-99e5-f8b1583b7431"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:05:24 crc kubenswrapper[4998]: I0203 07:05:24.737170 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23f901f4-5696-4545-99e5-f8b1583b7431-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:24 crc kubenswrapper[4998]: I0203 07:05:24.737199 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23f901f4-5696-4545-99e5-f8b1583b7431-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:24 crc kubenswrapper[4998]: I0203 07:05:24.737209 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f86qt\" (UniqueName: \"kubernetes.io/projected/23f901f4-5696-4545-99e5-f8b1583b7431-kube-api-access-f86qt\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.160296 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-4h259" event={"ID":"23f901f4-5696-4545-99e5-f8b1583b7431","Type":"ContainerDied","Data":"b965d0cdcc29169c93efdc4382e3ab83a05c530e46f7593bed42a3666842ff6c"} Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.160661 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b965d0cdcc29169c93efdc4382e3ab83a05c530e46f7593bed42a3666842ff6c" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.160559 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-4h259" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.187329 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"09cd9158-f279-4ac0-b8fe-0121e85a1b20","Type":"ContainerStarted","Data":"5c9ec6bd23284c2b390c4776da0627623c5642e7460b4cf117dc28bd400f8c57"} Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.187368 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"09cd9158-f279-4ac0-b8fe-0121e85a1b20","Type":"ContainerStarted","Data":"e6e7540edfe4023348abde594012e510b3a4f31157f2972b831ea5fad3893831"} Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.187382 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"09cd9158-f279-4ac0-b8fe-0121e85a1b20","Type":"ContainerStarted","Data":"ddd991a0dcb30ff0ae66164089cfc01b4cf70c05ee6132bec706f3086cbf8a29"} Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.407799 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-f6jsx"] Feb 03 07:05:25 crc kubenswrapper[4998]: E0203 07:05:25.408246 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f25504d8-c4cd-467e-989f-cefa0f7f6607" containerName="mariadb-database-create" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.408284 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f25504d8-c4cd-467e-989f-cefa0f7f6607" containerName="mariadb-database-create" Feb 03 07:05:25 crc kubenswrapper[4998]: E0203 07:05:25.408297 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a31c817-df72-4207-976b-1caee9f68a7c" containerName="mariadb-database-create" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.408306 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a31c817-df72-4207-976b-1caee9f68a7c" containerName="mariadb-database-create" Feb 03 07:05:25 crc kubenswrapper[4998]: E0203 07:05:25.408323 4998 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="dac702ac-33bf-46f4-966d-41573aaf1c28" containerName="mariadb-account-create-update" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.408331 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="dac702ac-33bf-46f4-966d-41573aaf1c28" containerName="mariadb-account-create-update" Feb 03 07:05:25 crc kubenswrapper[4998]: E0203 07:05:25.408345 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21b5727f-2631-4151-acf1-9b77772d57ca" containerName="mariadb-database-create" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.408352 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="21b5727f-2631-4151-acf1-9b77772d57ca" containerName="mariadb-database-create" Feb 03 07:05:25 crc kubenswrapper[4998]: E0203 07:05:25.408367 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23f901f4-5696-4545-99e5-f8b1583b7431" containerName="keystone-db-sync" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.408374 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="23f901f4-5696-4545-99e5-f8b1583b7431" containerName="keystone-db-sync" Feb 03 07:05:25 crc kubenswrapper[4998]: E0203 07:05:25.408388 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb875d5f-c265-49c5-a25d-76999819149b" containerName="mariadb-account-create-update" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.408396 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb875d5f-c265-49c5-a25d-76999819149b" containerName="mariadb-account-create-update" Feb 03 07:05:25 crc kubenswrapper[4998]: E0203 07:05:25.408408 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c394067f-a8c9-412f-b55b-cd70e1081299" containerName="init" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.408415 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="c394067f-a8c9-412f-b55b-cd70e1081299" containerName="init" Feb 03 07:05:25 crc kubenswrapper[4998]: E0203 07:05:25.408432 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f02cdb5-1d56-4077-9630-7fc6eaa53bc9" containerName="mariadb-account-create-update" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.408440 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f02cdb5-1d56-4077-9630-7fc6eaa53bc9" containerName="mariadb-account-create-update" Feb 03 07:05:25 crc kubenswrapper[4998]: E0203 07:05:25.408457 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c394067f-a8c9-412f-b55b-cd70e1081299" containerName="dnsmasq-dns" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.408465 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="c394067f-a8c9-412f-b55b-cd70e1081299" containerName="dnsmasq-dns" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.408666 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a31c817-df72-4207-976b-1caee9f68a7c" containerName="mariadb-database-create" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.408689 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb875d5f-c265-49c5-a25d-76999819149b" containerName="mariadb-account-create-update" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.408698 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="dac702ac-33bf-46f4-966d-41573aaf1c28" containerName="mariadb-account-create-update" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.408710 4998 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="23f901f4-5696-4545-99e5-f8b1583b7431" containerName="keystone-db-sync" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.408718 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f02cdb5-1d56-4077-9630-7fc6eaa53bc9" containerName="mariadb-account-create-update" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.408734 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="21b5727f-2631-4151-acf1-9b77772d57ca" containerName="mariadb-database-create" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.408742 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="f25504d8-c4cd-467e-989f-cefa0f7f6607" containerName="mariadb-database-create" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.408755 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="c394067f-a8c9-412f-b55b-cd70e1081299" containerName="dnsmasq-dns" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.409445 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-f6jsx" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.430482 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-f6jsx"] Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.449855 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-798d96496c-ps8sv"] Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.451380 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-798d96496c-ps8sv" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.476016 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.476641 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.476893 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.477014 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.477128 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-k4sf9" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.493651 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-798d96496c-ps8sv"] Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.550870 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3374e30b-8de6-4e33-a180-bbaab7edfffc-combined-ca-bundle\") pod \"keystone-bootstrap-f6jsx\" (UID: \"3374e30b-8de6-4e33-a180-bbaab7edfffc\") " pod="openstack/keystone-bootstrap-f6jsx" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.550938 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmkm4\" (UniqueName: \"kubernetes.io/projected/3374e30b-8de6-4e33-a180-bbaab7edfffc-kube-api-access-nmkm4\") pod \"keystone-bootstrap-f6jsx\" (UID: \"3374e30b-8de6-4e33-a180-bbaab7edfffc\") " pod="openstack/keystone-bootstrap-f6jsx" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.551034 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"config\" (UniqueName: \"kubernetes.io/configmap/b348d733-e0cf-4250-babc-f0ef8f348e92-config\") pod \"dnsmasq-dns-798d96496c-ps8sv\" (UID: \"b348d733-e0cf-4250-babc-f0ef8f348e92\") " pod="openstack/dnsmasq-dns-798d96496c-ps8sv" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.551076 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3374e30b-8de6-4e33-a180-bbaab7edfffc-fernet-keys\") pod \"keystone-bootstrap-f6jsx\" (UID: \"3374e30b-8de6-4e33-a180-bbaab7edfffc\") " pod="openstack/keystone-bootstrap-f6jsx" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.551103 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3374e30b-8de6-4e33-a180-bbaab7edfffc-credential-keys\") pod \"keystone-bootstrap-f6jsx\" (UID: \"3374e30b-8de6-4e33-a180-bbaab7edfffc\") " pod="openstack/keystone-bootstrap-f6jsx" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.551130 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b348d733-e0cf-4250-babc-f0ef8f348e92-ovsdbserver-nb\") pod \"dnsmasq-dns-798d96496c-ps8sv\" (UID: \"b348d733-e0cf-4250-babc-f0ef8f348e92\") " pod="openstack/dnsmasq-dns-798d96496c-ps8sv" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.551175 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b348d733-e0cf-4250-babc-f0ef8f348e92-dns-svc\") pod \"dnsmasq-dns-798d96496c-ps8sv\" (UID: \"b348d733-e0cf-4250-babc-f0ef8f348e92\") " pod="openstack/dnsmasq-dns-798d96496c-ps8sv" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.551236 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b348d733-e0cf-4250-babc-f0ef8f348e92-ovsdbserver-sb\") pod \"dnsmasq-dns-798d96496c-ps8sv\" (UID: \"b348d733-e0cf-4250-babc-f0ef8f348e92\") " pod="openstack/dnsmasq-dns-798d96496c-ps8sv" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.551263 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-md8qc\" (UniqueName: \"kubernetes.io/projected/b348d733-e0cf-4250-babc-f0ef8f348e92-kube-api-access-md8qc\") pod \"dnsmasq-dns-798d96496c-ps8sv\" (UID: \"b348d733-e0cf-4250-babc-f0ef8f348e92\") " pod="openstack/dnsmasq-dns-798d96496c-ps8sv" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.551313 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3374e30b-8de6-4e33-a180-bbaab7edfffc-config-data\") pod \"keystone-bootstrap-f6jsx\" (UID: \"3374e30b-8de6-4e33-a180-bbaab7edfffc\") " pod="openstack/keystone-bootstrap-f6jsx" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.551355 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3374e30b-8de6-4e33-a180-bbaab7edfffc-scripts\") pod \"keystone-bootstrap-f6jsx\" (UID: \"3374e30b-8de6-4e33-a180-bbaab7edfffc\") " pod="openstack/keystone-bootstrap-f6jsx" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.653035 4998 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3374e30b-8de6-4e33-a180-bbaab7edfffc-config-data\") pod \"keystone-bootstrap-f6jsx\" (UID: \"3374e30b-8de6-4e33-a180-bbaab7edfffc\") " pod="openstack/keystone-bootstrap-f6jsx" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.653439 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3374e30b-8de6-4e33-a180-bbaab7edfffc-scripts\") pod \"keystone-bootstrap-f6jsx\" (UID: \"3374e30b-8de6-4e33-a180-bbaab7edfffc\") " pod="openstack/keystone-bootstrap-f6jsx" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.654092 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3374e30b-8de6-4e33-a180-bbaab7edfffc-combined-ca-bundle\") pod \"keystone-bootstrap-f6jsx\" (UID: \"3374e30b-8de6-4e33-a180-bbaab7edfffc\") " pod="openstack/keystone-bootstrap-f6jsx" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.654136 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmkm4\" (UniqueName: \"kubernetes.io/projected/3374e30b-8de6-4e33-a180-bbaab7edfffc-kube-api-access-nmkm4\") pod \"keystone-bootstrap-f6jsx\" (UID: \"3374e30b-8de6-4e33-a180-bbaab7edfffc\") " pod="openstack/keystone-bootstrap-f6jsx" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.654186 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b348d733-e0cf-4250-babc-f0ef8f348e92-config\") pod \"dnsmasq-dns-798d96496c-ps8sv\" (UID: \"b348d733-e0cf-4250-babc-f0ef8f348e92\") " pod="openstack/dnsmasq-dns-798d96496c-ps8sv" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.654217 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3374e30b-8de6-4e33-a180-bbaab7edfffc-fernet-keys\") pod \"keystone-bootstrap-f6jsx\" (UID: \"3374e30b-8de6-4e33-a180-bbaab7edfffc\") " pod="openstack/keystone-bootstrap-f6jsx" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.654239 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3374e30b-8de6-4e33-a180-bbaab7edfffc-credential-keys\") pod \"keystone-bootstrap-f6jsx\" (UID: \"3374e30b-8de6-4e33-a180-bbaab7edfffc\") " pod="openstack/keystone-bootstrap-f6jsx" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.654266 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b348d733-e0cf-4250-babc-f0ef8f348e92-ovsdbserver-nb\") pod \"dnsmasq-dns-798d96496c-ps8sv\" (UID: \"b348d733-e0cf-4250-babc-f0ef8f348e92\") " pod="openstack/dnsmasq-dns-798d96496c-ps8sv" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.654312 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b348d733-e0cf-4250-babc-f0ef8f348e92-dns-svc\") pod \"dnsmasq-dns-798d96496c-ps8sv\" (UID: \"b348d733-e0cf-4250-babc-f0ef8f348e92\") " pod="openstack/dnsmasq-dns-798d96496c-ps8sv" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.654429 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/b348d733-e0cf-4250-babc-f0ef8f348e92-ovsdbserver-sb\") pod \"dnsmasq-dns-798d96496c-ps8sv\" (UID: \"b348d733-e0cf-4250-babc-f0ef8f348e92\") " pod="openstack/dnsmasq-dns-798d96496c-ps8sv" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.654463 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-md8qc\" (UniqueName: \"kubernetes.io/projected/b348d733-e0cf-4250-babc-f0ef8f348e92-kube-api-access-md8qc\") pod \"dnsmasq-dns-798d96496c-ps8sv\" (UID: \"b348d733-e0cf-4250-babc-f0ef8f348e92\") " pod="openstack/dnsmasq-dns-798d96496c-ps8sv" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.655667 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b348d733-e0cf-4250-babc-f0ef8f348e92-config\") pod \"dnsmasq-dns-798d96496c-ps8sv\" (UID: \"b348d733-e0cf-4250-babc-f0ef8f348e92\") " pod="openstack/dnsmasq-dns-798d96496c-ps8sv" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.655667 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b348d733-e0cf-4250-babc-f0ef8f348e92-dns-svc\") pod \"dnsmasq-dns-798d96496c-ps8sv\" (UID: \"b348d733-e0cf-4250-babc-f0ef8f348e92\") " pod="openstack/dnsmasq-dns-798d96496c-ps8sv" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.656214 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b348d733-e0cf-4250-babc-f0ef8f348e92-ovsdbserver-nb\") pod \"dnsmasq-dns-798d96496c-ps8sv\" (UID: \"b348d733-e0cf-4250-babc-f0ef8f348e92\") " pod="openstack/dnsmasq-dns-798d96496c-ps8sv" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.656421 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b348d733-e0cf-4250-babc-f0ef8f348e92-ovsdbserver-sb\") pod \"dnsmasq-dns-798d96496c-ps8sv\" (UID: \"b348d733-e0cf-4250-babc-f0ef8f348e92\") " pod="openstack/dnsmasq-dns-798d96496c-ps8sv" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.666018 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3374e30b-8de6-4e33-a180-bbaab7edfffc-credential-keys\") pod \"keystone-bootstrap-f6jsx\" (UID: \"3374e30b-8de6-4e33-a180-bbaab7edfffc\") " pod="openstack/keystone-bootstrap-f6jsx" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.669116 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3374e30b-8de6-4e33-a180-bbaab7edfffc-scripts\") pod \"keystone-bootstrap-f6jsx\" (UID: \"3374e30b-8de6-4e33-a180-bbaab7edfffc\") " pod="openstack/keystone-bootstrap-f6jsx" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.673866 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3374e30b-8de6-4e33-a180-bbaab7edfffc-config-data\") pod \"keystone-bootstrap-f6jsx\" (UID: \"3374e30b-8de6-4e33-a180-bbaab7edfffc\") " pod="openstack/keystone-bootstrap-f6jsx" Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.677730 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3374e30b-8de6-4e33-a180-bbaab7edfffc-fernet-keys\") pod \"keystone-bootstrap-f6jsx\" (UID: \"3374e30b-8de6-4e33-a180-bbaab7edfffc\") " 
pod="openstack/keystone-bootstrap-f6jsx"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.686334 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3374e30b-8de6-4e33-a180-bbaab7edfffc-combined-ca-bundle\") pod \"keystone-bootstrap-f6jsx\" (UID: \"3374e30b-8de6-4e33-a180-bbaab7edfffc\") " pod="openstack/keystone-bootstrap-f6jsx"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.688704 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmkm4\" (UniqueName: \"kubernetes.io/projected/3374e30b-8de6-4e33-a180-bbaab7edfffc-kube-api-access-nmkm4\") pod \"keystone-bootstrap-f6jsx\" (UID: \"3374e30b-8de6-4e33-a180-bbaab7edfffc\") " pod="openstack/keystone-bootstrap-f6jsx"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.690543 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-md8qc\" (UniqueName: \"kubernetes.io/projected/b348d733-e0cf-4250-babc-f0ef8f348e92-kube-api-access-md8qc\") pod \"dnsmasq-dns-798d96496c-ps8sv\" (UID: \"b348d733-e0cf-4250-babc-f0ef8f348e92\") " pod="openstack/dnsmasq-dns-798d96496c-ps8sv"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.730445 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-47p8b"]
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.735900 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-47p8b"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.748183 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.748361 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-kk4f4"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.748582 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.772141 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-5rpgx"]
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.773244 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-5rpgx"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.781950 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-8r7zj"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.782385 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.782615 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.800489 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-47p8b"]
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.832970 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-f6jsx"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.840181 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-5rpgx"]
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.842410 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-798d96496c-ps8sv"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.857481 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0e86027e-f5e6-40ba-af5a-275b9087dcfd-etc-machine-id\") pod \"cinder-db-sync-47p8b\" (UID: \"0e86027e-f5e6-40ba-af5a-275b9087dcfd\") " pod="openstack/cinder-db-sync-47p8b"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.857526 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0e86027e-f5e6-40ba-af5a-275b9087dcfd-scripts\") pod \"cinder-db-sync-47p8b\" (UID: \"0e86027e-f5e6-40ba-af5a-275b9087dcfd\") " pod="openstack/cinder-db-sync-47p8b"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.857597 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c79d6b94-62d5-41a1-ae40-acec75234d16-combined-ca-bundle\") pod \"neutron-db-sync-5rpgx\" (UID: \"c79d6b94-62d5-41a1-ae40-acec75234d16\") " pod="openstack/neutron-db-sync-5rpgx"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.857651 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e86027e-f5e6-40ba-af5a-275b9087dcfd-combined-ca-bundle\") pod \"cinder-db-sync-47p8b\" (UID: \"0e86027e-f5e6-40ba-af5a-275b9087dcfd\") " pod="openstack/cinder-db-sync-47p8b"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.857676 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0e86027e-f5e6-40ba-af5a-275b9087dcfd-db-sync-config-data\") pod \"cinder-db-sync-47p8b\" (UID: \"0e86027e-f5e6-40ba-af5a-275b9087dcfd\") " pod="openstack/cinder-db-sync-47p8b"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.857715 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mwd2q\" (UniqueName: \"kubernetes.io/projected/0e86027e-f5e6-40ba-af5a-275b9087dcfd-kube-api-access-mwd2q\") pod \"cinder-db-sync-47p8b\" (UID: \"0e86027e-f5e6-40ba-af5a-275b9087dcfd\") " pod="openstack/cinder-db-sync-47p8b"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.857767 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcqz8\" (UniqueName: \"kubernetes.io/projected/c79d6b94-62d5-41a1-ae40-acec75234d16-kube-api-access-fcqz8\") pod \"neutron-db-sync-5rpgx\" (UID: \"c79d6b94-62d5-41a1-ae40-acec75234d16\") " pod="openstack/neutron-db-sync-5rpgx"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.857821 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e86027e-f5e6-40ba-af5a-275b9087dcfd-config-data\") pod \"cinder-db-sync-47p8b\" (UID: \"0e86027e-f5e6-40ba-af5a-275b9087dcfd\") " pod="openstack/cinder-db-sync-47p8b"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.857841 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c79d6b94-62d5-41a1-ae40-acec75234d16-config\") pod \"neutron-db-sync-5rpgx\" (UID: \"c79d6b94-62d5-41a1-ae40-acec75234d16\") " pod="openstack/neutron-db-sync-5rpgx"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.928376 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-jgnc7"]
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.929650 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-jgnc7"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.936470 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-jfbpm"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.936644 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.958791 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e86027e-f5e6-40ba-af5a-275b9087dcfd-combined-ca-bundle\") pod \"cinder-db-sync-47p8b\" (UID: \"0e86027e-f5e6-40ba-af5a-275b9087dcfd\") " pod="openstack/cinder-db-sync-47p8b"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.958878 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0e86027e-f5e6-40ba-af5a-275b9087dcfd-db-sync-config-data\") pod \"cinder-db-sync-47p8b\" (UID: \"0e86027e-f5e6-40ba-af5a-275b9087dcfd\") " pod="openstack/cinder-db-sync-47p8b"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.958919 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwd2q\" (UniqueName: \"kubernetes.io/projected/0e86027e-f5e6-40ba-af5a-275b9087dcfd-kube-api-access-mwd2q\") pod \"cinder-db-sync-47p8b\" (UID: \"0e86027e-f5e6-40ba-af5a-275b9087dcfd\") " pod="openstack/cinder-db-sync-47p8b"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.959055 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcqz8\" (UniqueName: \"kubernetes.io/projected/c79d6b94-62d5-41a1-ae40-acec75234d16-kube-api-access-fcqz8\") pod \"neutron-db-sync-5rpgx\" (UID: \"c79d6b94-62d5-41a1-ae40-acec75234d16\") " pod="openstack/neutron-db-sync-5rpgx"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.959127 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e86027e-f5e6-40ba-af5a-275b9087dcfd-config-data\") pod \"cinder-db-sync-47p8b\" (UID: \"0e86027e-f5e6-40ba-af5a-275b9087dcfd\") " pod="openstack/cinder-db-sync-47p8b"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.959147 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c79d6b94-62d5-41a1-ae40-acec75234d16-config\") pod \"neutron-db-sync-5rpgx\" (UID: \"c79d6b94-62d5-41a1-ae40-acec75234d16\") " pod="openstack/neutron-db-sync-5rpgx"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.959179 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0e86027e-f5e6-40ba-af5a-275b9087dcfd-etc-machine-id\") pod \"cinder-db-sync-47p8b\" (UID: \"0e86027e-f5e6-40ba-af5a-275b9087dcfd\") " pod="openstack/cinder-db-sync-47p8b"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.959194 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0e86027e-f5e6-40ba-af5a-275b9087dcfd-scripts\") pod \"cinder-db-sync-47p8b\" (UID: \"0e86027e-f5e6-40ba-af5a-275b9087dcfd\") " pod="openstack/cinder-db-sync-47p8b"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.959240 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c79d6b94-62d5-41a1-ae40-acec75234d16-combined-ca-bundle\") pod \"neutron-db-sync-5rpgx\" (UID: \"c79d6b94-62d5-41a1-ae40-acec75234d16\") " pod="openstack/neutron-db-sync-5rpgx"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.963362 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0e86027e-f5e6-40ba-af5a-275b9087dcfd-etc-machine-id\") pod \"cinder-db-sync-47p8b\" (UID: \"0e86027e-f5e6-40ba-af5a-275b9087dcfd\") " pod="openstack/cinder-db-sync-47p8b"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.970120 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0e86027e-f5e6-40ba-af5a-275b9087dcfd-db-sync-config-data\") pod \"cinder-db-sync-47p8b\" (UID: \"0e86027e-f5e6-40ba-af5a-275b9087dcfd\") " pod="openstack/cinder-db-sync-47p8b"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.970291 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-jgnc7"]
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.970734 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0e86027e-f5e6-40ba-af5a-275b9087dcfd-scripts\") pod \"cinder-db-sync-47p8b\" (UID: \"0e86027e-f5e6-40ba-af5a-275b9087dcfd\") " pod="openstack/cinder-db-sync-47p8b"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.971281 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/c79d6b94-62d5-41a1-ae40-acec75234d16-config\") pod \"neutron-db-sync-5rpgx\" (UID: \"c79d6b94-62d5-41a1-ae40-acec75234d16\") " pod="openstack/neutron-db-sync-5rpgx"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.971319 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c79d6b94-62d5-41a1-ae40-acec75234d16-combined-ca-bundle\") pod \"neutron-db-sync-5rpgx\" (UID: \"c79d6b94-62d5-41a1-ae40-acec75234d16\") " pod="openstack/neutron-db-sync-5rpgx"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.972208 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e86027e-f5e6-40ba-af5a-275b9087dcfd-combined-ca-bundle\") pod \"cinder-db-sync-47p8b\" (UID: \"0e86027e-f5e6-40ba-af5a-275b9087dcfd\") " pod="openstack/cinder-db-sync-47p8b"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.981211 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e86027e-f5e6-40ba-af5a-275b9087dcfd-config-data\") pod \"cinder-db-sync-47p8b\" (UID: \"0e86027e-f5e6-40ba-af5a-275b9087dcfd\") " pod="openstack/cinder-db-sync-47p8b"
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.990297 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-sstns"]
Feb 03 07:05:25 crc kubenswrapper[4998]: I0203 07:05:25.991531 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-sstns"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.012616 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.014304 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-pflnk"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.014714 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwd2q\" (UniqueName: \"kubernetes.io/projected/0e86027e-f5e6-40ba-af5a-275b9087dcfd-kube-api-access-mwd2q\") pod \"cinder-db-sync-47p8b\" (UID: \"0e86027e-f5e6-40ba-af5a-275b9087dcfd\") " pod="openstack/cinder-db-sync-47p8b"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.015723 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fcqz8\" (UniqueName: \"kubernetes.io/projected/c79d6b94-62d5-41a1-ae40-acec75234d16-kube-api-access-fcqz8\") pod \"neutron-db-sync-5rpgx\" (UID: \"c79d6b94-62d5-41a1-ae40-acec75234d16\") " pod="openstack/neutron-db-sync-5rpgx"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.018849 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.028387 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-798d96496c-ps8sv"]
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.045849 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-sstns"]
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.062024 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/537b4446-c59e-4c79-9f65-2221ddb6783c-combined-ca-bundle\") pod \"barbican-db-sync-jgnc7\" (UID: \"537b4446-c59e-4c79-9f65-2221ddb6783c\") " pod="openstack/barbican-db-sync-jgnc7"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.062081 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/537b4446-c59e-4c79-9f65-2221ddb6783c-db-sync-config-data\") pod \"barbican-db-sync-jgnc7\" (UID: \"537b4446-c59e-4c79-9f65-2221ddb6783c\") " pod="openstack/barbican-db-sync-jgnc7"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.062106 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/520a4ac6-b880-4576-8772-da70e0d8f99d-combined-ca-bundle\") pod \"placement-db-sync-sstns\" (UID: \"520a4ac6-b880-4576-8772-da70e0d8f99d\") " pod="openstack/placement-db-sync-sstns"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.062139 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/520a4ac6-b880-4576-8772-da70e0d8f99d-config-data\") pod \"placement-db-sync-sstns\" (UID: \"520a4ac6-b880-4576-8772-da70e0d8f99d\") " pod="openstack/placement-db-sync-sstns"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.062165 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5b472\" (UniqueName: \"kubernetes.io/projected/537b4446-c59e-4c79-9f65-2221ddb6783c-kube-api-access-5b472\") pod \"barbican-db-sync-jgnc7\" (UID: \"537b4446-c59e-4c79-9f65-2221ddb6783c\") " pod="openstack/barbican-db-sync-jgnc7"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.062184 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/520a4ac6-b880-4576-8772-da70e0d8f99d-scripts\") pod \"placement-db-sync-sstns\" (UID: \"520a4ac6-b880-4576-8772-da70e0d8f99d\") " pod="openstack/placement-db-sync-sstns"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.062214 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hcqb4\" (UniqueName: \"kubernetes.io/projected/520a4ac6-b880-4576-8772-da70e0d8f99d-kube-api-access-hcqb4\") pod \"placement-db-sync-sstns\" (UID: \"520a4ac6-b880-4576-8772-da70e0d8f99d\") " pod="openstack/placement-db-sync-sstns"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.062235 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/520a4ac6-b880-4576-8772-da70e0d8f99d-logs\") pod \"placement-db-sync-sstns\" (UID: \"520a4ac6-b880-4576-8772-da70e0d8f99d\") " pod="openstack/placement-db-sync-sstns"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.062486 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-47p8b"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.099009 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6844ff7b9c-r9dfn"]
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.100875 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6844ff7b9c-r9dfn"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.106057 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-5rpgx"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.129832 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6844ff7b9c-r9dfn"]
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.155946 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.158029 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.164262 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.164521 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.165130 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/537b4446-c59e-4c79-9f65-2221ddb6783c-db-sync-config-data\") pod \"barbican-db-sync-jgnc7\" (UID: \"537b4446-c59e-4c79-9f65-2221ddb6783c\") " pod="openstack/barbican-db-sync-jgnc7"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.165156 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/520a4ac6-b880-4576-8772-da70e0d8f99d-combined-ca-bundle\") pod \"placement-db-sync-sstns\" (UID: \"520a4ac6-b880-4576-8772-da70e0d8f99d\") " pod="openstack/placement-db-sync-sstns"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.165197 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/520a4ac6-b880-4576-8772-da70e0d8f99d-config-data\") pod \"placement-db-sync-sstns\" (UID: \"520a4ac6-b880-4576-8772-da70e0d8f99d\") " pod="openstack/placement-db-sync-sstns"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.165226 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5b472\" (UniqueName: \"kubernetes.io/projected/537b4446-c59e-4c79-9f65-2221ddb6783c-kube-api-access-5b472\") pod \"barbican-db-sync-jgnc7\" (UID: \"537b4446-c59e-4c79-9f65-2221ddb6783c\") " pod="openstack/barbican-db-sync-jgnc7"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.165245 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/520a4ac6-b880-4576-8772-da70e0d8f99d-scripts\") pod \"placement-db-sync-sstns\" (UID: \"520a4ac6-b880-4576-8772-da70e0d8f99d\") " pod="openstack/placement-db-sync-sstns"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.165267 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/478afdb4-cf55-4653-b5d3-1a81fbfd1833-ovsdbserver-nb\") pod \"dnsmasq-dns-6844ff7b9c-r9dfn\" (UID: \"478afdb4-cf55-4653-b5d3-1a81fbfd1833\") " pod="openstack/dnsmasq-dns-6844ff7b9c-r9dfn"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.165286 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/478afdb4-cf55-4653-b5d3-1a81fbfd1833-dns-svc\") pod \"dnsmasq-dns-6844ff7b9c-r9dfn\" (UID: \"478afdb4-cf55-4653-b5d3-1a81fbfd1833\") " pod="openstack/dnsmasq-dns-6844ff7b9c-r9dfn"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.165306 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/478afdb4-cf55-4653-b5d3-1a81fbfd1833-ovsdbserver-sb\") pod \"dnsmasq-dns-6844ff7b9c-r9dfn\" (UID: \"478afdb4-cf55-4653-b5d3-1a81fbfd1833\") " pod="openstack/dnsmasq-dns-6844ff7b9c-r9dfn"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.165327 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hcqb4\" (UniqueName: \"kubernetes.io/projected/520a4ac6-b880-4576-8772-da70e0d8f99d-kube-api-access-hcqb4\") pod \"placement-db-sync-sstns\" (UID: \"520a4ac6-b880-4576-8772-da70e0d8f99d\") " pod="openstack/placement-db-sync-sstns"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.165347 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/520a4ac6-b880-4576-8772-da70e0d8f99d-logs\") pod \"placement-db-sync-sstns\" (UID: \"520a4ac6-b880-4576-8772-da70e0d8f99d\") " pod="openstack/placement-db-sync-sstns"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.165363 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/478afdb4-cf55-4653-b5d3-1a81fbfd1833-config\") pod \"dnsmasq-dns-6844ff7b9c-r9dfn\" (UID: \"478afdb4-cf55-4653-b5d3-1a81fbfd1833\") " pod="openstack/dnsmasq-dns-6844ff7b9c-r9dfn"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.165412 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9vk7\" (UniqueName: \"kubernetes.io/projected/478afdb4-cf55-4653-b5d3-1a81fbfd1833-kube-api-access-z9vk7\") pod \"dnsmasq-dns-6844ff7b9c-r9dfn\" (UID: \"478afdb4-cf55-4653-b5d3-1a81fbfd1833\") " pod="openstack/dnsmasq-dns-6844ff7b9c-r9dfn"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.165465 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/537b4446-c59e-4c79-9f65-2221ddb6783c-combined-ca-bundle\") pod \"barbican-db-sync-jgnc7\" (UID: \"537b4446-c59e-4c79-9f65-2221ddb6783c\") " pod="openstack/barbican-db-sync-jgnc7"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.168079 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/520a4ac6-b880-4576-8772-da70e0d8f99d-logs\") pod \"placement-db-sync-sstns\" (UID: \"520a4ac6-b880-4576-8772-da70e0d8f99d\") " pod="openstack/placement-db-sync-sstns"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.170079 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/537b4446-c59e-4c79-9f65-2221ddb6783c-combined-ca-bundle\") pod \"barbican-db-sync-jgnc7\" (UID: \"537b4446-c59e-4c79-9f65-2221ddb6783c\") " pod="openstack/barbican-db-sync-jgnc7"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.172711 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/520a4ac6-b880-4576-8772-da70e0d8f99d-combined-ca-bundle\") pod \"placement-db-sync-sstns\" (UID: \"520a4ac6-b880-4576-8772-da70e0d8f99d\") " pod="openstack/placement-db-sync-sstns"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.173763 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/537b4446-c59e-4c79-9f65-2221ddb6783c-db-sync-config-data\") pod \"barbican-db-sync-jgnc7\" (UID: \"537b4446-c59e-4c79-9f65-2221ddb6783c\") " pod="openstack/barbican-db-sync-jgnc7"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.174885 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/520a4ac6-b880-4576-8772-da70e0d8f99d-config-data\") pod \"placement-db-sync-sstns\" (UID: \"520a4ac6-b880-4576-8772-da70e0d8f99d\") " pod="openstack/placement-db-sync-sstns"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.175171 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/520a4ac6-b880-4576-8772-da70e0d8f99d-scripts\") pod \"placement-db-sync-sstns\" (UID: \"520a4ac6-b880-4576-8772-da70e0d8f99d\") " pod="openstack/placement-db-sync-sstns"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.194111 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5b472\" (UniqueName: \"kubernetes.io/projected/537b4446-c59e-4c79-9f65-2221ddb6783c-kube-api-access-5b472\") pod \"barbican-db-sync-jgnc7\" (UID: \"537b4446-c59e-4c79-9f65-2221ddb6783c\") " pod="openstack/barbican-db-sync-jgnc7"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.195834 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.204368 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hcqb4\" (UniqueName: \"kubernetes.io/projected/520a4ac6-b880-4576-8772-da70e0d8f99d-kube-api-access-hcqb4\") pod \"placement-db-sync-sstns\" (UID: \"520a4ac6-b880-4576-8772-da70e0d8f99d\") " pod="openstack/placement-db-sync-sstns"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.266671 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/478afdb4-cf55-4653-b5d3-1a81fbfd1833-ovsdbserver-nb\") pod \"dnsmasq-dns-6844ff7b9c-r9dfn\" (UID: \"478afdb4-cf55-4653-b5d3-1a81fbfd1833\") " pod="openstack/dnsmasq-dns-6844ff7b9c-r9dfn"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.266732 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/478afdb4-cf55-4653-b5d3-1a81fbfd1833-dns-svc\") pod \"dnsmasq-dns-6844ff7b9c-r9dfn\" (UID: \"478afdb4-cf55-4653-b5d3-1a81fbfd1833\") " pod="openstack/dnsmasq-dns-6844ff7b9c-r9dfn"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.266758 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/478afdb4-cf55-4653-b5d3-1a81fbfd1833-ovsdbserver-sb\") pod \"dnsmasq-dns-6844ff7b9c-r9dfn\" (UID: \"478afdb4-cf55-4653-b5d3-1a81fbfd1833\") " pod="openstack/dnsmasq-dns-6844ff7b9c-r9dfn"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.266931 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/478afdb4-cf55-4653-b5d3-1a81fbfd1833-config\") pod \"dnsmasq-dns-6844ff7b9c-r9dfn\" (UID: \"478afdb4-cf55-4653-b5d3-1a81fbfd1833\") " pod="openstack/dnsmasq-dns-6844ff7b9c-r9dfn"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.267004 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/500ab31f-c75b-4a96-afa1-56f868909ecb-config-data\") pod \"ceilometer-0\" (UID: \"500ab31f-c75b-4a96-afa1-56f868909ecb\") " pod="openstack/ceilometer-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.267054 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/500ab31f-c75b-4a96-afa1-56f868909ecb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"500ab31f-c75b-4a96-afa1-56f868909ecb\") " pod="openstack/ceilometer-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.267103 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/500ab31f-c75b-4a96-afa1-56f868909ecb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"500ab31f-c75b-4a96-afa1-56f868909ecb\") " pod="openstack/ceilometer-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.267139 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9vk7\" (UniqueName: \"kubernetes.io/projected/478afdb4-cf55-4653-b5d3-1a81fbfd1833-kube-api-access-z9vk7\") pod \"dnsmasq-dns-6844ff7b9c-r9dfn\" (UID: \"478afdb4-cf55-4653-b5d3-1a81fbfd1833\") " pod="openstack/dnsmasq-dns-6844ff7b9c-r9dfn"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.267165 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/500ab31f-c75b-4a96-afa1-56f868909ecb-log-httpd\") pod \"ceilometer-0\" (UID: \"500ab31f-c75b-4a96-afa1-56f868909ecb\") " pod="openstack/ceilometer-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.267198 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/500ab31f-c75b-4a96-afa1-56f868909ecb-run-httpd\") pod \"ceilometer-0\" (UID: \"500ab31f-c75b-4a96-afa1-56f868909ecb\") " pod="openstack/ceilometer-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.267230 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/500ab31f-c75b-4a96-afa1-56f868909ecb-scripts\") pod \"ceilometer-0\" (UID: \"500ab31f-c75b-4a96-afa1-56f868909ecb\") " pod="openstack/ceilometer-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.267275 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rf82r\" (UniqueName: \"kubernetes.io/projected/500ab31f-c75b-4a96-afa1-56f868909ecb-kube-api-access-rf82r\") pod \"ceilometer-0\" (UID: \"500ab31f-c75b-4a96-afa1-56f868909ecb\") " pod="openstack/ceilometer-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.267978 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/478afdb4-cf55-4653-b5d3-1a81fbfd1833-ovsdbserver-nb\") pod \"dnsmasq-dns-6844ff7b9c-r9dfn\" (UID: \"478afdb4-cf55-4653-b5d3-1a81fbfd1833\") " pod="openstack/dnsmasq-dns-6844ff7b9c-r9dfn"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.268071 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/478afdb4-cf55-4653-b5d3-1a81fbfd1833-config\") pod \"dnsmasq-dns-6844ff7b9c-r9dfn\" (UID: \"478afdb4-cf55-4653-b5d3-1a81fbfd1833\") " pod="openstack/dnsmasq-dns-6844ff7b9c-r9dfn"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.268324 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/478afdb4-cf55-4653-b5d3-1a81fbfd1833-dns-svc\") pod \"dnsmasq-dns-6844ff7b9c-r9dfn\" (UID: \"478afdb4-cf55-4653-b5d3-1a81fbfd1833\") " pod="openstack/dnsmasq-dns-6844ff7b9c-r9dfn"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.268559 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/478afdb4-cf55-4653-b5d3-1a81fbfd1833-ovsdbserver-sb\") pod \"dnsmasq-dns-6844ff7b9c-r9dfn\" (UID: \"478afdb4-cf55-4653-b5d3-1a81fbfd1833\") " pod="openstack/dnsmasq-dns-6844ff7b9c-r9dfn"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.272148 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-jgnc7"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.284623 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9vk7\" (UniqueName: \"kubernetes.io/projected/478afdb4-cf55-4653-b5d3-1a81fbfd1833-kube-api-access-z9vk7\") pod \"dnsmasq-dns-6844ff7b9c-r9dfn\" (UID: \"478afdb4-cf55-4653-b5d3-1a81fbfd1833\") " pod="openstack/dnsmasq-dns-6844ff7b9c-r9dfn"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.351393 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-sstns"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.368651 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/500ab31f-c75b-4a96-afa1-56f868909ecb-config-data\") pod \"ceilometer-0\" (UID: \"500ab31f-c75b-4a96-afa1-56f868909ecb\") " pod="openstack/ceilometer-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.369906 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/500ab31f-c75b-4a96-afa1-56f868909ecb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"500ab31f-c75b-4a96-afa1-56f868909ecb\") " pod="openstack/ceilometer-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.370073 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/500ab31f-c75b-4a96-afa1-56f868909ecb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"500ab31f-c75b-4a96-afa1-56f868909ecb\") " pod="openstack/ceilometer-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.370153 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/500ab31f-c75b-4a96-afa1-56f868909ecb-log-httpd\") pod \"ceilometer-0\" (UID: \"500ab31f-c75b-4a96-afa1-56f868909ecb\") " pod="openstack/ceilometer-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.370243 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/500ab31f-c75b-4a96-afa1-56f868909ecb-run-httpd\") pod \"ceilometer-0\" (UID: \"500ab31f-c75b-4a96-afa1-56f868909ecb\") " pod="openstack/ceilometer-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.370366 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/500ab31f-c75b-4a96-afa1-56f868909ecb-scripts\") pod \"ceilometer-0\" (UID: \"500ab31f-c75b-4a96-afa1-56f868909ecb\") " pod="openstack/ceilometer-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.370468 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rf82r\" (UniqueName: \"kubernetes.io/projected/500ab31f-c75b-4a96-afa1-56f868909ecb-kube-api-access-rf82r\") pod \"ceilometer-0\" (UID: \"500ab31f-c75b-4a96-afa1-56f868909ecb\") " pod="openstack/ceilometer-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.381228 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/500ab31f-c75b-4a96-afa1-56f868909ecb-log-httpd\") pod \"ceilometer-0\" (UID: \"500ab31f-c75b-4a96-afa1-56f868909ecb\") " pod="openstack/ceilometer-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.381576 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/500ab31f-c75b-4a96-afa1-56f868909ecb-run-httpd\") pod \"ceilometer-0\" (UID: \"500ab31f-c75b-4a96-afa1-56f868909ecb\") " pod="openstack/ceilometer-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.384569 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/500ab31f-c75b-4a96-afa1-56f868909ecb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"500ab31f-c75b-4a96-afa1-56f868909ecb\") " pod="openstack/ceilometer-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.385794 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/500ab31f-c75b-4a96-afa1-56f868909ecb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"500ab31f-c75b-4a96-afa1-56f868909ecb\") " pod="openstack/ceilometer-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.386119 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/500ab31f-c75b-4a96-afa1-56f868909ecb-scripts\") pod \"ceilometer-0\" (UID: \"500ab31f-c75b-4a96-afa1-56f868909ecb\") " pod="openstack/ceilometer-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.387540 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/500ab31f-c75b-4a96-afa1-56f868909ecb-config-data\") pod \"ceilometer-0\" (UID: \"500ab31f-c75b-4a96-afa1-56f868909ecb\") " pod="openstack/ceilometer-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.422388 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rf82r\" (UniqueName: \"kubernetes.io/projected/500ab31f-c75b-4a96-afa1-56f868909ecb-kube-api-access-rf82r\") pod \"ceilometer-0\" (UID: \"500ab31f-c75b-4a96-afa1-56f868909ecb\") " pod="openstack/ceilometer-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.441478 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6844ff7b9c-r9dfn"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.511105 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.602278 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-798d96496c-ps8sv"]
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.622315 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.641277 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.652890 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.657306 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.657533 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-w8hrx"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.657662 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.657916 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.711675 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-5rpgx"]
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.776931 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-47p8b"]
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.791905 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8nzb\" (UniqueName: \"kubernetes.io/projected/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-kube-api-access-c8nzb\") pod \"glance-default-external-api-0\" (UID: \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\") " pod="openstack/glance-default-external-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.792021 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\") " pod="openstack/glance-default-external-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.792105 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\") " pod="openstack/glance-default-external-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.792182 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-config-data\") pod \"glance-default-external-api-0\" (UID: \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\") " pod="openstack/glance-default-external-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.792263 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\") " pod="openstack/glance-default-external-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.792412 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-scripts\") pod \"glance-default-external-api-0\" (UID: \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\") " pod="openstack/glance-default-external-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.792513 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-logs\") pod \"glance-default-external-api-0\" (UID: \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\") " pod="openstack/glance-default-external-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.792618 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\") " pod="openstack/glance-default-external-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.794698 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-f6jsx"]
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.804423 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.807149 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.808716 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.809306 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.810897 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Feb 03 07:05:26 crc kubenswrapper[4998]: W0203 07:05:26.847119 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3374e30b_8de6_4e33_a180_bbaab7edfffc.slice/crio-ed31fa59a8d0f421fc519fe4f075b7847e08751e2e64c9bbed4f6bce847a0d21 WatchSource:0}: Error finding container ed31fa59a8d0f421fc519fe4f075b7847e08751e2e64c9bbed4f6bce847a0d21: Status 404 returned error can't find the container with id ed31fa59a8d0f421fc519fe4f075b7847e08751e2e64c9bbed4f6bce847a0d21
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.895649 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b57813b2-2ae4-497d-86b4-250ddc3d87a5-config-data\") pod \"glance-default-internal-api-0\" (UID: \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\") " pod="openstack/glance-default-internal-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.895709 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-scripts\") pod \"glance-default-external-api-0\" (UID: \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\") " pod="openstack/glance-default-external-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.895792 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-logs\") pod \"glance-default-external-api-0\" (UID: \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\") " pod="openstack/glance-default-external-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.895818 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b57813b2-2ae4-497d-86b4-250ddc3d87a5-logs\") pod \"glance-default-internal-api-0\" (UID: \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\") " pod="openstack/glance-default-internal-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.895847 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b57813b2-2ae4-497d-86b4-250ddc3d87a5-scripts\") pod \"glance-default-internal-api-0\" (UID: \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\") " pod="openstack/glance-default-internal-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.895895 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\") " pod="openstack/glance-default-external-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.895934 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8nzb\" (UniqueName: \"kubernetes.io/projected/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-kube-api-access-c8nzb\") pod \"glance-default-external-api-0\" (UID: \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\") " pod="openstack/glance-default-external-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.895956 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b57813b2-2ae4-497d-86b4-250ddc3d87a5-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\") " pod="openstack/glance-default-internal-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.895980 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\") " pod="openstack/glance-default-external-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.896007 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b57813b2-2ae4-497d-86b4-250ddc3d87a5-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\") " pod="openstack/glance-default-internal-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.896035 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\") " pod="openstack/glance-default-external-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.896063 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b57813b2-2ae4-497d-86b4-250ddc3d87a5-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\") " pod="openstack/glance-default-internal-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.896090 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-config-data\") pod \"glance-default-external-api-0\" (UID: \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\") " pod="openstack/glance-default-external-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.896124 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\") " pod="openstack/glance-default-external-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.896166 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kr2lf\" (UniqueName: \"kubernetes.io/projected/b57813b2-2ae4-497d-86b4-250ddc3d87a5-kube-api-access-kr2lf\") pod \"glance-default-internal-api-0\" (UID: \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\") " pod="openstack/glance-default-internal-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.896229 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\") " pod="openstack/glance-default-internal-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.896794 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-logs\") pod \"glance-default-external-api-0\" (UID: \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\") " pod="openstack/glance-default-external-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.896881 4998 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/glance-default-external-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.897859 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\") " pod="openstack/glance-default-external-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.908015 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\") " pod="openstack/glance-default-external-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.910685 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-config-data\") pod \"glance-default-external-api-0\" (UID: \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\") " pod="openstack/glance-default-external-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.911732 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-scripts\") pod \"glance-default-external-api-0\" (UID: \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\") " pod="openstack/glance-default-external-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.922488 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\") " pod="openstack/glance-default-external-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.931823 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8nzb\" (UniqueName: \"kubernetes.io/projected/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-kube-api-access-c8nzb\") pod \"glance-default-external-api-0\" (UID: \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\") " pod="openstack/glance-default-external-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.960181 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\") " pod="openstack/glance-default-external-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.998018 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b57813b2-2ae4-497d-86b4-250ddc3d87a5-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\") " pod="openstack/glance-default-internal-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.998063 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b57813b2-2ae4-497d-86b4-250ddc3d87a5-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\") " pod="openstack/glance-default-internal-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.998127 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b57813b2-2ae4-497d-86b4-250ddc3d87a5-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\") " pod="openstack/glance-default-internal-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.998203 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kr2lf\" (UniqueName: \"kubernetes.io/projected/b57813b2-2ae4-497d-86b4-250ddc3d87a5-kube-api-access-kr2lf\") pod \"glance-default-internal-api-0\" (UID: \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\") " pod="openstack/glance-default-internal-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.998252 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\") " pod="openstack/glance-default-internal-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.998305 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b57813b2-2ae4-497d-86b4-250ddc3d87a5-config-data\") pod \"glance-default-internal-api-0\" (UID: \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\") " pod="openstack/glance-default-internal-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.998449 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b57813b2-2ae4-497d-86b4-250ddc3d87a5-logs\") pod \"glance-default-internal-api-0\" (UID: \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\") " pod="openstack/glance-default-internal-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.998545 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b57813b2-2ae4-497d-86b4-250ddc3d87a5-scripts\") pod \"glance-default-internal-api-0\" (UID: \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\") " pod="openstack/glance-default-internal-api-0"
Feb 03 07:05:26 crc kubenswrapper[4998]: I0203 07:05:26.999235 4998 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-internal-api-0"
Feb 03 07:05:27 crc kubenswrapper[4998]: I0203 07:05:27.002325 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b57813b2-2ae4-497d-86b4-250ddc3d87a5-scripts\") pod \"glance-default-internal-api-0\" (UID: \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\") " pod="openstack/glance-default-internal-api-0"
Feb 03 07:05:27 crc kubenswrapper[4998]: I0203 07:05:27.009375 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b57813b2-2ae4-497d-86b4-250ddc3d87a5-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\") " pod="openstack/glance-default-internal-api-0"
Feb 03 07:05:27 crc kubenswrapper[4998]: I0203 07:05:27.010802 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b57813b2-2ae4-497d-86b4-250ddc3d87a5-logs\") pod \"glance-default-internal-api-0\" (UID: \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\") " pod="openstack/glance-default-internal-api-0"
Feb 03 07:05:27 crc kubenswrapper[4998]: I0203 07:05:27.011359 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b57813b2-2ae4-497d-86b4-250ddc3d87a5-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\") " pod="openstack/glance-default-internal-api-0"
Feb 03 07:05:27 crc kubenswrapper[4998]: I0203 07:05:27.025768 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kr2lf\" (UniqueName: \"kubernetes.io/projected/b57813b2-2ae4-497d-86b4-250ddc3d87a5-kube-api-access-kr2lf\") pod \"glance-default-internal-api-0\" (UID: \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\") " pod="openstack/glance-default-internal-api-0"
Feb 03 07:05:27 crc kubenswrapper[4998]: I0203 07:05:27.026650 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b57813b2-2ae4-497d-86b4-250ddc3d87a5-config-data\") pod \"glance-default-internal-api-0\" (UID: \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\") " pod="openstack/glance-default-internal-api-0"
Feb 03 07:05:27 crc kubenswrapper[4998]: I0203 07:05:27.033666 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b57813b2-2ae4-497d-86b4-250ddc3d87a5-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\") " pod="openstack/glance-default-internal-api-0"
Feb 03 07:05:27 crc kubenswrapper[4998]: I0203 07:05:27.065944 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\") " pod="openstack/glance-default-internal-api-0"
Feb 03 07:05:27 crc kubenswrapper[4998]: I0203 07:05:27.196950 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6844ff7b9c-r9dfn"]
Feb 03 07:05:27 crc kubenswrapper[4998]: W0203 07:05:27.199675 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod478afdb4_cf55_4653_b5d3_1a81fbfd1833.slice/crio-50d8a2b2523f67ca51e04015e2f068c8ed3dc3660439a767c2d5c3abd3545eb9 WatchSource:0}: Error finding container 50d8a2b2523f67ca51e04015e2f068c8ed3dc3660439a767c2d5c3abd3545eb9: Status 404 returned error can't find the container with id 50d8a2b2523f67ca51e04015e2f068c8ed3dc3660439a767c2d5c3abd3545eb9
Feb 03 07:05:27 crc kubenswrapper[4998]: I0203 07:05:27.236870 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-798d96496c-ps8sv" event={"ID":"b348d733-e0cf-4250-babc-f0ef8f348e92","Type":"ContainerStarted","Data":"4fcbc8a9e559cff9aa1d9432f4d9edfa51b2e63824e766925aea9c86b9156a96"}
Feb 03 07:05:27 crc kubenswrapper[4998]: I0203 07:05:27.247241 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Feb 03 07:05:27 crc kubenswrapper[4998]: I0203 07:05:27.248238 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-47p8b" event={"ID":"0e86027e-f5e6-40ba-af5a-275b9087dcfd","Type":"ContainerStarted","Data":"e1a84369220e245b316cabc375b526f990bb18ec9cec340ac25b3c8e6b5bfe0b"}
Feb 03 07:05:27 crc kubenswrapper[4998]: I0203 07:05:27.257591 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Feb 03 07:05:27 crc kubenswrapper[4998]: I0203 07:05:27.268450 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-5rpgx" event={"ID":"c79d6b94-62d5-41a1-ae40-acec75234d16","Type":"ContainerStarted","Data":"0046115393d6b5d72941ace9cc81451669d45b554b11de3ee6191103fe4ab468"}
Feb 03 07:05:27 crc kubenswrapper[4998]: I0203 07:05:27.270256 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-f6jsx" event={"ID":"3374e30b-8de6-4e33-a180-bbaab7edfffc","Type":"ContainerStarted","Data":"ed31fa59a8d0f421fc519fe4f075b7847e08751e2e64c9bbed4f6bce847a0d21"}
Feb 03 07:05:27 crc kubenswrapper[4998]: I0203 07:05:27.277144 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6844ff7b9c-r9dfn" event={"ID":"478afdb4-cf55-4653-b5d3-1a81fbfd1833","Type":"ContainerStarted","Data":"50d8a2b2523f67ca51e04015e2f068c8ed3dc3660439a767c2d5c3abd3545eb9"}
Feb 03 07:05:27 crc kubenswrapper[4998]: I0203 07:05:27.362392 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-sstns"]
Feb 03 07:05:27 crc kubenswrapper[4998]: I0203 07:05:27.374889 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Feb 03 07:05:27 crc kubenswrapper[4998]: I0203 07:05:27.387726 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-jgnc7"]
Feb 03 07:05:28 crc kubenswrapper[4998]: I0203 07:05:28.289591 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"09cd9158-f279-4ac0-b8fe-0121e85a1b20","Type":"ContainerStarted","Data":"a561394a411f9e9905f3eb76a7bc935495a1e0f71ad59bb7e60e389e4ccf2656"}
Feb 03 07:05:28 crc kubenswrapper[4998]: I0203 07:05:28.439186 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Feb 03 07:05:28 crc kubenswrapper[4998]: I0203 07:05:28.509572 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Feb 03 07:05:28 crc kubenswrapper[4998]: I0203 07:05:28.523217 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Feb 03 07:05:29 crc kubenswrapper[4998]: W0203 07:05:29.134557 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod500ab31f_c75b_4a96_afa1_56f868909ecb.slice/crio-4733dbedb9e09764ff35d83aa08770d01e8d249a6ff1bb9f7bcae1fb97503817 WatchSource:0}: Error finding container 4733dbedb9e09764ff35d83aa08770d01e8d249a6ff1bb9f7bcae1fb97503817: Status 404 returned error can't find the container with id 4733dbedb9e09764ff35d83aa08770d01e8d249a6ff1bb9f7bcae1fb97503817
Feb 03 07:05:29 crc kubenswrapper[4998]: I0203 07:05:29.310380 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-jgnc7" event={"ID":"537b4446-c59e-4c79-9f65-2221ddb6783c","Type":"ContainerStarted","Data":"65e590cab571b3f3e33f594b9845c350650b69fef34e3e6c8418172f7de310de"}
Feb 03 07:05:29 crc kubenswrapper[4998]: I0203 07:05:29.315801 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"500ab31f-c75b-4a96-afa1-56f868909ecb","Type":"ContainerStarted","Data":"4733dbedb9e09764ff35d83aa08770d01e8d249a6ff1bb9f7bcae1fb97503817"}
Feb 03 07:05:29 crc kubenswrapper[4998]: I0203 07:05:29.317355 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-sstns" event={"ID":"520a4ac6-b880-4576-8772-da70e0d8f99d","Type":"ContainerStarted","Data":"da41190e010466ea20f309848574b1a313fe643ba2ce1aa607c8bfa5a61b632e"}
Feb 03 07:05:29 crc kubenswrapper[4998]: I0203 07:05:29.702029 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Feb 03 07:05:29 crc kubenswrapper[4998]: I0203 07:05:29.786121 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Feb 03 07:05:29 crc kubenswrapper[4998]: W0203 07:05:29.799218 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ff42407_ccc7_42cc_9bf6_185cbdaa98e4.slice/crio-973117c8213aa8a3375e67487dc3dedfc56dc485860dfc4d5e1631f6e3168874 WatchSource:0}: Error finding container 973117c8213aa8a3375e67487dc3dedfc56dc485860dfc4d5e1631f6e3168874: Status 404 returned error can't find the container with id 973117c8213aa8a3375e67487dc3dedfc56dc485860dfc4d5e1631f6e3168874
Feb 03 07:05:30 crc kubenswrapper[4998]: I0203 07:05:30.339719 4998 generic.go:334] "Generic (PLEG): container finished" podID="478afdb4-cf55-4653-b5d3-1a81fbfd1833" containerID="0404b0c2abdf867dc35bc044f80762eddbc1b305fa8acc2d4e9280149ceafc26" exitCode=0
Feb 03 07:05:30 crc kubenswrapper[4998]: I0203 07:05:30.340042 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6844ff7b9c-r9dfn" event={"ID":"478afdb4-cf55-4653-b5d3-1a81fbfd1833","Type":"ContainerDied","Data":"0404b0c2abdf867dc35bc044f80762eddbc1b305fa8acc2d4e9280149ceafc26"}
Feb 03 07:05:30 crc kubenswrapper[4998]: I0203 07:05:30.376147 4998 generic.go:334] "Generic (PLEG): container finished" podID="b348d733-e0cf-4250-babc-f0ef8f348e92" containerID="d9781cec7bf163e30431b90c45654f6c0627b2918db8d7ef7f18c0cac054cdfd" exitCode=0
Feb 03 07:05:30 crc kubenswrapper[4998]: I0203 07:05:30.376413 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-798d96496c-ps8sv" event={"ID":"b348d733-e0cf-4250-babc-f0ef8f348e92","Type":"ContainerDied","Data":"d9781cec7bf163e30431b90c45654f6c0627b2918db8d7ef7f18c0cac054cdfd"}
Feb 03 07:05:30 crc kubenswrapper[4998]: I0203 07:05:30.381969 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-5rpgx" event={"ID":"c79d6b94-62d5-41a1-ae40-acec75234d16","Type":"ContainerStarted","Data":"9820beae08e9bb31b81429dd4b22978f152df4c77526ab762d9b068a4fb5c3cd"}
Feb 03 07:05:30 crc kubenswrapper[4998]: I0203 07:05:30.398823 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b57813b2-2ae4-497d-86b4-250ddc3d87a5","Type":"ContainerStarted","Data":"c0f081ef3c7214ef67393824869e107c3a9fed7d49eadfe5abff23af2da95c3c"}
Feb 03 07:05:30 crc kubenswrapper[4998]: I0203 07:05:30.398902 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b57813b2-2ae4-497d-86b4-250ddc3d87a5","Type":"ContainerStarted","Data":"9a7a234b943e59abaac18c6a69df7de0320e6b67b96fe6f8b297d49fa13a749d"}
Feb 03 07:05:30 crc kubenswrapper[4998]: I0203 07:05:30.419426 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-f6jsx" event={"ID":"3374e30b-8de6-4e33-a180-bbaab7edfffc","Type":"ContainerStarted","Data":"4a1fdbdde27c80bfba169f3982695b5dbb450d92f7615a63f9245cfcb91e6ac8"}
Feb 03 07:05:30 crc kubenswrapper[4998]: I0203 07:05:30.423466 4998 kubelet.go:2453] "SyncLoop (PLEG):
event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4","Type":"ContainerStarted","Data":"973117c8213aa8a3375e67487dc3dedfc56dc485860dfc4d5e1631f6e3168874"} Feb 03 07:05:30 crc kubenswrapper[4998]: I0203 07:05:30.436551 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-5rpgx" podStartSLOduration=5.436530709 podStartE2EDuration="5.436530709s" podCreationTimestamp="2026-02-03 07:05:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:05:30.42993298 +0000 UTC m=+1168.716626806" watchObservedRunningTime="2026-02-03 07:05:30.436530709 +0000 UTC m=+1168.723224535" Feb 03 07:05:30 crc kubenswrapper[4998]: I0203 07:05:30.466655 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-f6jsx" podStartSLOduration=5.466633522 podStartE2EDuration="5.466633522s" podCreationTimestamp="2026-02-03 07:05:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:05:30.456151941 +0000 UTC m=+1168.742845757" watchObservedRunningTime="2026-02-03 07:05:30.466633522 +0000 UTC m=+1168.753327318" Feb 03 07:05:30 crc kubenswrapper[4998]: I0203 07:05:30.756942 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-798d96496c-ps8sv" Feb 03 07:05:30 crc kubenswrapper[4998]: I0203 07:05:30.800113 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b348d733-e0cf-4250-babc-f0ef8f348e92-ovsdbserver-nb\") pod \"b348d733-e0cf-4250-babc-f0ef8f348e92\" (UID: \"b348d733-e0cf-4250-babc-f0ef8f348e92\") " Feb 03 07:05:30 crc kubenswrapper[4998]: I0203 07:05:30.800185 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b348d733-e0cf-4250-babc-f0ef8f348e92-dns-svc\") pod \"b348d733-e0cf-4250-babc-f0ef8f348e92\" (UID: \"b348d733-e0cf-4250-babc-f0ef8f348e92\") " Feb 03 07:05:30 crc kubenswrapper[4998]: I0203 07:05:30.800241 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b348d733-e0cf-4250-babc-f0ef8f348e92-config\") pod \"b348d733-e0cf-4250-babc-f0ef8f348e92\" (UID: \"b348d733-e0cf-4250-babc-f0ef8f348e92\") " Feb 03 07:05:30 crc kubenswrapper[4998]: I0203 07:05:30.800264 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b348d733-e0cf-4250-babc-f0ef8f348e92-ovsdbserver-sb\") pod \"b348d733-e0cf-4250-babc-f0ef8f348e92\" (UID: \"b348d733-e0cf-4250-babc-f0ef8f348e92\") " Feb 03 07:05:30 crc kubenswrapper[4998]: I0203 07:05:30.800300 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-md8qc\" (UniqueName: \"kubernetes.io/projected/b348d733-e0cf-4250-babc-f0ef8f348e92-kube-api-access-md8qc\") pod \"b348d733-e0cf-4250-babc-f0ef8f348e92\" (UID: \"b348d733-e0cf-4250-babc-f0ef8f348e92\") " Feb 03 07:05:30 crc kubenswrapper[4998]: I0203 07:05:30.811584 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b348d733-e0cf-4250-babc-f0ef8f348e92-kube-api-access-md8qc" (OuterVolumeSpecName: 
"kube-api-access-md8qc") pod "b348d733-e0cf-4250-babc-f0ef8f348e92" (UID: "b348d733-e0cf-4250-babc-f0ef8f348e92"). InnerVolumeSpecName "kube-api-access-md8qc". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:05:30 crc kubenswrapper[4998]: I0203 07:05:30.826630 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b348d733-e0cf-4250-babc-f0ef8f348e92-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b348d733-e0cf-4250-babc-f0ef8f348e92" (UID: "b348d733-e0cf-4250-babc-f0ef8f348e92"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:05:30 crc kubenswrapper[4998]: I0203 07:05:30.831302 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b348d733-e0cf-4250-babc-f0ef8f348e92-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b348d733-e0cf-4250-babc-f0ef8f348e92" (UID: "b348d733-e0cf-4250-babc-f0ef8f348e92"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:05:30 crc kubenswrapper[4998]: I0203 07:05:30.834862 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b348d733-e0cf-4250-babc-f0ef8f348e92-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b348d733-e0cf-4250-babc-f0ef8f348e92" (UID: "b348d733-e0cf-4250-babc-f0ef8f348e92"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:05:30 crc kubenswrapper[4998]: I0203 07:05:30.844643 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b348d733-e0cf-4250-babc-f0ef8f348e92-config" (OuterVolumeSpecName: "config") pod "b348d733-e0cf-4250-babc-f0ef8f348e92" (UID: "b348d733-e0cf-4250-babc-f0ef8f348e92"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:05:30 crc kubenswrapper[4998]: I0203 07:05:30.902254 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b348d733-e0cf-4250-babc-f0ef8f348e92-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:30 crc kubenswrapper[4998]: I0203 07:05:30.902308 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b348d733-e0cf-4250-babc-f0ef8f348e92-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:30 crc kubenswrapper[4998]: I0203 07:05:30.902319 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-md8qc\" (UniqueName: \"kubernetes.io/projected/b348d733-e0cf-4250-babc-f0ef8f348e92-kube-api-access-md8qc\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:30 crc kubenswrapper[4998]: I0203 07:05:30.902328 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b348d733-e0cf-4250-babc-f0ef8f348e92-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:30 crc kubenswrapper[4998]: I0203 07:05:30.902339 4998 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b348d733-e0cf-4250-babc-f0ef8f348e92-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:31 crc kubenswrapper[4998]: I0203 07:05:31.463964 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4","Type":"ContainerStarted","Data":"90b90589a1fe5f5514274bc1a1c08e61bea62fb10dc1b3c4b1f8c42dc58cab23"} Feb 03 07:05:31 crc kubenswrapper[4998]: I0203 07:05:31.477890 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6844ff7b9c-r9dfn" event={"ID":"478afdb4-cf55-4653-b5d3-1a81fbfd1833","Type":"ContainerStarted","Data":"7b08ef25e1c94f34d90530a9b857312bd2d8527a7054c341dcb83d1f3b5a95f9"} Feb 03 07:05:31 crc kubenswrapper[4998]: I0203 07:05:31.479005 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6844ff7b9c-r9dfn" Feb 03 07:05:31 crc kubenswrapper[4998]: I0203 07:05:31.489816 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-798d96496c-ps8sv" event={"ID":"b348d733-e0cf-4250-babc-f0ef8f348e92","Type":"ContainerDied","Data":"4fcbc8a9e559cff9aa1d9432f4d9edfa51b2e63824e766925aea9c86b9156a96"} Feb 03 07:05:31 crc kubenswrapper[4998]: I0203 07:05:31.489864 4998 scope.go:117] "RemoveContainer" containerID="d9781cec7bf163e30431b90c45654f6c0627b2918db8d7ef7f18c0cac054cdfd" Feb 03 07:05:31 crc kubenswrapper[4998]: I0203 07:05:31.489993 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-798d96496c-ps8sv" Feb 03 07:05:31 crc kubenswrapper[4998]: I0203 07:05:31.531607 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6844ff7b9c-r9dfn" podStartSLOduration=6.531587543 podStartE2EDuration="6.531587543s" podCreationTimestamp="2026-02-03 07:05:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:05:31.527341022 +0000 UTC m=+1169.814034848" watchObservedRunningTime="2026-02-03 07:05:31.531587543 +0000 UTC m=+1169.818281349" Feb 03 07:05:31 crc kubenswrapper[4998]: I0203 07:05:31.550769 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="b57813b2-2ae4-497d-86b4-250ddc3d87a5" containerName="glance-log" containerID="cri-o://c0f081ef3c7214ef67393824869e107c3a9fed7d49eadfe5abff23af2da95c3c" gracePeriod=30 Feb 03 07:05:31 crc kubenswrapper[4998]: I0203 07:05:31.551060 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b57813b2-2ae4-497d-86b4-250ddc3d87a5","Type":"ContainerStarted","Data":"f2ec0b809779eea23a487876b7bbeded24b8218d7280cc41418feac74bba04db"} Feb 03 07:05:31 crc kubenswrapper[4998]: I0203 07:05:31.551080 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="b57813b2-2ae4-497d-86b4-250ddc3d87a5" containerName="glance-httpd" containerID="cri-o://f2ec0b809779eea23a487876b7bbeded24b8218d7280cc41418feac74bba04db" gracePeriod=30 Feb 03 07:05:31 crc kubenswrapper[4998]: I0203 07:05:31.674858 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-798d96496c-ps8sv"] Feb 03 07:05:31 crc kubenswrapper[4998]: I0203 07:05:31.676690 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-798d96496c-ps8sv"] Feb 03 07:05:31 crc kubenswrapper[4998]: I0203 07:05:31.685081 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=6.685064822 podStartE2EDuration="6.685064822s" podCreationTimestamp="2026-02-03 07:05:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:05:31.61626355 +0000 UTC m=+1169.902957376" watchObservedRunningTime="2026-02-03 07:05:31.685064822 +0000 UTC m=+1169.971758628" Feb 03 07:05:32 crc kubenswrapper[4998]: I0203 07:05:32.441117 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b348d733-e0cf-4250-babc-f0ef8f348e92" path="/var/lib/kubelet/pods/b348d733-e0cf-4250-babc-f0ef8f348e92/volumes" Feb 03 07:05:32 crc kubenswrapper[4998]: I0203 07:05:32.560580 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4","Type":"ContainerStarted","Data":"856d12e6a4c251904762f72f02eede8e6a8ed99b4af1b55690aa3d3c5460a6d6"} Feb 03 07:05:32 crc kubenswrapper[4998]: I0203 07:05:32.560678 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="7ff42407-ccc7-42cc-9bf6-185cbdaa98e4" containerName="glance-log" containerID="cri-o://90b90589a1fe5f5514274bc1a1c08e61bea62fb10dc1b3c4b1f8c42dc58cab23" gracePeriod=30 Feb 03 07:05:32 crc kubenswrapper[4998]: I0203 
07:05:32.560744 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="7ff42407-ccc7-42cc-9bf6-185cbdaa98e4" containerName="glance-httpd" containerID="cri-o://856d12e6a4c251904762f72f02eede8e6a8ed99b4af1b55690aa3d3c5460a6d6" gracePeriod=30 Feb 03 07:05:32 crc kubenswrapper[4998]: I0203 07:05:32.571464 4998 generic.go:334] "Generic (PLEG): container finished" podID="b57813b2-2ae4-497d-86b4-250ddc3d87a5" containerID="f2ec0b809779eea23a487876b7bbeded24b8218d7280cc41418feac74bba04db" exitCode=143 Feb 03 07:05:32 crc kubenswrapper[4998]: I0203 07:05:32.571507 4998 generic.go:334] "Generic (PLEG): container finished" podID="b57813b2-2ae4-497d-86b4-250ddc3d87a5" containerID="c0f081ef3c7214ef67393824869e107c3a9fed7d49eadfe5abff23af2da95c3c" exitCode=143 Feb 03 07:05:32 crc kubenswrapper[4998]: I0203 07:05:32.571579 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b57813b2-2ae4-497d-86b4-250ddc3d87a5","Type":"ContainerDied","Data":"f2ec0b809779eea23a487876b7bbeded24b8218d7280cc41418feac74bba04db"} Feb 03 07:05:32 crc kubenswrapper[4998]: I0203 07:05:32.571624 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b57813b2-2ae4-497d-86b4-250ddc3d87a5","Type":"ContainerDied","Data":"c0f081ef3c7214ef67393824869e107c3a9fed7d49eadfe5abff23af2da95c3c"} Feb 03 07:05:32 crc kubenswrapper[4998]: I0203 07:05:32.610609 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=7.610588477 podStartE2EDuration="7.610588477s" podCreationTimestamp="2026-02-03 07:05:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:05:32.584745136 +0000 UTC m=+1170.871438962" watchObservedRunningTime="2026-02-03 07:05:32.610588477 +0000 UTC m=+1170.897282273" Feb 03 07:05:33 crc kubenswrapper[4998]: I0203 07:05:33.585094 4998 generic.go:334] "Generic (PLEG): container finished" podID="7ff42407-ccc7-42cc-9bf6-185cbdaa98e4" containerID="856d12e6a4c251904762f72f02eede8e6a8ed99b4af1b55690aa3d3c5460a6d6" exitCode=0 Feb 03 07:05:33 crc kubenswrapper[4998]: I0203 07:05:33.585646 4998 generic.go:334] "Generic (PLEG): container finished" podID="7ff42407-ccc7-42cc-9bf6-185cbdaa98e4" containerID="90b90589a1fe5f5514274bc1a1c08e61bea62fb10dc1b3c4b1f8c42dc58cab23" exitCode=143 Feb 03 07:05:33 crc kubenswrapper[4998]: I0203 07:05:33.585146 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4","Type":"ContainerDied","Data":"856d12e6a4c251904762f72f02eede8e6a8ed99b4af1b55690aa3d3c5460a6d6"} Feb 03 07:05:33 crc kubenswrapper[4998]: I0203 07:05:33.585749 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4","Type":"ContainerDied","Data":"90b90589a1fe5f5514274bc1a1c08e61bea62fb10dc1b3c4b1f8c42dc58cab23"} Feb 03 07:05:34 crc kubenswrapper[4998]: I0203 07:05:34.598417 4998 generic.go:334] "Generic (PLEG): container finished" podID="3374e30b-8de6-4e33-a180-bbaab7edfffc" containerID="4a1fdbdde27c80bfba169f3982695b5dbb450d92f7615a63f9245cfcb91e6ac8" exitCode=0 Feb 03 07:05:34 crc kubenswrapper[4998]: I0203 07:05:34.598460 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/keystone-bootstrap-f6jsx" event={"ID":"3374e30b-8de6-4e33-a180-bbaab7edfffc","Type":"ContainerDied","Data":"4a1fdbdde27c80bfba169f3982695b5dbb450d92f7615a63f9245cfcb91e6ac8"} Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.296316 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.482553 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-combined-ca-bundle\") pod \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\" (UID: \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\") " Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.482685 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-httpd-run\") pod \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\" (UID: \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\") " Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.482713 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-config-data\") pod \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\" (UID: \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\") " Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.482742 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c8nzb\" (UniqueName: \"kubernetes.io/projected/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-kube-api-access-c8nzb\") pod \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\" (UID: \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\") " Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.482852 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-logs\") pod \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\" (UID: \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\") " Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.482905 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-scripts\") pod \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\" (UID: \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\") " Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.482966 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\" (UID: \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\") " Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.483257 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-logs" (OuterVolumeSpecName: "logs") pod "7ff42407-ccc7-42cc-9bf6-185cbdaa98e4" (UID: "7ff42407-ccc7-42cc-9bf6-185cbdaa98e4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.483280 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "7ff42407-ccc7-42cc-9bf6-185cbdaa98e4" (UID: "7ff42407-ccc7-42cc-9bf6-185cbdaa98e4"). 
InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.484161 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-public-tls-certs\") pod \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\" (UID: \"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4\") " Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.484811 4998 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.484833 4998 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-logs\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.489667 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-kube-api-access-c8nzb" (OuterVolumeSpecName: "kube-api-access-c8nzb") pod "7ff42407-ccc7-42cc-9bf6-185cbdaa98e4" (UID: "7ff42407-ccc7-42cc-9bf6-185cbdaa98e4"). InnerVolumeSpecName "kube-api-access-c8nzb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.490987 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "glance") pod "7ff42407-ccc7-42cc-9bf6-185cbdaa98e4" (UID: "7ff42407-ccc7-42cc-9bf6-185cbdaa98e4"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.497698 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-scripts" (OuterVolumeSpecName: "scripts") pod "7ff42407-ccc7-42cc-9bf6-185cbdaa98e4" (UID: "7ff42407-ccc7-42cc-9bf6-185cbdaa98e4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.519397 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7ff42407-ccc7-42cc-9bf6-185cbdaa98e4" (UID: "7ff42407-ccc7-42cc-9bf6-185cbdaa98e4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.543534 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-config-data" (OuterVolumeSpecName: "config-data") pod "7ff42407-ccc7-42cc-9bf6-185cbdaa98e4" (UID: "7ff42407-ccc7-42cc-9bf6-185cbdaa98e4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.558079 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "7ff42407-ccc7-42cc-9bf6-185cbdaa98e4" (UID: "7ff42407-ccc7-42cc-9bf6-185cbdaa98e4"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.586032 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.586070 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c8nzb\" (UniqueName: \"kubernetes.io/projected/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-kube-api-access-c8nzb\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.586082 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.586118 4998 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.586130 4998 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.586140 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.605267 4998 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.608585 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.608649 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7ff42407-ccc7-42cc-9bf6-185cbdaa98e4","Type":"ContainerDied","Data":"973117c8213aa8a3375e67487dc3dedfc56dc485860dfc4d5e1631f6e3168874"} Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.608696 4998 scope.go:117] "RemoveContainer" containerID="856d12e6a4c251904762f72f02eede8e6a8ed99b4af1b55690aa3d3c5460a6d6" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.655405 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.671890 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.684007 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 07:05:35 crc kubenswrapper[4998]: E0203 07:05:35.684436 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ff42407-ccc7-42cc-9bf6-185cbdaa98e4" containerName="glance-httpd" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.684453 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ff42407-ccc7-42cc-9bf6-185cbdaa98e4" containerName="glance-httpd" Feb 03 07:05:35 crc kubenswrapper[4998]: E0203 07:05:35.684478 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b348d733-e0cf-4250-babc-f0ef8f348e92" containerName="init" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.684486 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="b348d733-e0cf-4250-babc-f0ef8f348e92" containerName="init" Feb 03 07:05:35 crc kubenswrapper[4998]: E0203 07:05:35.684510 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ff42407-ccc7-42cc-9bf6-185cbdaa98e4" containerName="glance-log" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.684517 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ff42407-ccc7-42cc-9bf6-185cbdaa98e4" containerName="glance-log" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.684730 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="b348d733-e0cf-4250-babc-f0ef8f348e92" containerName="init" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.684752 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ff42407-ccc7-42cc-9bf6-185cbdaa98e4" containerName="glance-httpd" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.684765 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ff42407-ccc7-42cc-9bf6-185cbdaa98e4" containerName="glance-log" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.685874 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.688357 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.689252 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.689873 4998 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.727005 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.790916 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\") " pod="openstack/glance-default-external-api-0" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.790956 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\") " pod="openstack/glance-default-external-api-0" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.790978 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcml2\" (UniqueName: \"kubernetes.io/projected/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-kube-api-access-wcml2\") pod \"glance-default-external-api-0\" (UID: \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\") " pod="openstack/glance-default-external-api-0" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.790997 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-logs\") pod \"glance-default-external-api-0\" (UID: \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\") " pod="openstack/glance-default-external-api-0" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.791017 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-config-data\") pod \"glance-default-external-api-0\" (UID: \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\") " pod="openstack/glance-default-external-api-0" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.791047 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\") " pod="openstack/glance-default-external-api-0" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.791094 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-combined-ca-bundle\") pod 
\"glance-default-external-api-0\" (UID: \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\") " pod="openstack/glance-default-external-api-0" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.791137 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-scripts\") pod \"glance-default-external-api-0\" (UID: \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\") " pod="openstack/glance-default-external-api-0" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.893129 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\") " pod="openstack/glance-default-external-api-0" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.893496 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\") " pod="openstack/glance-default-external-api-0" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.893538 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcml2\" (UniqueName: \"kubernetes.io/projected/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-kube-api-access-wcml2\") pod \"glance-default-external-api-0\" (UID: \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\") " pod="openstack/glance-default-external-api-0" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.893580 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-logs\") pod \"glance-default-external-api-0\" (UID: \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\") " pod="openstack/glance-default-external-api-0" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.893611 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-config-data\") pod \"glance-default-external-api-0\" (UID: \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\") " pod="openstack/glance-default-external-api-0" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.893668 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\") " pod="openstack/glance-default-external-api-0" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.893775 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\") " pod="openstack/glance-default-external-api-0" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.893967 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-scripts\") pod \"glance-default-external-api-0\" (UID: \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\") " 
pod="openstack/glance-default-external-api-0" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.894543 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-logs\") pod \"glance-default-external-api-0\" (UID: \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\") " pod="openstack/glance-default-external-api-0" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.894621 4998 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/glance-default-external-api-0" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.899372 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\") " pod="openstack/glance-default-external-api-0" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.900291 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-config-data\") pod \"glance-default-external-api-0\" (UID: \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\") " pod="openstack/glance-default-external-api-0" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.905354 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-scripts\") pod \"glance-default-external-api-0\" (UID: \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\") " pod="openstack/glance-default-external-api-0" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.913576 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcml2\" (UniqueName: \"kubernetes.io/projected/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-kube-api-access-wcml2\") pod \"glance-default-external-api-0\" (UID: \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\") " pod="openstack/glance-default-external-api-0" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.915065 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\") " pod="openstack/glance-default-external-api-0" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.956582 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\") " pod="openstack/glance-default-external-api-0" Feb 03 07:05:35 crc kubenswrapper[4998]: I0203 07:05:35.997231 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\") " pod="openstack/glance-default-external-api-0" Feb 03 07:05:36 crc kubenswrapper[4998]: I0203 07:05:36.021016 4998 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 03 07:05:36 crc kubenswrapper[4998]: I0203 07:05:36.439837 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ff42407-ccc7-42cc-9bf6-185cbdaa98e4" path="/var/lib/kubelet/pods/7ff42407-ccc7-42cc-9bf6-185cbdaa98e4/volumes" Feb 03 07:05:36 crc kubenswrapper[4998]: I0203 07:05:36.442917 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6844ff7b9c-r9dfn" Feb 03 07:05:36 crc kubenswrapper[4998]: I0203 07:05:36.514442 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d489f5d97-pntcj"] Feb 03 07:05:36 crc kubenswrapper[4998]: I0203 07:05:36.514686 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-d489f5d97-pntcj" podUID="cf2f7b9f-8064-4194-bff3-481d1e9b4221" containerName="dnsmasq-dns" containerID="cri-o://70411b347a9e959e62d523e1073fa00e328f7777da526db88ffb79ecb94830d9" gracePeriod=10 Feb 03 07:05:36 crc kubenswrapper[4998]: I0203 07:05:36.680988 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-d489f5d97-pntcj" podUID="cf2f7b9f-8064-4194-bff3-481d1e9b4221" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.129:5353: connect: connection refused" Feb 03 07:05:36 crc kubenswrapper[4998]: I0203 07:05:36.775360 4998 scope.go:117] "RemoveContainer" containerID="90b90589a1fe5f5514274bc1a1c08e61bea62fb10dc1b3c4b1f8c42dc58cab23" Feb 03 07:05:36 crc kubenswrapper[4998]: I0203 07:05:36.863176 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-f6jsx" Feb 03 07:05:37 crc kubenswrapper[4998]: I0203 07:05:37.014052 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmkm4\" (UniqueName: \"kubernetes.io/projected/3374e30b-8de6-4e33-a180-bbaab7edfffc-kube-api-access-nmkm4\") pod \"3374e30b-8de6-4e33-a180-bbaab7edfffc\" (UID: \"3374e30b-8de6-4e33-a180-bbaab7edfffc\") " Feb 03 07:05:37 crc kubenswrapper[4998]: I0203 07:05:37.014208 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3374e30b-8de6-4e33-a180-bbaab7edfffc-fernet-keys\") pod \"3374e30b-8de6-4e33-a180-bbaab7edfffc\" (UID: \"3374e30b-8de6-4e33-a180-bbaab7edfffc\") " Feb 03 07:05:37 crc kubenswrapper[4998]: I0203 07:05:37.015227 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3374e30b-8de6-4e33-a180-bbaab7edfffc-combined-ca-bundle\") pod \"3374e30b-8de6-4e33-a180-bbaab7edfffc\" (UID: \"3374e30b-8de6-4e33-a180-bbaab7edfffc\") " Feb 03 07:05:37 crc kubenswrapper[4998]: I0203 07:05:37.015264 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3374e30b-8de6-4e33-a180-bbaab7edfffc-credential-keys\") pod \"3374e30b-8de6-4e33-a180-bbaab7edfffc\" (UID: \"3374e30b-8de6-4e33-a180-bbaab7edfffc\") " Feb 03 07:05:37 crc kubenswrapper[4998]: I0203 07:05:37.015315 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3374e30b-8de6-4e33-a180-bbaab7edfffc-scripts\") pod \"3374e30b-8de6-4e33-a180-bbaab7edfffc\" (UID: \"3374e30b-8de6-4e33-a180-bbaab7edfffc\") " Feb 03 07:05:37 crc kubenswrapper[4998]: I0203 
07:05:37.015344 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3374e30b-8de6-4e33-a180-bbaab7edfffc-config-data\") pod \"3374e30b-8de6-4e33-a180-bbaab7edfffc\" (UID: \"3374e30b-8de6-4e33-a180-bbaab7edfffc\") " Feb 03 07:05:37 crc kubenswrapper[4998]: I0203 07:05:37.020144 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3374e30b-8de6-4e33-a180-bbaab7edfffc-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "3374e30b-8de6-4e33-a180-bbaab7edfffc" (UID: "3374e30b-8de6-4e33-a180-bbaab7edfffc"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:05:37 crc kubenswrapper[4998]: I0203 07:05:37.020702 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3374e30b-8de6-4e33-a180-bbaab7edfffc-kube-api-access-nmkm4" (OuterVolumeSpecName: "kube-api-access-nmkm4") pod "3374e30b-8de6-4e33-a180-bbaab7edfffc" (UID: "3374e30b-8de6-4e33-a180-bbaab7edfffc"). InnerVolumeSpecName "kube-api-access-nmkm4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:05:37 crc kubenswrapper[4998]: I0203 07:05:37.023145 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3374e30b-8de6-4e33-a180-bbaab7edfffc-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "3374e30b-8de6-4e33-a180-bbaab7edfffc" (UID: "3374e30b-8de6-4e33-a180-bbaab7edfffc"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:05:37 crc kubenswrapper[4998]: I0203 07:05:37.040967 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3374e30b-8de6-4e33-a180-bbaab7edfffc-scripts" (OuterVolumeSpecName: "scripts") pod "3374e30b-8de6-4e33-a180-bbaab7edfffc" (UID: "3374e30b-8de6-4e33-a180-bbaab7edfffc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:05:37 crc kubenswrapper[4998]: I0203 07:05:37.045908 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3374e30b-8de6-4e33-a180-bbaab7edfffc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3374e30b-8de6-4e33-a180-bbaab7edfffc" (UID: "3374e30b-8de6-4e33-a180-bbaab7edfffc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:05:37 crc kubenswrapper[4998]: I0203 07:05:37.047362 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3374e30b-8de6-4e33-a180-bbaab7edfffc-config-data" (OuterVolumeSpecName: "config-data") pod "3374e30b-8de6-4e33-a180-bbaab7edfffc" (UID: "3374e30b-8de6-4e33-a180-bbaab7edfffc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:05:37 crc kubenswrapper[4998]: I0203 07:05:37.119060 4998 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3374e30b-8de6-4e33-a180-bbaab7edfffc-fernet-keys\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:37 crc kubenswrapper[4998]: I0203 07:05:37.119374 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3374e30b-8de6-4e33-a180-bbaab7edfffc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:37 crc kubenswrapper[4998]: I0203 07:05:37.119390 4998 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3374e30b-8de6-4e33-a180-bbaab7edfffc-credential-keys\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:37 crc kubenswrapper[4998]: I0203 07:05:37.119402 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3374e30b-8de6-4e33-a180-bbaab7edfffc-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:37 crc kubenswrapper[4998]: I0203 07:05:37.119414 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3374e30b-8de6-4e33-a180-bbaab7edfffc-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:37 crc kubenswrapper[4998]: I0203 07:05:37.119425 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmkm4\" (UniqueName: \"kubernetes.io/projected/3374e30b-8de6-4e33-a180-bbaab7edfffc-kube-api-access-nmkm4\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:37 crc kubenswrapper[4998]: I0203 07:05:37.637985 4998 generic.go:334] "Generic (PLEG): container finished" podID="cf2f7b9f-8064-4194-bff3-481d1e9b4221" containerID="70411b347a9e959e62d523e1073fa00e328f7777da526db88ffb79ecb94830d9" exitCode=0 Feb 03 07:05:37 crc kubenswrapper[4998]: I0203 07:05:37.638067 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d489f5d97-pntcj" event={"ID":"cf2f7b9f-8064-4194-bff3-481d1e9b4221","Type":"ContainerDied","Data":"70411b347a9e959e62d523e1073fa00e328f7777da526db88ffb79ecb94830d9"} Feb 03 07:05:37 crc kubenswrapper[4998]: I0203 07:05:37.640142 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-f6jsx" event={"ID":"3374e30b-8de6-4e33-a180-bbaab7edfffc","Type":"ContainerDied","Data":"ed31fa59a8d0f421fc519fe4f075b7847e08751e2e64c9bbed4f6bce847a0d21"} Feb 03 07:05:37 crc kubenswrapper[4998]: I0203 07:05:37.640178 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ed31fa59a8d0f421fc519fe4f075b7847e08751e2e64c9bbed4f6bce847a0d21" Feb 03 07:05:37 crc kubenswrapper[4998]: I0203 07:05:37.640228 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-f6jsx" Feb 03 07:05:37 crc kubenswrapper[4998]: I0203 07:05:37.963095 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-f6jsx"] Feb 03 07:05:37 crc kubenswrapper[4998]: I0203 07:05:37.971917 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-f6jsx"] Feb 03 07:05:38 crc kubenswrapper[4998]: I0203 07:05:38.050926 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-kl26j"] Feb 03 07:05:38 crc kubenswrapper[4998]: E0203 07:05:38.051374 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3374e30b-8de6-4e33-a180-bbaab7edfffc" containerName="keystone-bootstrap" Feb 03 07:05:38 crc kubenswrapper[4998]: I0203 07:05:38.051399 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="3374e30b-8de6-4e33-a180-bbaab7edfffc" containerName="keystone-bootstrap" Feb 03 07:05:38 crc kubenswrapper[4998]: I0203 07:05:38.051640 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="3374e30b-8de6-4e33-a180-bbaab7edfffc" containerName="keystone-bootstrap" Feb 03 07:05:38 crc kubenswrapper[4998]: I0203 07:05:38.052331 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-kl26j" Feb 03 07:05:38 crc kubenswrapper[4998]: I0203 07:05:38.054078 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Feb 03 07:05:38 crc kubenswrapper[4998]: I0203 07:05:38.054560 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Feb 03 07:05:38 crc kubenswrapper[4998]: I0203 07:05:38.054767 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-k4sf9" Feb 03 07:05:38 crc kubenswrapper[4998]: I0203 07:05:38.054813 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Feb 03 07:05:38 crc kubenswrapper[4998]: I0203 07:05:38.058356 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-kl26j"] Feb 03 07:05:38 crc kubenswrapper[4998]: I0203 07:05:38.071141 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Feb 03 07:05:38 crc kubenswrapper[4998]: I0203 07:05:38.245085 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbmjl\" (UniqueName: \"kubernetes.io/projected/ca949bbc-e75e-48f4-80f2-825ec09184a3-kube-api-access-jbmjl\") pod \"keystone-bootstrap-kl26j\" (UID: \"ca949bbc-e75e-48f4-80f2-825ec09184a3\") " pod="openstack/keystone-bootstrap-kl26j" Feb 03 07:05:38 crc kubenswrapper[4998]: I0203 07:05:38.245448 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ca949bbc-e75e-48f4-80f2-825ec09184a3-fernet-keys\") pod \"keystone-bootstrap-kl26j\" (UID: \"ca949bbc-e75e-48f4-80f2-825ec09184a3\") " pod="openstack/keystone-bootstrap-kl26j" Feb 03 07:05:38 crc kubenswrapper[4998]: I0203 07:05:38.245482 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca949bbc-e75e-48f4-80f2-825ec09184a3-combined-ca-bundle\") pod \"keystone-bootstrap-kl26j\" (UID: \"ca949bbc-e75e-48f4-80f2-825ec09184a3\") " pod="openstack/keystone-bootstrap-kl26j" Feb 03 07:05:38 crc kubenswrapper[4998]: 
I0203 07:05:38.245530 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ca949bbc-e75e-48f4-80f2-825ec09184a3-credential-keys\") pod \"keystone-bootstrap-kl26j\" (UID: \"ca949bbc-e75e-48f4-80f2-825ec09184a3\") " pod="openstack/keystone-bootstrap-kl26j" Feb 03 07:05:38 crc kubenswrapper[4998]: I0203 07:05:38.245631 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca949bbc-e75e-48f4-80f2-825ec09184a3-config-data\") pod \"keystone-bootstrap-kl26j\" (UID: \"ca949bbc-e75e-48f4-80f2-825ec09184a3\") " pod="openstack/keystone-bootstrap-kl26j" Feb 03 07:05:38 crc kubenswrapper[4998]: I0203 07:05:38.245769 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca949bbc-e75e-48f4-80f2-825ec09184a3-scripts\") pod \"keystone-bootstrap-kl26j\" (UID: \"ca949bbc-e75e-48f4-80f2-825ec09184a3\") " pod="openstack/keystone-bootstrap-kl26j" Feb 03 07:05:38 crc kubenswrapper[4998]: I0203 07:05:38.347124 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca949bbc-e75e-48f4-80f2-825ec09184a3-config-data\") pod \"keystone-bootstrap-kl26j\" (UID: \"ca949bbc-e75e-48f4-80f2-825ec09184a3\") " pod="openstack/keystone-bootstrap-kl26j" Feb 03 07:05:38 crc kubenswrapper[4998]: I0203 07:05:38.347215 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca949bbc-e75e-48f4-80f2-825ec09184a3-scripts\") pod \"keystone-bootstrap-kl26j\" (UID: \"ca949bbc-e75e-48f4-80f2-825ec09184a3\") " pod="openstack/keystone-bootstrap-kl26j" Feb 03 07:05:38 crc kubenswrapper[4998]: I0203 07:05:38.347291 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbmjl\" (UniqueName: \"kubernetes.io/projected/ca949bbc-e75e-48f4-80f2-825ec09184a3-kube-api-access-jbmjl\") pod \"keystone-bootstrap-kl26j\" (UID: \"ca949bbc-e75e-48f4-80f2-825ec09184a3\") " pod="openstack/keystone-bootstrap-kl26j" Feb 03 07:05:38 crc kubenswrapper[4998]: I0203 07:05:38.347336 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ca949bbc-e75e-48f4-80f2-825ec09184a3-fernet-keys\") pod \"keystone-bootstrap-kl26j\" (UID: \"ca949bbc-e75e-48f4-80f2-825ec09184a3\") " pod="openstack/keystone-bootstrap-kl26j" Feb 03 07:05:38 crc kubenswrapper[4998]: I0203 07:05:38.347374 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca949bbc-e75e-48f4-80f2-825ec09184a3-combined-ca-bundle\") pod \"keystone-bootstrap-kl26j\" (UID: \"ca949bbc-e75e-48f4-80f2-825ec09184a3\") " pod="openstack/keystone-bootstrap-kl26j" Feb 03 07:05:38 crc kubenswrapper[4998]: I0203 07:05:38.347423 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ca949bbc-e75e-48f4-80f2-825ec09184a3-credential-keys\") pod \"keystone-bootstrap-kl26j\" (UID: \"ca949bbc-e75e-48f4-80f2-825ec09184a3\") " pod="openstack/keystone-bootstrap-kl26j" Feb 03 07:05:38 crc kubenswrapper[4998]: I0203 07:05:38.352803 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca949bbc-e75e-48f4-80f2-825ec09184a3-combined-ca-bundle\") pod \"keystone-bootstrap-kl26j\" (UID: \"ca949bbc-e75e-48f4-80f2-825ec09184a3\") " pod="openstack/keystone-bootstrap-kl26j" Feb 03 07:05:38 crc kubenswrapper[4998]: I0203 07:05:38.352844 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ca949bbc-e75e-48f4-80f2-825ec09184a3-fernet-keys\") pod \"keystone-bootstrap-kl26j\" (UID: \"ca949bbc-e75e-48f4-80f2-825ec09184a3\") " pod="openstack/keystone-bootstrap-kl26j" Feb 03 07:05:38 crc kubenswrapper[4998]: I0203 07:05:38.353077 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ca949bbc-e75e-48f4-80f2-825ec09184a3-credential-keys\") pod \"keystone-bootstrap-kl26j\" (UID: \"ca949bbc-e75e-48f4-80f2-825ec09184a3\") " pod="openstack/keystone-bootstrap-kl26j" Feb 03 07:05:38 crc kubenswrapper[4998]: I0203 07:05:38.353096 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca949bbc-e75e-48f4-80f2-825ec09184a3-config-data\") pod \"keystone-bootstrap-kl26j\" (UID: \"ca949bbc-e75e-48f4-80f2-825ec09184a3\") " pod="openstack/keystone-bootstrap-kl26j" Feb 03 07:05:38 crc kubenswrapper[4998]: I0203 07:05:38.355125 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca949bbc-e75e-48f4-80f2-825ec09184a3-scripts\") pod \"keystone-bootstrap-kl26j\" (UID: \"ca949bbc-e75e-48f4-80f2-825ec09184a3\") " pod="openstack/keystone-bootstrap-kl26j" Feb 03 07:05:38 crc kubenswrapper[4998]: I0203 07:05:38.364719 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbmjl\" (UniqueName: \"kubernetes.io/projected/ca949bbc-e75e-48f4-80f2-825ec09184a3-kube-api-access-jbmjl\") pod \"keystone-bootstrap-kl26j\" (UID: \"ca949bbc-e75e-48f4-80f2-825ec09184a3\") " pod="openstack/keystone-bootstrap-kl26j" Feb 03 07:05:38 crc kubenswrapper[4998]: I0203 07:05:38.381674 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-kl26j" Feb 03 07:05:38 crc kubenswrapper[4998]: I0203 07:05:38.438711 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3374e30b-8de6-4e33-a180-bbaab7edfffc" path="/var/lib/kubelet/pods/3374e30b-8de6-4e33-a180-bbaab7edfffc/volumes" Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.481917 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.503481 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kr2lf\" (UniqueName: \"kubernetes.io/projected/b57813b2-2ae4-497d-86b4-250ddc3d87a5-kube-api-access-kr2lf\") pod \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\" (UID: \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\") " Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.503551 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b57813b2-2ae4-497d-86b4-250ddc3d87a5-config-data\") pod \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\" (UID: \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\") " Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.503581 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b57813b2-2ae4-497d-86b4-250ddc3d87a5-combined-ca-bundle\") pod \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\" (UID: \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\") " Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.528371 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b57813b2-2ae4-497d-86b4-250ddc3d87a5-kube-api-access-kr2lf" (OuterVolumeSpecName: "kube-api-access-kr2lf") pod "b57813b2-2ae4-497d-86b4-250ddc3d87a5" (UID: "b57813b2-2ae4-497d-86b4-250ddc3d87a5"). InnerVolumeSpecName "kube-api-access-kr2lf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.541931 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b57813b2-2ae4-497d-86b4-250ddc3d87a5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b57813b2-2ae4-497d-86b4-250ddc3d87a5" (UID: "b57813b2-2ae4-497d-86b4-250ddc3d87a5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.567889 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b57813b2-2ae4-497d-86b4-250ddc3d87a5-config-data" (OuterVolumeSpecName: "config-data") pod "b57813b2-2ae4-497d-86b4-250ddc3d87a5" (UID: "b57813b2-2ae4-497d-86b4-250ddc3d87a5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.605747 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b57813b2-2ae4-497d-86b4-250ddc3d87a5-internal-tls-certs\") pod \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\" (UID: \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\") " Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.605890 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b57813b2-2ae4-497d-86b4-250ddc3d87a5-scripts\") pod \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\" (UID: \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\") " Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.605981 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b57813b2-2ae4-497d-86b4-250ddc3d87a5-logs\") pod \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\" (UID: \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\") " Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.606008 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\" (UID: \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\") " Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.606083 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b57813b2-2ae4-497d-86b4-250ddc3d87a5-httpd-run\") pod \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\" (UID: \"b57813b2-2ae4-497d-86b4-250ddc3d87a5\") " Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.606292 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b57813b2-2ae4-497d-86b4-250ddc3d87a5-logs" (OuterVolumeSpecName: "logs") pod "b57813b2-2ae4-497d-86b4-250ddc3d87a5" (UID: "b57813b2-2ae4-497d-86b4-250ddc3d87a5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.606556 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b57813b2-2ae4-497d-86b4-250ddc3d87a5-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "b57813b2-2ae4-497d-86b4-250ddc3d87a5" (UID: "b57813b2-2ae4-497d-86b4-250ddc3d87a5"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.606843 4998 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b57813b2-2ae4-497d-86b4-250ddc3d87a5-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.606868 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kr2lf\" (UniqueName: \"kubernetes.io/projected/b57813b2-2ae4-497d-86b4-250ddc3d87a5-kube-api-access-kr2lf\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.606878 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b57813b2-2ae4-497d-86b4-250ddc3d87a5-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.606887 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b57813b2-2ae4-497d-86b4-250ddc3d87a5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.606895 4998 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b57813b2-2ae4-497d-86b4-250ddc3d87a5-logs\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.611183 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b57813b2-2ae4-497d-86b4-250ddc3d87a5-scripts" (OuterVolumeSpecName: "scripts") pod "b57813b2-2ae4-497d-86b4-250ddc3d87a5" (UID: "b57813b2-2ae4-497d-86b4-250ddc3d87a5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.613074 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "b57813b2-2ae4-497d-86b4-250ddc3d87a5" (UID: "b57813b2-2ae4-497d-86b4-250ddc3d87a5"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.647539 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b57813b2-2ae4-497d-86b4-250ddc3d87a5-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "b57813b2-2ae4-497d-86b4-250ddc3d87a5" (UID: "b57813b2-2ae4-497d-86b4-250ddc3d87a5"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.674484 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b57813b2-2ae4-497d-86b4-250ddc3d87a5","Type":"ContainerDied","Data":"9a7a234b943e59abaac18c6a69df7de0320e6b67b96fe6f8b297d49fa13a749d"} Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.674765 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.710623 4998 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b57813b2-2ae4-497d-86b4-250ddc3d87a5-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.710655 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b57813b2-2ae4-497d-86b4-250ddc3d87a5-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.710687 4998 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.719552 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.733005 4998 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.739409 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.751954 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 07:05:41 crc kubenswrapper[4998]: E0203 07:05:41.752328 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b57813b2-2ae4-497d-86b4-250ddc3d87a5" containerName="glance-log" Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.752345 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="b57813b2-2ae4-497d-86b4-250ddc3d87a5" containerName="glance-log" Feb 03 07:05:41 crc kubenswrapper[4998]: E0203 07:05:41.752384 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b57813b2-2ae4-497d-86b4-250ddc3d87a5" containerName="glance-httpd" Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.752391 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="b57813b2-2ae4-497d-86b4-250ddc3d87a5" containerName="glance-httpd" Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.752540 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="b57813b2-2ae4-497d-86b4-250ddc3d87a5" containerName="glance-log" Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.752555 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="b57813b2-2ae4-497d-86b4-250ddc3d87a5" containerName="glance-httpd" Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.753381 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.755117 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.755330 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.763817 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.812449 4998 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.914118 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86kjg\" (UniqueName: \"kubernetes.io/projected/c38473eb-790f-454e-b5fb-f382292895c1-kube-api-access-86kjg\") pod \"glance-default-internal-api-0\" (UID: \"c38473eb-790f-454e-b5fb-f382292895c1\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.914217 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"c38473eb-790f-454e-b5fb-f382292895c1\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.914265 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c38473eb-790f-454e-b5fb-f382292895c1-logs\") pod \"glance-default-internal-api-0\" (UID: \"c38473eb-790f-454e-b5fb-f382292895c1\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.914405 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c38473eb-790f-454e-b5fb-f382292895c1-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"c38473eb-790f-454e-b5fb-f382292895c1\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.914445 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c38473eb-790f-454e-b5fb-f382292895c1-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c38473eb-790f-454e-b5fb-f382292895c1\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.914490 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c38473eb-790f-454e-b5fb-f382292895c1-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c38473eb-790f-454e-b5fb-f382292895c1\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.914532 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c38473eb-790f-454e-b5fb-f382292895c1-scripts\") pod 
\"glance-default-internal-api-0\" (UID: \"c38473eb-790f-454e-b5fb-f382292895c1\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:05:41 crc kubenswrapper[4998]: I0203 07:05:41.914599 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c38473eb-790f-454e-b5fb-f382292895c1-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c38473eb-790f-454e-b5fb-f382292895c1\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:05:42 crc kubenswrapper[4998]: I0203 07:05:42.016311 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c38473eb-790f-454e-b5fb-f382292895c1-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c38473eb-790f-454e-b5fb-f382292895c1\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:05:42 crc kubenswrapper[4998]: I0203 07:05:42.016350 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86kjg\" (UniqueName: \"kubernetes.io/projected/c38473eb-790f-454e-b5fb-f382292895c1-kube-api-access-86kjg\") pod \"glance-default-internal-api-0\" (UID: \"c38473eb-790f-454e-b5fb-f382292895c1\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:05:42 crc kubenswrapper[4998]: I0203 07:05:42.016398 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"c38473eb-790f-454e-b5fb-f382292895c1\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:05:42 crc kubenswrapper[4998]: I0203 07:05:42.016425 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c38473eb-790f-454e-b5fb-f382292895c1-logs\") pod \"glance-default-internal-api-0\" (UID: \"c38473eb-790f-454e-b5fb-f382292895c1\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:05:42 crc kubenswrapper[4998]: I0203 07:05:42.016479 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c38473eb-790f-454e-b5fb-f382292895c1-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"c38473eb-790f-454e-b5fb-f382292895c1\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:05:42 crc kubenswrapper[4998]: I0203 07:05:42.016498 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c38473eb-790f-454e-b5fb-f382292895c1-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c38473eb-790f-454e-b5fb-f382292895c1\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:05:42 crc kubenswrapper[4998]: I0203 07:05:42.016535 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c38473eb-790f-454e-b5fb-f382292895c1-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c38473eb-790f-454e-b5fb-f382292895c1\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:05:42 crc kubenswrapper[4998]: I0203 07:05:42.016655 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c38473eb-790f-454e-b5fb-f382292895c1-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c38473eb-790f-454e-b5fb-f382292895c1\") 
" pod="openstack/glance-default-internal-api-0" Feb 03 07:05:42 crc kubenswrapper[4998]: I0203 07:05:42.016601 4998 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"c38473eb-790f-454e-b5fb-f382292895c1\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-internal-api-0" Feb 03 07:05:42 crc kubenswrapper[4998]: I0203 07:05:42.017206 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c38473eb-790f-454e-b5fb-f382292895c1-logs\") pod \"glance-default-internal-api-0\" (UID: \"c38473eb-790f-454e-b5fb-f382292895c1\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:05:42 crc kubenswrapper[4998]: I0203 07:05:42.017314 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c38473eb-790f-454e-b5fb-f382292895c1-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c38473eb-790f-454e-b5fb-f382292895c1\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:05:42 crc kubenswrapper[4998]: I0203 07:05:42.020998 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c38473eb-790f-454e-b5fb-f382292895c1-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c38473eb-790f-454e-b5fb-f382292895c1\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:05:42 crc kubenswrapper[4998]: I0203 07:05:42.021205 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c38473eb-790f-454e-b5fb-f382292895c1-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c38473eb-790f-454e-b5fb-f382292895c1\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:05:42 crc kubenswrapper[4998]: I0203 07:05:42.021706 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c38473eb-790f-454e-b5fb-f382292895c1-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"c38473eb-790f-454e-b5fb-f382292895c1\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:05:42 crc kubenswrapper[4998]: I0203 07:05:42.022487 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c38473eb-790f-454e-b5fb-f382292895c1-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c38473eb-790f-454e-b5fb-f382292895c1\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:05:42 crc kubenswrapper[4998]: I0203 07:05:42.038034 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-86kjg\" (UniqueName: \"kubernetes.io/projected/c38473eb-790f-454e-b5fb-f382292895c1-kube-api-access-86kjg\") pod \"glance-default-internal-api-0\" (UID: \"c38473eb-790f-454e-b5fb-f382292895c1\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:05:42 crc kubenswrapper[4998]: I0203 07:05:42.046472 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"c38473eb-790f-454e-b5fb-f382292895c1\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:05:42 crc kubenswrapper[4998]: I0203 07:05:42.081227 4998 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 03 07:05:42 crc kubenswrapper[4998]: I0203 07:05:42.438006 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b57813b2-2ae4-497d-86b4-250ddc3d87a5" path="/var/lib/kubelet/pods/b57813b2-2ae4-497d-86b4-250ddc3d87a5/volumes" Feb 03 07:05:42 crc kubenswrapper[4998]: I0203 07:05:42.754169 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:05:42 crc kubenswrapper[4998]: I0203 07:05:42.754225 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:05:46 crc kubenswrapper[4998]: I0203 07:05:46.681398 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-d489f5d97-pntcj" podUID="cf2f7b9f-8064-4194-bff3-481d1e9b4221" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.129:5353: i/o timeout" Feb 03 07:05:48 crc kubenswrapper[4998]: E0203 07:05:48.895686 4998 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-central@sha256:5ac8ede62671a3b3695cf29bd3a6f124f27c93d1730f9030cc3daa05034d4af4" Feb 03 07:05:48 crc kubenswrapper[4998]: E0203 07:05:48.896101 4998 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-central@sha256:5ac8ede62671a3b3695cf29bd3a6f124f27c93d1730f9030cc3daa05034d4af4,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n56h656h586h55ch55fh59dh7fh656h5f8h97h667h644h546h8bh669h668h5b5h644h654h77h8h58dhffhfhbch66h5c7h5b9hb6h5cch59ch55bq,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rf82r,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 
/var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(500ab31f-c75b-4a96-afa1-56f868909ecb): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 03 07:05:48 crc kubenswrapper[4998]: I0203 07:05:48.907340 4998 scope.go:117] "RemoveContainer" containerID="f2ec0b809779eea23a487876b7bbeded24b8218d7280cc41418feac74bba04db" Feb 03 07:05:48 crc kubenswrapper[4998]: I0203 07:05:48.989257 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d489f5d97-pntcj" Feb 03 07:05:49 crc kubenswrapper[4998]: I0203 07:05:49.055469 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cf2f7b9f-8064-4194-bff3-481d1e9b4221-dns-svc\") pod \"cf2f7b9f-8064-4194-bff3-481d1e9b4221\" (UID: \"cf2f7b9f-8064-4194-bff3-481d1e9b4221\") " Feb 03 07:05:49 crc kubenswrapper[4998]: I0203 07:05:49.055584 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cf2f7b9f-8064-4194-bff3-481d1e9b4221-ovsdbserver-sb\") pod \"cf2f7b9f-8064-4194-bff3-481d1e9b4221\" (UID: \"cf2f7b9f-8064-4194-bff3-481d1e9b4221\") " Feb 03 07:05:49 crc kubenswrapper[4998]: I0203 07:05:49.055697 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t7bqs\" (UniqueName: \"kubernetes.io/projected/cf2f7b9f-8064-4194-bff3-481d1e9b4221-kube-api-access-t7bqs\") pod \"cf2f7b9f-8064-4194-bff3-481d1e9b4221\" (UID: \"cf2f7b9f-8064-4194-bff3-481d1e9b4221\") " Feb 03 07:05:49 crc kubenswrapper[4998]: I0203 07:05:49.055715 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf2f7b9f-8064-4194-bff3-481d1e9b4221-config\") pod \"cf2f7b9f-8064-4194-bff3-481d1e9b4221\" (UID: \"cf2f7b9f-8064-4194-bff3-481d1e9b4221\") " Feb 03 07:05:49 crc kubenswrapper[4998]: I0203 07:05:49.055817 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cf2f7b9f-8064-4194-bff3-481d1e9b4221-ovsdbserver-nb\") pod \"cf2f7b9f-8064-4194-bff3-481d1e9b4221\" (UID: \"cf2f7b9f-8064-4194-bff3-481d1e9b4221\") " Feb 03 07:05:49 crc kubenswrapper[4998]: I0203 07:05:49.067804 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf2f7b9f-8064-4194-bff3-481d1e9b4221-kube-api-access-t7bqs" (OuterVolumeSpecName: "kube-api-access-t7bqs") pod "cf2f7b9f-8064-4194-bff3-481d1e9b4221" (UID: "cf2f7b9f-8064-4194-bff3-481d1e9b4221"). InnerVolumeSpecName "kube-api-access-t7bqs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:05:49 crc kubenswrapper[4998]: I0203 07:05:49.099679 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cf2f7b9f-8064-4194-bff3-481d1e9b4221-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "cf2f7b9f-8064-4194-bff3-481d1e9b4221" (UID: "cf2f7b9f-8064-4194-bff3-481d1e9b4221"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:05:49 crc kubenswrapper[4998]: I0203 07:05:49.103964 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cf2f7b9f-8064-4194-bff3-481d1e9b4221-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "cf2f7b9f-8064-4194-bff3-481d1e9b4221" (UID: "cf2f7b9f-8064-4194-bff3-481d1e9b4221"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:05:49 crc kubenswrapper[4998]: I0203 07:05:49.106400 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cf2f7b9f-8064-4194-bff3-481d1e9b4221-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "cf2f7b9f-8064-4194-bff3-481d1e9b4221" (UID: "cf2f7b9f-8064-4194-bff3-481d1e9b4221"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:05:49 crc kubenswrapper[4998]: I0203 07:05:49.109374 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cf2f7b9f-8064-4194-bff3-481d1e9b4221-config" (OuterVolumeSpecName: "config") pod "cf2f7b9f-8064-4194-bff3-481d1e9b4221" (UID: "cf2f7b9f-8064-4194-bff3-481d1e9b4221"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:05:49 crc kubenswrapper[4998]: I0203 07:05:49.157290 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t7bqs\" (UniqueName: \"kubernetes.io/projected/cf2f7b9f-8064-4194-bff3-481d1e9b4221-kube-api-access-t7bqs\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:49 crc kubenswrapper[4998]: I0203 07:05:49.157336 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf2f7b9f-8064-4194-bff3-481d1e9b4221-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:49 crc kubenswrapper[4998]: I0203 07:05:49.157345 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cf2f7b9f-8064-4194-bff3-481d1e9b4221-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:49 crc kubenswrapper[4998]: I0203 07:05:49.157353 4998 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cf2f7b9f-8064-4194-bff3-481d1e9b4221-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:49 crc kubenswrapper[4998]: I0203 07:05:49.157361 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cf2f7b9f-8064-4194-bff3-481d1e9b4221-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:49 crc kubenswrapper[4998]: I0203 07:05:49.738428 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d489f5d97-pntcj" event={"ID":"cf2f7b9f-8064-4194-bff3-481d1e9b4221","Type":"ContainerDied","Data":"7f3eea7af726d074d9cd42bdb3d70a0b4b4226ce9ccf6bbef5e7483f48c9f2da"} Feb 03 07:05:49 crc kubenswrapper[4998]: I0203 07:05:49.738493 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-d489f5d97-pntcj" Feb 03 07:05:49 crc kubenswrapper[4998]: I0203 07:05:49.773719 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d489f5d97-pntcj"] Feb 03 07:05:49 crc kubenswrapper[4998]: I0203 07:05:49.780605 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-d489f5d97-pntcj"] Feb 03 07:05:49 crc kubenswrapper[4998]: I0203 07:05:49.934184 4998 scope.go:117] "RemoveContainer" containerID="c0f081ef3c7214ef67393824869e107c3a9fed7d49eadfe5abff23af2da95c3c" Feb 03 07:05:49 crc kubenswrapper[4998]: E0203 07:05:49.951545 4998 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:3fa6e687aa002b92fedbfe2c1ccaa2906b399c58d17bf9ecece2c4cd69a0210b" Feb 03 07:05:49 crc kubenswrapper[4998]: E0203 07:05:49.951738 4998 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:3fa6e687aa002b92fedbfe2c1ccaa2906b399c58d17bf9ecece2c4cd69a0210b,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mwd2q,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-47p8b_openstack(0e86027e-f5e6-40ba-af5a-275b9087dcfd): 
ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Feb 03 07:05:49 crc kubenswrapper[4998]: E0203 07:05:49.953483 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-47p8b" podUID="0e86027e-f5e6-40ba-af5a-275b9087dcfd"
Feb 03 07:05:50 crc kubenswrapper[4998]: I0203 07:05:50.144121 4998 scope.go:117] "RemoveContainer" containerID="70411b347a9e959e62d523e1073fa00e328f7777da526db88ffb79ecb94830d9"
Feb 03 07:05:50 crc kubenswrapper[4998]: I0203 07:05:50.176046 4998 scope.go:117] "RemoveContainer" containerID="4437c1cd8a0173e6e62c2f463d5924573836fa7c3fdbb56d6f1aa151344acd9d"
Feb 03 07:05:50 crc kubenswrapper[4998]: I0203 07:05:50.405554 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-kl26j"]
Feb 03 07:05:50 crc kubenswrapper[4998]: I0203 07:05:50.446931 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf2f7b9f-8064-4194-bff3-481d1e9b4221" path="/var/lib/kubelet/pods/cf2f7b9f-8064-4194-bff3-481d1e9b4221/volumes"
Feb 03 07:05:50 crc kubenswrapper[4998]: I0203 07:05:50.522689 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Feb 03 07:05:50 crc kubenswrapper[4998]: W0203 07:05:50.534223 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc0cbd21f_2ac7_4c48_a160_18e816ed8b2e.slice/crio-a499ee8bd5bd4bf2f56bb3e9a601d3d7a4abbc84fc340c83cc59ed733fc213e9 WatchSource:0}: Error finding container a499ee8bd5bd4bf2f56bb3e9a601d3d7a4abbc84fc340c83cc59ed733fc213e9: Status 404 returned error can't find the container with id a499ee8bd5bd4bf2f56bb3e9a601d3d7a4abbc84fc340c83cc59ed733fc213e9
Feb 03 07:05:50 crc kubenswrapper[4998]: I0203 07:05:50.615997 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Feb 03 07:05:50 crc kubenswrapper[4998]: I0203 07:05:50.749183 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-kl26j" event={"ID":"ca949bbc-e75e-48f4-80f2-825ec09184a3","Type":"ContainerStarted","Data":"15969ca8a4285b78e3bba616374d40ab46d9a072768d82eb0eb8b367ef8200ab"}
Feb 03 07:05:50 crc kubenswrapper[4998]: I0203 07:05:50.749492 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-kl26j" event={"ID":"ca949bbc-e75e-48f4-80f2-825ec09184a3","Type":"ContainerStarted","Data":"106195e9611ed3e3e54d6e04474be446c76494674225111a7afe122926773b9a"}
Feb 03 07:05:50 crc kubenswrapper[4998]: I0203 07:05:50.755347 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-jgnc7" event={"ID":"537b4446-c59e-4c79-9f65-2221ddb6783c","Type":"ContainerStarted","Data":"1ff4307050afd5c78538e5168c27ca50b3d89c99e4973a0736340d10a37a60f9"}
Feb 03 07:05:50 crc kubenswrapper[4998]: I0203 07:05:50.757385 4998 generic.go:334] "Generic (PLEG): container finished" podID="c79d6b94-62d5-41a1-ae40-acec75234d16" containerID="9820beae08e9bb31b81429dd4b22978f152df4c77526ab762d9b068a4fb5c3cd" exitCode=0
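[editor's note] The "SyncLoop (PLEG): event for pod" records above carry an event={...} payload that happens to be plain JSON: ID is the pod UID, Type is the lifecycle transition (ContainerStarted, ContainerDied), and Data is the container or sandbox ID. A sketch of extracting them — plegevents.go is a hypothetical helper, assuming the payload stays un-nested as in this log:

    // plegevents.go — hypothetical helper: pulls the event={...} payload out of
    // "SyncLoop (PLEG)" records and decodes it as JSON.
    package main

    import (
        "bufio"
        "encoding/json"
        "fmt"
        "os"
        "regexp"
    )

    type plegEvent struct {
        ID   string // pod UID
        Type string // e.g. ContainerStarted, ContainerDied
        Data string // container or sandbox ID
    }

    var eventRe = regexp.MustCompile(`event=(\{[^}]*\})`)

    func main() {
        sc := bufio.NewScanner(os.Stdin)
        sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024)
        for sc.Scan() {
            for _, m := range eventRe.FindAllStringSubmatch(sc.Text(), -1) {
                var ev plegEvent
                if err := json.Unmarshal([]byte(m[1]), &ev); err != nil {
                    continue // skip payloads that are not plain JSON
                }
                fmt.Printf("%s %s %s\n", ev.Type, ev.ID, ev.Data)
            }
        }
    }

Feb 03 07:05:50 crc kubenswrapper[4998]: I0203 07:05:50.757432 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-5rpgx"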
event={"ID":"c79d6b94-62d5-41a1-ae40-acec75234d16","Type":"ContainerDied","Data":"9820beae08e9bb31b81429dd4b22978f152df4c77526ab762d9b068a4fb5c3cd"} Feb 03 07:05:50 crc kubenswrapper[4998]: I0203 07:05:50.759406 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e","Type":"ContainerStarted","Data":"a499ee8bd5bd4bf2f56bb3e9a601d3d7a4abbc84fc340c83cc59ed733fc213e9"} Feb 03 07:05:50 crc kubenswrapper[4998]: I0203 07:05:50.765470 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"09cd9158-f279-4ac0-b8fe-0121e85a1b20","Type":"ContainerStarted","Data":"d71043657e67f511c84e1b80c92f0b68c1da1077f14a94c55b36f366b7a2f322"} Feb 03 07:05:50 crc kubenswrapper[4998]: I0203 07:05:50.765503 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"09cd9158-f279-4ac0-b8fe-0121e85a1b20","Type":"ContainerStarted","Data":"10dab8bfcc6645bc889616f4f108cf19114ce18a2cadd19540c5dd170f5b6fcb"} Feb 03 07:05:50 crc kubenswrapper[4998]: I0203 07:05:50.765515 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"09cd9158-f279-4ac0-b8fe-0121e85a1b20","Type":"ContainerStarted","Data":"39817cf90fdb88cf824e056a9ffea636e16485fac2c9389858ca4ba9f02b85f5"} Feb 03 07:05:50 crc kubenswrapper[4998]: I0203 07:05:50.775256 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-kl26j" podStartSLOduration=12.775240459 podStartE2EDuration="12.775240459s" podCreationTimestamp="2026-02-03 07:05:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:05:50.769919997 +0000 UTC m=+1189.056613823" watchObservedRunningTime="2026-02-03 07:05:50.775240459 +0000 UTC m=+1189.061934255" Feb 03 07:05:50 crc kubenswrapper[4998]: I0203 07:05:50.778622 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-sstns" event={"ID":"520a4ac6-b880-4576-8772-da70e0d8f99d","Type":"ContainerStarted","Data":"e061b29717ef5f300f95b7d0a0463d74793d4d1ac725755bcc7f09fcec627d2a"} Feb 03 07:05:50 crc kubenswrapper[4998]: E0203 07:05:50.783932 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:3fa6e687aa002b92fedbfe2c1ccaa2906b399c58d17bf9ecece2c4cd69a0210b\\\"\"" pod="openstack/cinder-db-sync-47p8b" podUID="0e86027e-f5e6-40ba-af5a-275b9087dcfd" Feb 03 07:05:50 crc kubenswrapper[4998]: I0203 07:05:50.793427 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-jgnc7" podStartSLOduration=5.017747537 podStartE2EDuration="25.79341179s" podCreationTimestamp="2026-02-03 07:05:25 +0000 UTC" firstStartedPulling="2026-02-03 07:05:29.134835822 +0000 UTC m=+1167.421529628" lastFinishedPulling="2026-02-03 07:05:49.910500065 +0000 UTC m=+1188.197193881" observedRunningTime="2026-02-03 07:05:50.790731013 +0000 UTC m=+1189.077424829" watchObservedRunningTime="2026-02-03 07:05:50.79341179 +0000 UTC m=+1189.080105596" Feb 03 07:05:50 crc kubenswrapper[4998]: I0203 07:05:50.820599 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-sstns" podStartSLOduration=5.051669468 
podStartE2EDuration="25.820581479s" podCreationTimestamp="2026-02-03 07:05:25 +0000 UTC" firstStartedPulling="2026-02-03 07:05:29.130166178 +0000 UTC m=+1167.416859984" lastFinishedPulling="2026-02-03 07:05:49.899078189 +0000 UTC m=+1188.185771995" observedRunningTime="2026-02-03 07:05:50.818094717 +0000 UTC m=+1189.104788533" watchObservedRunningTime="2026-02-03 07:05:50.820581479 +0000 UTC m=+1189.107275285" Feb 03 07:05:51 crc kubenswrapper[4998]: I0203 07:05:51.682980 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-d489f5d97-pntcj" podUID="cf2f7b9f-8064-4194-bff3-481d1e9b4221" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.129:5353: i/o timeout" Feb 03 07:05:51 crc kubenswrapper[4998]: I0203 07:05:51.789273 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e","Type":"ContainerStarted","Data":"35b45a5b0f8581c84fc3e062610dd5acaf810616fda9be2adfa678c3edf552b3"} Feb 03 07:05:51 crc kubenswrapper[4998]: I0203 07:05:51.802093 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"09cd9158-f279-4ac0-b8fe-0121e85a1b20","Type":"ContainerStarted","Data":"91d35084ff382e39d07e6d23a955488bd8e1fc108bc54d221ff9c73527f12831"} Feb 03 07:05:51 crc kubenswrapper[4998]: I0203 07:05:51.802142 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"09cd9158-f279-4ac0-b8fe-0121e85a1b20","Type":"ContainerStarted","Data":"ede2cd76a85c73b54f858f8d327a0cbaf17ad5297a50c2492295ebff09d11252"} Feb 03 07:05:51 crc kubenswrapper[4998]: I0203 07:05:51.802156 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"09cd9158-f279-4ac0-b8fe-0121e85a1b20","Type":"ContainerStarted","Data":"81dac1e002ac3841ac510ce705948252966c00472415ebc65c67b828d80bdd5b"} Feb 03 07:05:51 crc kubenswrapper[4998]: I0203 07:05:51.806055 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c38473eb-790f-454e-b5fb-f382292895c1","Type":"ContainerStarted","Data":"b2e6b960df78a7b235174eed618b1694fe06dbf454005c530f409ecf38f4fe69"} Feb 03 07:05:51 crc kubenswrapper[4998]: I0203 07:05:51.806096 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c38473eb-790f-454e-b5fb-f382292895c1","Type":"ContainerStarted","Data":"4d18402fc804ad1803a6c38008bb96a38925ddb4bd17e296638e85595ddaf9e5"} Feb 03 07:05:51 crc kubenswrapper[4998]: I0203 07:05:51.810487 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"500ab31f-c75b-4a96-afa1-56f868909ecb","Type":"ContainerStarted","Data":"9944aec61ca304a54ffb18464be33901737c98924ce176c77f6e3a6e417f0ecd"} Feb 03 07:05:52 crc kubenswrapper[4998]: I0203 07:05:52.191646 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-5rpgx" Feb 03 07:05:52 crc kubenswrapper[4998]: I0203 07:05:52.218094 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c79d6b94-62d5-41a1-ae40-acec75234d16-combined-ca-bundle\") pod \"c79d6b94-62d5-41a1-ae40-acec75234d16\" (UID: \"c79d6b94-62d5-41a1-ae40-acec75234d16\") " Feb 03 07:05:52 crc kubenswrapper[4998]: I0203 07:05:52.218164 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c79d6b94-62d5-41a1-ae40-acec75234d16-config\") pod \"c79d6b94-62d5-41a1-ae40-acec75234d16\" (UID: \"c79d6b94-62d5-41a1-ae40-acec75234d16\") " Feb 03 07:05:52 crc kubenswrapper[4998]: I0203 07:05:52.218251 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqz8\" (UniqueName: \"kubernetes.io/projected/c79d6b94-62d5-41a1-ae40-acec75234d16-kube-api-access-fcqz8\") pod \"c79d6b94-62d5-41a1-ae40-acec75234d16\" (UID: \"c79d6b94-62d5-41a1-ae40-acec75234d16\") " Feb 03 07:05:52 crc kubenswrapper[4998]: I0203 07:05:52.224823 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c79d6b94-62d5-41a1-ae40-acec75234d16-kube-api-access-fcqz8" (OuterVolumeSpecName: "kube-api-access-fcqz8") pod "c79d6b94-62d5-41a1-ae40-acec75234d16" (UID: "c79d6b94-62d5-41a1-ae40-acec75234d16"). InnerVolumeSpecName "kube-api-access-fcqz8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:05:52 crc kubenswrapper[4998]: I0203 07:05:52.269391 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c79d6b94-62d5-41a1-ae40-acec75234d16-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c79d6b94-62d5-41a1-ae40-acec75234d16" (UID: "c79d6b94-62d5-41a1-ae40-acec75234d16"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:05:52 crc kubenswrapper[4998]: I0203 07:05:52.283405 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c79d6b94-62d5-41a1-ae40-acec75234d16-config" (OuterVolumeSpecName: "config") pod "c79d6b94-62d5-41a1-ae40-acec75234d16" (UID: "c79d6b94-62d5-41a1-ae40-acec75234d16"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:05:52 crc kubenswrapper[4998]: I0203 07:05:52.322916 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqz8\" (UniqueName: \"kubernetes.io/projected/c79d6b94-62d5-41a1-ae40-acec75234d16-kube-api-access-fcqz8\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:52 crc kubenswrapper[4998]: I0203 07:05:52.322950 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c79d6b94-62d5-41a1-ae40-acec75234d16-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:52 crc kubenswrapper[4998]: I0203 07:05:52.322962 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/c79d6b94-62d5-41a1-ae40-acec75234d16-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:52 crc kubenswrapper[4998]: I0203 07:05:52.853301 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-5rpgx" Feb 03 07:05:52 crc kubenswrapper[4998]: I0203 07:05:52.853868 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-5rpgx" event={"ID":"c79d6b94-62d5-41a1-ae40-acec75234d16","Type":"ContainerDied","Data":"0046115393d6b5d72941ace9cc81451669d45b554b11de3ee6191103fe4ab468"} Feb 03 07:05:52 crc kubenswrapper[4998]: I0203 07:05:52.853909 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0046115393d6b5d72941ace9cc81451669d45b554b11de3ee6191103fe4ab468" Feb 03 07:05:52 crc kubenswrapper[4998]: I0203 07:05:52.865927 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e","Type":"ContainerStarted","Data":"3c1e4ffe1c3edfbfc9d77cde2bc9b71dfa60f0a7da07ebbb4913417195ff50c3"} Feb 03 07:05:52 crc kubenswrapper[4998]: I0203 07:05:52.875416 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"09cd9158-f279-4ac0-b8fe-0121e85a1b20","Type":"ContainerStarted","Data":"8d491cb6f76bdb5ad3981bba0e2ccc67efd84f5bc5778d32aad45ae237cb920e"} Feb 03 07:05:52 crc kubenswrapper[4998]: I0203 07:05:52.894586 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c38473eb-790f-454e-b5fb-f382292895c1","Type":"ContainerStarted","Data":"815107d7365bb8588bc5fb81ef98f19fde8733cb829325ab5989bc5fcfe8dc4a"} Feb 03 07:05:52 crc kubenswrapper[4998]: I0203 07:05:52.910597 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=17.910580089 podStartE2EDuration="17.910580089s" podCreationTimestamp="2026-02-03 07:05:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:05:52.906180353 +0000 UTC m=+1191.192874159" watchObservedRunningTime="2026-02-03 07:05:52.910580089 +0000 UTC m=+1191.197273895" Feb 03 07:05:52 crc kubenswrapper[4998]: I0203 07:05:52.979367 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=52.134251818 podStartE2EDuration="1m5.97934793s" podCreationTimestamp="2026-02-03 07:04:47 +0000 UTC" firstStartedPulling="2026-02-03 07:05:21.368331223 +0000 UTC m=+1159.655025039" lastFinishedPulling="2026-02-03 07:05:35.213427345 +0000 UTC m=+1173.500121151" observedRunningTime="2026-02-03 07:05:52.976098256 +0000 UTC m=+1191.262792062" watchObservedRunningTime="2026-02-03 07:05:52.97934793 +0000 UTC m=+1191.266041736" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.015929 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=12.015906097 podStartE2EDuration="12.015906097s" podCreationTimestamp="2026-02-03 07:05:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:05:53.00170104 +0000 UTC m=+1191.288394846" watchObservedRunningTime="2026-02-03 07:05:53.015906097 +0000 UTC m=+1191.302599903" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.041641 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-76d9cfb7bf-tm9cn"] Feb 03 07:05:53 crc kubenswrapper[4998]: E0203 07:05:53.042828 4998 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="cf2f7b9f-8064-4194-bff3-481d1e9b4221" containerName="dnsmasq-dns" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.042857 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf2f7b9f-8064-4194-bff3-481d1e9b4221" containerName="dnsmasq-dns" Feb 03 07:05:53 crc kubenswrapper[4998]: E0203 07:05:53.042871 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf2f7b9f-8064-4194-bff3-481d1e9b4221" containerName="init" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.042880 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf2f7b9f-8064-4194-bff3-481d1e9b4221" containerName="init" Feb 03 07:05:53 crc kubenswrapper[4998]: E0203 07:05:53.042906 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c79d6b94-62d5-41a1-ae40-acec75234d16" containerName="neutron-db-sync" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.042914 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="c79d6b94-62d5-41a1-ae40-acec75234d16" containerName="neutron-db-sync" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.043463 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="c79d6b94-62d5-41a1-ae40-acec75234d16" containerName="neutron-db-sync" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.043492 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf2f7b9f-8064-4194-bff3-481d1e9b4221" containerName="dnsmasq-dns" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.044360 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76d9cfb7bf-tm9cn" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.050150 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-76d9cfb7bf-tm9cn"] Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.146312 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f5aa646b-a693-467e-80fc-9b5afbbb0897-ovsdbserver-sb\") pod \"dnsmasq-dns-76d9cfb7bf-tm9cn\" (UID: \"f5aa646b-a693-467e-80fc-9b5afbbb0897\") " pod="openstack/dnsmasq-dns-76d9cfb7bf-tm9cn" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.146396 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5aa646b-a693-467e-80fc-9b5afbbb0897-config\") pod \"dnsmasq-dns-76d9cfb7bf-tm9cn\" (UID: \"f5aa646b-a693-467e-80fc-9b5afbbb0897\") " pod="openstack/dnsmasq-dns-76d9cfb7bf-tm9cn" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.146423 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f5aa646b-a693-467e-80fc-9b5afbbb0897-ovsdbserver-nb\") pod \"dnsmasq-dns-76d9cfb7bf-tm9cn\" (UID: \"f5aa646b-a693-467e-80fc-9b5afbbb0897\") " pod="openstack/dnsmasq-dns-76d9cfb7bf-tm9cn" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.146533 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ctg4w\" (UniqueName: \"kubernetes.io/projected/f5aa646b-a693-467e-80fc-9b5afbbb0897-kube-api-access-ctg4w\") pod \"dnsmasq-dns-76d9cfb7bf-tm9cn\" (UID: \"f5aa646b-a693-467e-80fc-9b5afbbb0897\") " pod="openstack/dnsmasq-dns-76d9cfb7bf-tm9cn" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.146601 4998 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f5aa646b-a693-467e-80fc-9b5afbbb0897-dns-svc\") pod \"dnsmasq-dns-76d9cfb7bf-tm9cn\" (UID: \"f5aa646b-a693-467e-80fc-9b5afbbb0897\") " pod="openstack/dnsmasq-dns-76d9cfb7bf-tm9cn" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.162064 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-7787cd6574-rthd9"] Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.164088 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7787cd6574-rthd9" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.168064 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.168755 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-8r7zj" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.168875 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.172025 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.193677 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7787cd6574-rthd9"] Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.247876 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f5aa646b-a693-467e-80fc-9b5afbbb0897-dns-svc\") pod \"dnsmasq-dns-76d9cfb7bf-tm9cn\" (UID: \"f5aa646b-a693-467e-80fc-9b5afbbb0897\") " pod="openstack/dnsmasq-dns-76d9cfb7bf-tm9cn" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.247944 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8-ovndb-tls-certs\") pod \"neutron-7787cd6574-rthd9\" (UID: \"dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8\") " pod="openstack/neutron-7787cd6574-rthd9" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.247972 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8-config\") pod \"neutron-7787cd6574-rthd9\" (UID: \"dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8\") " pod="openstack/neutron-7787cd6574-rthd9" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.247994 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f5aa646b-a693-467e-80fc-9b5afbbb0897-ovsdbserver-sb\") pod \"dnsmasq-dns-76d9cfb7bf-tm9cn\" (UID: \"f5aa646b-a693-467e-80fc-9b5afbbb0897\") " pod="openstack/dnsmasq-dns-76d9cfb7bf-tm9cn" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.248018 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8-combined-ca-bundle\") pod \"neutron-7787cd6574-rthd9\" (UID: \"dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8\") " pod="openstack/neutron-7787cd6574-rthd9" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.248045 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8-httpd-config\") pod \"neutron-7787cd6574-rthd9\" (UID: \"dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8\") " pod="openstack/neutron-7787cd6574-rthd9" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.248083 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xcf7\" (UniqueName: \"kubernetes.io/projected/dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8-kube-api-access-5xcf7\") pod \"neutron-7787cd6574-rthd9\" (UID: \"dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8\") " pod="openstack/neutron-7787cd6574-rthd9" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.248101 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5aa646b-a693-467e-80fc-9b5afbbb0897-config\") pod \"dnsmasq-dns-76d9cfb7bf-tm9cn\" (UID: \"f5aa646b-a693-467e-80fc-9b5afbbb0897\") " pod="openstack/dnsmasq-dns-76d9cfb7bf-tm9cn" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.248117 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f5aa646b-a693-467e-80fc-9b5afbbb0897-ovsdbserver-nb\") pod \"dnsmasq-dns-76d9cfb7bf-tm9cn\" (UID: \"f5aa646b-a693-467e-80fc-9b5afbbb0897\") " pod="openstack/dnsmasq-dns-76d9cfb7bf-tm9cn" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.248204 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctg4w\" (UniqueName: \"kubernetes.io/projected/f5aa646b-a693-467e-80fc-9b5afbbb0897-kube-api-access-ctg4w\") pod \"dnsmasq-dns-76d9cfb7bf-tm9cn\" (UID: \"f5aa646b-a693-467e-80fc-9b5afbbb0897\") " pod="openstack/dnsmasq-dns-76d9cfb7bf-tm9cn" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.249566 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f5aa646b-a693-467e-80fc-9b5afbbb0897-dns-svc\") pod \"dnsmasq-dns-76d9cfb7bf-tm9cn\" (UID: \"f5aa646b-a693-467e-80fc-9b5afbbb0897\") " pod="openstack/dnsmasq-dns-76d9cfb7bf-tm9cn" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.249937 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f5aa646b-a693-467e-80fc-9b5afbbb0897-ovsdbserver-sb\") pod \"dnsmasq-dns-76d9cfb7bf-tm9cn\" (UID: \"f5aa646b-a693-467e-80fc-9b5afbbb0897\") " pod="openstack/dnsmasq-dns-76d9cfb7bf-tm9cn" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.253101 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5aa646b-a693-467e-80fc-9b5afbbb0897-config\") pod \"dnsmasq-dns-76d9cfb7bf-tm9cn\" (UID: \"f5aa646b-a693-467e-80fc-9b5afbbb0897\") " pod="openstack/dnsmasq-dns-76d9cfb7bf-tm9cn" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.253161 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f5aa646b-a693-467e-80fc-9b5afbbb0897-ovsdbserver-nb\") pod \"dnsmasq-dns-76d9cfb7bf-tm9cn\" (UID: \"f5aa646b-a693-467e-80fc-9b5afbbb0897\") " pod="openstack/dnsmasq-dns-76d9cfb7bf-tm9cn" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.304974 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ctg4w\" (UniqueName: 
\"kubernetes.io/projected/f5aa646b-a693-467e-80fc-9b5afbbb0897-kube-api-access-ctg4w\") pod \"dnsmasq-dns-76d9cfb7bf-tm9cn\" (UID: \"f5aa646b-a693-467e-80fc-9b5afbbb0897\") " pod="openstack/dnsmasq-dns-76d9cfb7bf-tm9cn" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.347793 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76d9cfb7bf-tm9cn"] Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.348383 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76d9cfb7bf-tm9cn" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.349360 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8-ovndb-tls-certs\") pod \"neutron-7787cd6574-rthd9\" (UID: \"dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8\") " pod="openstack/neutron-7787cd6574-rthd9" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.350318 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8-config\") pod \"neutron-7787cd6574-rthd9\" (UID: \"dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8\") " pod="openstack/neutron-7787cd6574-rthd9" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.350412 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8-combined-ca-bundle\") pod \"neutron-7787cd6574-rthd9\" (UID: \"dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8\") " pod="openstack/neutron-7787cd6574-rthd9" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.350500 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8-httpd-config\") pod \"neutron-7787cd6574-rthd9\" (UID: \"dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8\") " pod="openstack/neutron-7787cd6574-rthd9" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.350616 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xcf7\" (UniqueName: \"kubernetes.io/projected/dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8-kube-api-access-5xcf7\") pod \"neutron-7787cd6574-rthd9\" (UID: \"dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8\") " pod="openstack/neutron-7787cd6574-rthd9" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.355157 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8-httpd-config\") pod \"neutron-7787cd6574-rthd9\" (UID: \"dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8\") " pod="openstack/neutron-7787cd6574-rthd9" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.362452 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8-ovndb-tls-certs\") pod \"neutron-7787cd6574-rthd9\" (UID: \"dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8\") " pod="openstack/neutron-7787cd6574-rthd9" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.362677 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8-combined-ca-bundle\") pod \"neutron-7787cd6574-rthd9\" (UID: \"dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8\") " 
pod="openstack/neutron-7787cd6574-rthd9" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.366510 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8-config\") pod \"neutron-7787cd6574-rthd9\" (UID: \"dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8\") " pod="openstack/neutron-7787cd6574-rthd9" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.389353 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86dd6887f5-rkh8v"] Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.391069 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86dd6887f5-rkh8v" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.396798 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xcf7\" (UniqueName: \"kubernetes.io/projected/dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8-kube-api-access-5xcf7\") pod \"neutron-7787cd6574-rthd9\" (UID: \"dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8\") " pod="openstack/neutron-7787cd6574-rthd9" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.397225 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.413465 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86dd6887f5-rkh8v"] Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.513438 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7787cd6574-rthd9" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.554503 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c32a76b-d239-4e97-a16a-23423046cbec-config\") pod \"dnsmasq-dns-86dd6887f5-rkh8v\" (UID: \"2c32a76b-d239-4e97-a16a-23423046cbec\") " pod="openstack/dnsmasq-dns-86dd6887f5-rkh8v" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.554838 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c32a76b-d239-4e97-a16a-23423046cbec-dns-svc\") pod \"dnsmasq-dns-86dd6887f5-rkh8v\" (UID: \"2c32a76b-d239-4e97-a16a-23423046cbec\") " pod="openstack/dnsmasq-dns-86dd6887f5-rkh8v" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.554910 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zbpc7\" (UniqueName: \"kubernetes.io/projected/2c32a76b-d239-4e97-a16a-23423046cbec-kube-api-access-zbpc7\") pod \"dnsmasq-dns-86dd6887f5-rkh8v\" (UID: \"2c32a76b-d239-4e97-a16a-23423046cbec\") " pod="openstack/dnsmasq-dns-86dd6887f5-rkh8v" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.554954 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c32a76b-d239-4e97-a16a-23423046cbec-ovsdbserver-nb\") pod \"dnsmasq-dns-86dd6887f5-rkh8v\" (UID: \"2c32a76b-d239-4e97-a16a-23423046cbec\") " pod="openstack/dnsmasq-dns-86dd6887f5-rkh8v" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.555003 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2c32a76b-d239-4e97-a16a-23423046cbec-ovsdbserver-sb\") pod 
\"dnsmasq-dns-86dd6887f5-rkh8v\" (UID: \"2c32a76b-d239-4e97-a16a-23423046cbec\") " pod="openstack/dnsmasq-dns-86dd6887f5-rkh8v" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.555162 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2c32a76b-d239-4e97-a16a-23423046cbec-dns-swift-storage-0\") pod \"dnsmasq-dns-86dd6887f5-rkh8v\" (UID: \"2c32a76b-d239-4e97-a16a-23423046cbec\") " pod="openstack/dnsmasq-dns-86dd6887f5-rkh8v" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.657202 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c32a76b-d239-4e97-a16a-23423046cbec-dns-svc\") pod \"dnsmasq-dns-86dd6887f5-rkh8v\" (UID: \"2c32a76b-d239-4e97-a16a-23423046cbec\") " pod="openstack/dnsmasq-dns-86dd6887f5-rkh8v" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.657272 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zbpc7\" (UniqueName: \"kubernetes.io/projected/2c32a76b-d239-4e97-a16a-23423046cbec-kube-api-access-zbpc7\") pod \"dnsmasq-dns-86dd6887f5-rkh8v\" (UID: \"2c32a76b-d239-4e97-a16a-23423046cbec\") " pod="openstack/dnsmasq-dns-86dd6887f5-rkh8v" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.657306 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c32a76b-d239-4e97-a16a-23423046cbec-ovsdbserver-nb\") pod \"dnsmasq-dns-86dd6887f5-rkh8v\" (UID: \"2c32a76b-d239-4e97-a16a-23423046cbec\") " pod="openstack/dnsmasq-dns-86dd6887f5-rkh8v" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.657340 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2c32a76b-d239-4e97-a16a-23423046cbec-ovsdbserver-sb\") pod \"dnsmasq-dns-86dd6887f5-rkh8v\" (UID: \"2c32a76b-d239-4e97-a16a-23423046cbec\") " pod="openstack/dnsmasq-dns-86dd6887f5-rkh8v" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.657372 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2c32a76b-d239-4e97-a16a-23423046cbec-dns-swift-storage-0\") pod \"dnsmasq-dns-86dd6887f5-rkh8v\" (UID: \"2c32a76b-d239-4e97-a16a-23423046cbec\") " pod="openstack/dnsmasq-dns-86dd6887f5-rkh8v" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.657460 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c32a76b-d239-4e97-a16a-23423046cbec-config\") pod \"dnsmasq-dns-86dd6887f5-rkh8v\" (UID: \"2c32a76b-d239-4e97-a16a-23423046cbec\") " pod="openstack/dnsmasq-dns-86dd6887f5-rkh8v" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.658440 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2c32a76b-d239-4e97-a16a-23423046cbec-ovsdbserver-sb\") pod \"dnsmasq-dns-86dd6887f5-rkh8v\" (UID: \"2c32a76b-d239-4e97-a16a-23423046cbec\") " pod="openstack/dnsmasq-dns-86dd6887f5-rkh8v" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.658503 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c32a76b-d239-4e97-a16a-23423046cbec-dns-svc\") pod \"dnsmasq-dns-86dd6887f5-rkh8v\" (UID: 
\"2c32a76b-d239-4e97-a16a-23423046cbec\") " pod="openstack/dnsmasq-dns-86dd6887f5-rkh8v" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.658564 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c32a76b-d239-4e97-a16a-23423046cbec-config\") pod \"dnsmasq-dns-86dd6887f5-rkh8v\" (UID: \"2c32a76b-d239-4e97-a16a-23423046cbec\") " pod="openstack/dnsmasq-dns-86dd6887f5-rkh8v" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.660853 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c32a76b-d239-4e97-a16a-23423046cbec-ovsdbserver-nb\") pod \"dnsmasq-dns-86dd6887f5-rkh8v\" (UID: \"2c32a76b-d239-4e97-a16a-23423046cbec\") " pod="openstack/dnsmasq-dns-86dd6887f5-rkh8v" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.661510 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2c32a76b-d239-4e97-a16a-23423046cbec-dns-swift-storage-0\") pod \"dnsmasq-dns-86dd6887f5-rkh8v\" (UID: \"2c32a76b-d239-4e97-a16a-23423046cbec\") " pod="openstack/dnsmasq-dns-86dd6887f5-rkh8v" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.682463 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zbpc7\" (UniqueName: \"kubernetes.io/projected/2c32a76b-d239-4e97-a16a-23423046cbec-kube-api-access-zbpc7\") pod \"dnsmasq-dns-86dd6887f5-rkh8v\" (UID: \"2c32a76b-d239-4e97-a16a-23423046cbec\") " pod="openstack/dnsmasq-dns-86dd6887f5-rkh8v" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.790070 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86dd6887f5-rkh8v" Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.872231 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76d9cfb7bf-tm9cn"] Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.945262 4998 generic.go:334] "Generic (PLEG): container finished" podID="520a4ac6-b880-4576-8772-da70e0d8f99d" containerID="e061b29717ef5f300f95b7d0a0463d74793d4d1ac725755bcc7f09fcec627d2a" exitCode=0 Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.945616 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-sstns" event={"ID":"520a4ac6-b880-4576-8772-da70e0d8f99d","Type":"ContainerDied","Data":"e061b29717ef5f300f95b7d0a0463d74793d4d1ac725755bcc7f09fcec627d2a"} Feb 03 07:05:53 crc kubenswrapper[4998]: I0203 07:05:53.963480 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76d9cfb7bf-tm9cn" event={"ID":"f5aa646b-a693-467e-80fc-9b5afbbb0897","Type":"ContainerStarted","Data":"a8487a5b7b721b6ee195b2628e91d4d277b91293489d3622ffc9a1d0097341fd"} Feb 03 07:05:54 crc kubenswrapper[4998]: I0203 07:05:54.165470 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7787cd6574-rthd9"] Feb 03 07:05:54 crc kubenswrapper[4998]: I0203 07:05:54.344763 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86dd6887f5-rkh8v"] Feb 03 07:05:54 crc kubenswrapper[4998]: W0203 07:05:54.345094 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2c32a76b_d239_4e97_a16a_23423046cbec.slice/crio-84917370cc27dbc3c0a3310d9f28b8946d30936ba5aa42fa7acfe761ff151b50 WatchSource:0}: Error finding container 
84917370cc27dbc3c0a3310d9f28b8946d30936ba5aa42fa7acfe761ff151b50: Status 404 returned error can't find the container with id 84917370cc27dbc3c0a3310d9f28b8946d30936ba5aa42fa7acfe761ff151b50 Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.020984 4998 generic.go:334] "Generic (PLEG): container finished" podID="ca949bbc-e75e-48f4-80f2-825ec09184a3" containerID="15969ca8a4285b78e3bba616374d40ab46d9a072768d82eb0eb8b367ef8200ab" exitCode=0 Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.021231 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-kl26j" event={"ID":"ca949bbc-e75e-48f4-80f2-825ec09184a3","Type":"ContainerDied","Data":"15969ca8a4285b78e3bba616374d40ab46d9a072768d82eb0eb8b367ef8200ab"} Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.027454 4998 generic.go:334] "Generic (PLEG): container finished" podID="537b4446-c59e-4c79-9f65-2221ddb6783c" containerID="1ff4307050afd5c78538e5168c27ca50b3d89c99e4973a0736340d10a37a60f9" exitCode=0 Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.027542 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-jgnc7" event={"ID":"537b4446-c59e-4c79-9f65-2221ddb6783c","Type":"ContainerDied","Data":"1ff4307050afd5c78538e5168c27ca50b3d89c99e4973a0736340d10a37a60f9"} Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.033586 4998 generic.go:334] "Generic (PLEG): container finished" podID="2c32a76b-d239-4e97-a16a-23423046cbec" containerID="46b567c279100add9fdeae949e5c76a2d251f70543b418fd3c7e656effeac64d" exitCode=0 Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.033657 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86dd6887f5-rkh8v" event={"ID":"2c32a76b-d239-4e97-a16a-23423046cbec","Type":"ContainerDied","Data":"46b567c279100add9fdeae949e5c76a2d251f70543b418fd3c7e656effeac64d"} Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.033687 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86dd6887f5-rkh8v" event={"ID":"2c32a76b-d239-4e97-a16a-23423046cbec","Type":"ContainerStarted","Data":"84917370cc27dbc3c0a3310d9f28b8946d30936ba5aa42fa7acfe761ff151b50"} Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.043060 4998 generic.go:334] "Generic (PLEG): container finished" podID="f5aa646b-a693-467e-80fc-9b5afbbb0897" containerID="d886674aabcde3a104ab8aaac89b96b03826227d109e838b463242abf99a6299" exitCode=0 Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.043159 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76d9cfb7bf-tm9cn" event={"ID":"f5aa646b-a693-467e-80fc-9b5afbbb0897","Type":"ContainerDied","Data":"d886674aabcde3a104ab8aaac89b96b03826227d109e838b463242abf99a6299"} Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.047236 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7787cd6574-rthd9" event={"ID":"dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8","Type":"ContainerStarted","Data":"f311a13a918df92982ce4a22f5dec1f02faec87241db57192d352931f4fee838"} Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.047275 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7787cd6574-rthd9" event={"ID":"dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8","Type":"ContainerStarted","Data":"038ddedd8269a4ec6b7e004267d315bb7d8a78e0674e67c6566730cef59cf318"} Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.047287 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7787cd6574-rthd9" 
event={"ID":"dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8","Type":"ContainerStarted","Data":"015a5235414b7f07773d5a5c216add7dd2cadfcd9e9548caa9add60f57a0d0c7"} Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.047301 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-7787cd6574-rthd9" Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.106455 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-7787cd6574-rthd9" podStartSLOduration=2.106439372 podStartE2EDuration="2.106439372s" podCreationTimestamp="2026-02-03 07:05:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:05:55.093606674 +0000 UTC m=+1193.380300480" watchObservedRunningTime="2026-02-03 07:05:55.106439372 +0000 UTC m=+1193.393133178" Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.391730 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76d9cfb7bf-tm9cn" Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.502541 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-sstns" Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.527964 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5aa646b-a693-467e-80fc-9b5afbbb0897-config\") pod \"f5aa646b-a693-467e-80fc-9b5afbbb0897\" (UID: \"f5aa646b-a693-467e-80fc-9b5afbbb0897\") " Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.528043 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f5aa646b-a693-467e-80fc-9b5afbbb0897-ovsdbserver-sb\") pod \"f5aa646b-a693-467e-80fc-9b5afbbb0897\" (UID: \"f5aa646b-a693-467e-80fc-9b5afbbb0897\") " Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.528243 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ctg4w\" (UniqueName: \"kubernetes.io/projected/f5aa646b-a693-467e-80fc-9b5afbbb0897-kube-api-access-ctg4w\") pod \"f5aa646b-a693-467e-80fc-9b5afbbb0897\" (UID: \"f5aa646b-a693-467e-80fc-9b5afbbb0897\") " Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.528269 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f5aa646b-a693-467e-80fc-9b5afbbb0897-dns-svc\") pod \"f5aa646b-a693-467e-80fc-9b5afbbb0897\" (UID: \"f5aa646b-a693-467e-80fc-9b5afbbb0897\") " Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.528289 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f5aa646b-a693-467e-80fc-9b5afbbb0897-ovsdbserver-nb\") pod \"f5aa646b-a693-467e-80fc-9b5afbbb0897\" (UID: \"f5aa646b-a693-467e-80fc-9b5afbbb0897\") " Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.534541 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5aa646b-a693-467e-80fc-9b5afbbb0897-kube-api-access-ctg4w" (OuterVolumeSpecName: "kube-api-access-ctg4w") pod "f5aa646b-a693-467e-80fc-9b5afbbb0897" (UID: "f5aa646b-a693-467e-80fc-9b5afbbb0897"). InnerVolumeSpecName "kube-api-access-ctg4w". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.550356 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5aa646b-a693-467e-80fc-9b5afbbb0897-config" (OuterVolumeSpecName: "config") pod "f5aa646b-a693-467e-80fc-9b5afbbb0897" (UID: "f5aa646b-a693-467e-80fc-9b5afbbb0897"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.552568 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5aa646b-a693-467e-80fc-9b5afbbb0897-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f5aa646b-a693-467e-80fc-9b5afbbb0897" (UID: "f5aa646b-a693-467e-80fc-9b5afbbb0897"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.552739 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5aa646b-a693-467e-80fc-9b5afbbb0897-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f5aa646b-a693-467e-80fc-9b5afbbb0897" (UID: "f5aa646b-a693-467e-80fc-9b5afbbb0897"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.571568 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5aa646b-a693-467e-80fc-9b5afbbb0897-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f5aa646b-a693-467e-80fc-9b5afbbb0897" (UID: "f5aa646b-a693-467e-80fc-9b5afbbb0897"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.630055 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/520a4ac6-b880-4576-8772-da70e0d8f99d-config-data\") pod \"520a4ac6-b880-4576-8772-da70e0d8f99d\" (UID: \"520a4ac6-b880-4576-8772-da70e0d8f99d\") " Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.630101 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/520a4ac6-b880-4576-8772-da70e0d8f99d-combined-ca-bundle\") pod \"520a4ac6-b880-4576-8772-da70e0d8f99d\" (UID: \"520a4ac6-b880-4576-8772-da70e0d8f99d\") " Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.630126 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/520a4ac6-b880-4576-8772-da70e0d8f99d-logs\") pod \"520a4ac6-b880-4576-8772-da70e0d8f99d\" (UID: \"520a4ac6-b880-4576-8772-da70e0d8f99d\") " Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.630175 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hcqb4\" (UniqueName: \"kubernetes.io/projected/520a4ac6-b880-4576-8772-da70e0d8f99d-kube-api-access-hcqb4\") pod \"520a4ac6-b880-4576-8772-da70e0d8f99d\" (UID: \"520a4ac6-b880-4576-8772-da70e0d8f99d\") " Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.630289 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/520a4ac6-b880-4576-8772-da70e0d8f99d-scripts\") pod \"520a4ac6-b880-4576-8772-da70e0d8f99d\" (UID: \"520a4ac6-b880-4576-8772-da70e0d8f99d\") " Feb 03 07:05:55 crc 
kubenswrapper[4998]: I0203 07:05:55.630817 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/520a4ac6-b880-4576-8772-da70e0d8f99d-logs" (OuterVolumeSpecName: "logs") pod "520a4ac6-b880-4576-8772-da70e0d8f99d" (UID: "520a4ac6-b880-4576-8772-da70e0d8f99d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.631919 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f5aa646b-a693-467e-80fc-9b5afbbb0897-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.631939 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ctg4w\" (UniqueName: \"kubernetes.io/projected/f5aa646b-a693-467e-80fc-9b5afbbb0897-kube-api-access-ctg4w\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.631951 4998 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f5aa646b-a693-467e-80fc-9b5afbbb0897-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.631960 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f5aa646b-a693-467e-80fc-9b5afbbb0897-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.631970 4998 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/520a4ac6-b880-4576-8772-da70e0d8f99d-logs\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.631977 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5aa646b-a693-467e-80fc-9b5afbbb0897-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.634396 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/520a4ac6-b880-4576-8772-da70e0d8f99d-scripts" (OuterVolumeSpecName: "scripts") pod "520a4ac6-b880-4576-8772-da70e0d8f99d" (UID: "520a4ac6-b880-4576-8772-da70e0d8f99d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.635248 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/520a4ac6-b880-4576-8772-da70e0d8f99d-kube-api-access-hcqb4" (OuterVolumeSpecName: "kube-api-access-hcqb4") pod "520a4ac6-b880-4576-8772-da70e0d8f99d" (UID: "520a4ac6-b880-4576-8772-da70e0d8f99d"). InnerVolumeSpecName "kube-api-access-hcqb4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.650636 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/520a4ac6-b880-4576-8772-da70e0d8f99d-config-data" (OuterVolumeSpecName: "config-data") pod "520a4ac6-b880-4576-8772-da70e0d8f99d" (UID: "520a4ac6-b880-4576-8772-da70e0d8f99d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.650879 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/520a4ac6-b880-4576-8772-da70e0d8f99d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "520a4ac6-b880-4576-8772-da70e0d8f99d" (UID: "520a4ac6-b880-4576-8772-da70e0d8f99d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.734667 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/520a4ac6-b880-4576-8772-da70e0d8f99d-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.734711 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/520a4ac6-b880-4576-8772-da70e0d8f99d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.735150 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hcqb4\" (UniqueName: \"kubernetes.io/projected/520a4ac6-b880-4576-8772-da70e0d8f99d-kube-api-access-hcqb4\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.735173 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/520a4ac6-b880-4576-8772-da70e0d8f99d-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.980690 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-6b56d78fc-pz4b8"] Feb 03 07:05:55 crc kubenswrapper[4998]: E0203 07:05:55.981253 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="520a4ac6-b880-4576-8772-da70e0d8f99d" containerName="placement-db-sync" Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.981286 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="520a4ac6-b880-4576-8772-da70e0d8f99d" containerName="placement-db-sync" Feb 03 07:05:55 crc kubenswrapper[4998]: E0203 07:05:55.981300 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5aa646b-a693-467e-80fc-9b5afbbb0897" containerName="init" Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.981306 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5aa646b-a693-467e-80fc-9b5afbbb0897" containerName="init" Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.981473 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="520a4ac6-b880-4576-8772-da70e0d8f99d" containerName="placement-db-sync" Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.981489 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5aa646b-a693-467e-80fc-9b5afbbb0897" containerName="init" Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.982378 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6b56d78fc-pz4b8" Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.993050 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Feb 03 07:05:55 crc kubenswrapper[4998]: I0203 07:05:55.993247 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.006850 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6b56d78fc-pz4b8"] Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.022233 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.022737 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.075951 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86dd6887f5-rkh8v" event={"ID":"2c32a76b-d239-4e97-a16a-23423046cbec","Type":"ContainerStarted","Data":"f4bbf5f6172b802a40070734cdca3dd49669f9da168945d7336ed7ba5d49e1cd"} Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.080503 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86dd6887f5-rkh8v" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.090816 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.092723 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.099301 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86dd6887f5-rkh8v" podStartSLOduration=3.099282347 podStartE2EDuration="3.099282347s" podCreationTimestamp="2026-02-03 07:05:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:05:56.094603613 +0000 UTC m=+1194.381297429" watchObservedRunningTime="2026-02-03 07:05:56.099282347 +0000 UTC m=+1194.385976153" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.099862 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76d9cfb7bf-tm9cn" event={"ID":"f5aa646b-a693-467e-80fc-9b5afbbb0897","Type":"ContainerDied","Data":"a8487a5b7b721b6ee195b2628e91d4d277b91293489d3622ffc9a1d0097341fd"} Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.099898 4998 scope.go:117] "RemoveContainer" containerID="d886674aabcde3a104ab8aaac89b96b03826227d109e838b463242abf99a6299" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.100045 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76d9cfb7bf-tm9cn" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.108588 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-sstns" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.109802 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-sstns" event={"ID":"520a4ac6-b880-4576-8772-da70e0d8f99d","Type":"ContainerDied","Data":"da41190e010466ea20f309848574b1a313fe643ba2ce1aa607c8bfa5a61b632e"} Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.109868 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="da41190e010466ea20f309848574b1a313fe643ba2ce1aa607c8bfa5a61b632e" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.109900 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.110537 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.147165 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/279352fa-2ab8-4816-ae30-37c7cf2f97ec-config\") pod \"neutron-6b56d78fc-pz4b8\" (UID: \"279352fa-2ab8-4816-ae30-37c7cf2f97ec\") " pod="openstack/neutron-6b56d78fc-pz4b8" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.147309 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/279352fa-2ab8-4816-ae30-37c7cf2f97ec-httpd-config\") pod \"neutron-6b56d78fc-pz4b8\" (UID: \"279352fa-2ab8-4816-ae30-37c7cf2f97ec\") " pod="openstack/neutron-6b56d78fc-pz4b8" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.147445 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/279352fa-2ab8-4816-ae30-37c7cf2f97ec-combined-ca-bundle\") pod \"neutron-6b56d78fc-pz4b8\" (UID: \"279352fa-2ab8-4816-ae30-37c7cf2f97ec\") " pod="openstack/neutron-6b56d78fc-pz4b8" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.147619 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/279352fa-2ab8-4816-ae30-37c7cf2f97ec-public-tls-certs\") pod \"neutron-6b56d78fc-pz4b8\" (UID: \"279352fa-2ab8-4816-ae30-37c7cf2f97ec\") " pod="openstack/neutron-6b56d78fc-pz4b8" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.147675 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/279352fa-2ab8-4816-ae30-37c7cf2f97ec-internal-tls-certs\") pod \"neutron-6b56d78fc-pz4b8\" (UID: \"279352fa-2ab8-4816-ae30-37c7cf2f97ec\") " pod="openstack/neutron-6b56d78fc-pz4b8" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.147744 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6nt4x\" (UniqueName: \"kubernetes.io/projected/279352fa-2ab8-4816-ae30-37c7cf2f97ec-kube-api-access-6nt4x\") pod \"neutron-6b56d78fc-pz4b8\" (UID: \"279352fa-2ab8-4816-ae30-37c7cf2f97ec\") " pod="openstack/neutron-6b56d78fc-pz4b8" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.147845 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/279352fa-2ab8-4816-ae30-37c7cf2f97ec-ovndb-tls-certs\") pod \"neutron-6b56d78fc-pz4b8\" (UID: \"279352fa-2ab8-4816-ae30-37c7cf2f97ec\") " pod="openstack/neutron-6b56d78fc-pz4b8" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.219941 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-56ff8c5b4b-c52fv"] Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.221710 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-56ff8c5b4b-c52fv" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.224212 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.224347 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.224514 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.224592 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.224656 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-pflnk" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.251867 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/279352fa-2ab8-4816-ae30-37c7cf2f97ec-combined-ca-bundle\") pod \"neutron-6b56d78fc-pz4b8\" (UID: \"279352fa-2ab8-4816-ae30-37c7cf2f97ec\") " pod="openstack/neutron-6b56d78fc-pz4b8" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.251955 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/279352fa-2ab8-4816-ae30-37c7cf2f97ec-public-tls-certs\") pod \"neutron-6b56d78fc-pz4b8\" (UID: \"279352fa-2ab8-4816-ae30-37c7cf2f97ec\") " pod="openstack/neutron-6b56d78fc-pz4b8" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.251984 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/279352fa-2ab8-4816-ae30-37c7cf2f97ec-internal-tls-certs\") pod \"neutron-6b56d78fc-pz4b8\" (UID: \"279352fa-2ab8-4816-ae30-37c7cf2f97ec\") " pod="openstack/neutron-6b56d78fc-pz4b8" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.252035 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6nt4x\" (UniqueName: \"kubernetes.io/projected/279352fa-2ab8-4816-ae30-37c7cf2f97ec-kube-api-access-6nt4x\") pod \"neutron-6b56d78fc-pz4b8\" (UID: \"279352fa-2ab8-4816-ae30-37c7cf2f97ec\") " pod="openstack/neutron-6b56d78fc-pz4b8" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.252077 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/279352fa-2ab8-4816-ae30-37c7cf2f97ec-ovndb-tls-certs\") pod \"neutron-6b56d78fc-pz4b8\" (UID: \"279352fa-2ab8-4816-ae30-37c7cf2f97ec\") " pod="openstack/neutron-6b56d78fc-pz4b8" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.252136 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/279352fa-2ab8-4816-ae30-37c7cf2f97ec-config\") pod 
\"neutron-6b56d78fc-pz4b8\" (UID: \"279352fa-2ab8-4816-ae30-37c7cf2f97ec\") " pod="openstack/neutron-6b56d78fc-pz4b8" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.252179 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/279352fa-2ab8-4816-ae30-37c7cf2f97ec-httpd-config\") pod \"neutron-6b56d78fc-pz4b8\" (UID: \"279352fa-2ab8-4816-ae30-37c7cf2f97ec\") " pod="openstack/neutron-6b56d78fc-pz4b8" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.261191 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/279352fa-2ab8-4816-ae30-37c7cf2f97ec-combined-ca-bundle\") pod \"neutron-6b56d78fc-pz4b8\" (UID: \"279352fa-2ab8-4816-ae30-37c7cf2f97ec\") " pod="openstack/neutron-6b56d78fc-pz4b8" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.261237 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-56ff8c5b4b-c52fv"] Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.268429 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/279352fa-2ab8-4816-ae30-37c7cf2f97ec-httpd-config\") pod \"neutron-6b56d78fc-pz4b8\" (UID: \"279352fa-2ab8-4816-ae30-37c7cf2f97ec\") " pod="openstack/neutron-6b56d78fc-pz4b8" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.270612 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/279352fa-2ab8-4816-ae30-37c7cf2f97ec-public-tls-certs\") pod \"neutron-6b56d78fc-pz4b8\" (UID: \"279352fa-2ab8-4816-ae30-37c7cf2f97ec\") " pod="openstack/neutron-6b56d78fc-pz4b8" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.271477 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/279352fa-2ab8-4816-ae30-37c7cf2f97ec-internal-tls-certs\") pod \"neutron-6b56d78fc-pz4b8\" (UID: \"279352fa-2ab8-4816-ae30-37c7cf2f97ec\") " pod="openstack/neutron-6b56d78fc-pz4b8" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.278830 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76d9cfb7bf-tm9cn"] Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.300703 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6nt4x\" (UniqueName: \"kubernetes.io/projected/279352fa-2ab8-4816-ae30-37c7cf2f97ec-kube-api-access-6nt4x\") pod \"neutron-6b56d78fc-pz4b8\" (UID: \"279352fa-2ab8-4816-ae30-37c7cf2f97ec\") " pod="openstack/neutron-6b56d78fc-pz4b8" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.324048 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/279352fa-2ab8-4816-ae30-37c7cf2f97ec-config\") pod \"neutron-6b56d78fc-pz4b8\" (UID: \"279352fa-2ab8-4816-ae30-37c7cf2f97ec\") " pod="openstack/neutron-6b56d78fc-pz4b8" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.324682 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/279352fa-2ab8-4816-ae30-37c7cf2f97ec-ovndb-tls-certs\") pod \"neutron-6b56d78fc-pz4b8\" (UID: \"279352fa-2ab8-4816-ae30-37c7cf2f97ec\") " pod="openstack/neutron-6b56d78fc-pz4b8" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.324886 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/dnsmasq-dns-76d9cfb7bf-tm9cn"] Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.363837 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b05b57d-c23c-4f54-b6a2-f00fd972a645-internal-tls-certs\") pod \"placement-56ff8c5b4b-c52fv\" (UID: \"4b05b57d-c23c-4f54-b6a2-f00fd972a645\") " pod="openstack/placement-56ff8c5b4b-c52fv" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.363908 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4b05b57d-c23c-4f54-b6a2-f00fd972a645-logs\") pod \"placement-56ff8c5b4b-c52fv\" (UID: \"4b05b57d-c23c-4f54-b6a2-f00fd972a645\") " pod="openstack/placement-56ff8c5b4b-c52fv" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.363978 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b05b57d-c23c-4f54-b6a2-f00fd972a645-public-tls-certs\") pod \"placement-56ff8c5b4b-c52fv\" (UID: \"4b05b57d-c23c-4f54-b6a2-f00fd972a645\") " pod="openstack/placement-56ff8c5b4b-c52fv" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.364196 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b05b57d-c23c-4f54-b6a2-f00fd972a645-config-data\") pod \"placement-56ff8c5b4b-c52fv\" (UID: \"4b05b57d-c23c-4f54-b6a2-f00fd972a645\") " pod="openstack/placement-56ff8c5b4b-c52fv" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.364240 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b05b57d-c23c-4f54-b6a2-f00fd972a645-scripts\") pod \"placement-56ff8c5b4b-c52fv\" (UID: \"4b05b57d-c23c-4f54-b6a2-f00fd972a645\") " pod="openstack/placement-56ff8c5b4b-c52fv" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.364359 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b05b57d-c23c-4f54-b6a2-f00fd972a645-combined-ca-bundle\") pod \"placement-56ff8c5b4b-c52fv\" (UID: \"4b05b57d-c23c-4f54-b6a2-f00fd972a645\") " pod="openstack/placement-56ff8c5b4b-c52fv" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.364395 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4kj8\" (UniqueName: \"kubernetes.io/projected/4b05b57d-c23c-4f54-b6a2-f00fd972a645-kube-api-access-x4kj8\") pod \"placement-56ff8c5b4b-c52fv\" (UID: \"4b05b57d-c23c-4f54-b6a2-f00fd972a645\") " pod="openstack/placement-56ff8c5b4b-c52fv" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.364880 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6b56d78fc-pz4b8" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.452766 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5aa646b-a693-467e-80fc-9b5afbbb0897" path="/var/lib/kubelet/pods/f5aa646b-a693-467e-80fc-9b5afbbb0897/volumes" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.466069 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b05b57d-c23c-4f54-b6a2-f00fd972a645-public-tls-certs\") pod \"placement-56ff8c5b4b-c52fv\" (UID: \"4b05b57d-c23c-4f54-b6a2-f00fd972a645\") " pod="openstack/placement-56ff8c5b4b-c52fv" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.466196 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b05b57d-c23c-4f54-b6a2-f00fd972a645-config-data\") pod \"placement-56ff8c5b4b-c52fv\" (UID: \"4b05b57d-c23c-4f54-b6a2-f00fd972a645\") " pod="openstack/placement-56ff8c5b4b-c52fv" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.466223 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b05b57d-c23c-4f54-b6a2-f00fd972a645-scripts\") pod \"placement-56ff8c5b4b-c52fv\" (UID: \"4b05b57d-c23c-4f54-b6a2-f00fd972a645\") " pod="openstack/placement-56ff8c5b4b-c52fv" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.466277 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b05b57d-c23c-4f54-b6a2-f00fd972a645-combined-ca-bundle\") pod \"placement-56ff8c5b4b-c52fv\" (UID: \"4b05b57d-c23c-4f54-b6a2-f00fd972a645\") " pod="openstack/placement-56ff8c5b4b-c52fv" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.466299 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4kj8\" (UniqueName: \"kubernetes.io/projected/4b05b57d-c23c-4f54-b6a2-f00fd972a645-kube-api-access-x4kj8\") pod \"placement-56ff8c5b4b-c52fv\" (UID: \"4b05b57d-c23c-4f54-b6a2-f00fd972a645\") " pod="openstack/placement-56ff8c5b4b-c52fv" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.466367 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b05b57d-c23c-4f54-b6a2-f00fd972a645-internal-tls-certs\") pod \"placement-56ff8c5b4b-c52fv\" (UID: \"4b05b57d-c23c-4f54-b6a2-f00fd972a645\") " pod="openstack/placement-56ff8c5b4b-c52fv" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.466386 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4b05b57d-c23c-4f54-b6a2-f00fd972a645-logs\") pod \"placement-56ff8c5b4b-c52fv\" (UID: \"4b05b57d-c23c-4f54-b6a2-f00fd972a645\") " pod="openstack/placement-56ff8c5b4b-c52fv" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.467144 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4b05b57d-c23c-4f54-b6a2-f00fd972a645-logs\") pod \"placement-56ff8c5b4b-c52fv\" (UID: \"4b05b57d-c23c-4f54-b6a2-f00fd972a645\") " pod="openstack/placement-56ff8c5b4b-c52fv" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.477152 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/4b05b57d-c23c-4f54-b6a2-f00fd972a645-scripts\") pod \"placement-56ff8c5b4b-c52fv\" (UID: \"4b05b57d-c23c-4f54-b6a2-f00fd972a645\") " pod="openstack/placement-56ff8c5b4b-c52fv" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.477630 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b05b57d-c23c-4f54-b6a2-f00fd972a645-internal-tls-certs\") pod \"placement-56ff8c5b4b-c52fv\" (UID: \"4b05b57d-c23c-4f54-b6a2-f00fd972a645\") " pod="openstack/placement-56ff8c5b4b-c52fv" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.478351 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b05b57d-c23c-4f54-b6a2-f00fd972a645-config-data\") pod \"placement-56ff8c5b4b-c52fv\" (UID: \"4b05b57d-c23c-4f54-b6a2-f00fd972a645\") " pod="openstack/placement-56ff8c5b4b-c52fv" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.480020 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b05b57d-c23c-4f54-b6a2-f00fd972a645-public-tls-certs\") pod \"placement-56ff8c5b4b-c52fv\" (UID: \"4b05b57d-c23c-4f54-b6a2-f00fd972a645\") " pod="openstack/placement-56ff8c5b4b-c52fv" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.493878 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b05b57d-c23c-4f54-b6a2-f00fd972a645-combined-ca-bundle\") pod \"placement-56ff8c5b4b-c52fv\" (UID: \"4b05b57d-c23c-4f54-b6a2-f00fd972a645\") " pod="openstack/placement-56ff8c5b4b-c52fv" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.508585 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4kj8\" (UniqueName: \"kubernetes.io/projected/4b05b57d-c23c-4f54-b6a2-f00fd972a645-kube-api-access-x4kj8\") pod \"placement-56ff8c5b4b-c52fv\" (UID: \"4b05b57d-c23c-4f54-b6a2-f00fd972a645\") " pod="openstack/placement-56ff8c5b4b-c52fv" Feb 03 07:05:56 crc kubenswrapper[4998]: I0203 07:05:56.548985 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-56ff8c5b4b-c52fv" Feb 03 07:05:58 crc kubenswrapper[4998]: I0203 07:05:58.130926 4998 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 03 07:05:58 crc kubenswrapper[4998]: I0203 07:05:58.131191 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-kl26j" event={"ID":"ca949bbc-e75e-48f4-80f2-825ec09184a3","Type":"ContainerDied","Data":"106195e9611ed3e3e54d6e04474be446c76494674225111a7afe122926773b9a"} Feb 03 07:05:58 crc kubenswrapper[4998]: I0203 07:05:58.131393 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="106195e9611ed3e3e54d6e04474be446c76494674225111a7afe122926773b9a" Feb 03 07:05:58 crc kubenswrapper[4998]: I0203 07:05:58.144203 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-kl26j" Feb 03 07:05:58 crc kubenswrapper[4998]: I0203 07:05:58.234141 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca949bbc-e75e-48f4-80f2-825ec09184a3-scripts\") pod \"ca949bbc-e75e-48f4-80f2-825ec09184a3\" (UID: \"ca949bbc-e75e-48f4-80f2-825ec09184a3\") " Feb 03 07:05:58 crc kubenswrapper[4998]: I0203 07:05:58.234228 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca949bbc-e75e-48f4-80f2-825ec09184a3-combined-ca-bundle\") pod \"ca949bbc-e75e-48f4-80f2-825ec09184a3\" (UID: \"ca949bbc-e75e-48f4-80f2-825ec09184a3\") " Feb 03 07:05:58 crc kubenswrapper[4998]: I0203 07:05:58.234333 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ca949bbc-e75e-48f4-80f2-825ec09184a3-fernet-keys\") pod \"ca949bbc-e75e-48f4-80f2-825ec09184a3\" (UID: \"ca949bbc-e75e-48f4-80f2-825ec09184a3\") " Feb 03 07:05:58 crc kubenswrapper[4998]: I0203 07:05:58.234411 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ca949bbc-e75e-48f4-80f2-825ec09184a3-credential-keys\") pod \"ca949bbc-e75e-48f4-80f2-825ec09184a3\" (UID: \"ca949bbc-e75e-48f4-80f2-825ec09184a3\") " Feb 03 07:05:58 crc kubenswrapper[4998]: I0203 07:05:58.234442 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jbmjl\" (UniqueName: \"kubernetes.io/projected/ca949bbc-e75e-48f4-80f2-825ec09184a3-kube-api-access-jbmjl\") pod \"ca949bbc-e75e-48f4-80f2-825ec09184a3\" (UID: \"ca949bbc-e75e-48f4-80f2-825ec09184a3\") " Feb 03 07:05:58 crc kubenswrapper[4998]: I0203 07:05:58.234467 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca949bbc-e75e-48f4-80f2-825ec09184a3-config-data\") pod \"ca949bbc-e75e-48f4-80f2-825ec09184a3\" (UID: \"ca949bbc-e75e-48f4-80f2-825ec09184a3\") " Feb 03 07:05:58 crc kubenswrapper[4998]: I0203 07:05:58.242128 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca949bbc-e75e-48f4-80f2-825ec09184a3-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "ca949bbc-e75e-48f4-80f2-825ec09184a3" (UID: "ca949bbc-e75e-48f4-80f2-825ec09184a3"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:05:58 crc kubenswrapper[4998]: I0203 07:05:58.242215 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca949bbc-e75e-48f4-80f2-825ec09184a3-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "ca949bbc-e75e-48f4-80f2-825ec09184a3" (UID: "ca949bbc-e75e-48f4-80f2-825ec09184a3"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:05:58 crc kubenswrapper[4998]: I0203 07:05:58.242454 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca949bbc-e75e-48f4-80f2-825ec09184a3-kube-api-access-jbmjl" (OuterVolumeSpecName: "kube-api-access-jbmjl") pod "ca949bbc-e75e-48f4-80f2-825ec09184a3" (UID: "ca949bbc-e75e-48f4-80f2-825ec09184a3"). InnerVolumeSpecName "kube-api-access-jbmjl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:05:58 crc kubenswrapper[4998]: I0203 07:05:58.248940 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca949bbc-e75e-48f4-80f2-825ec09184a3-scripts" (OuterVolumeSpecName: "scripts") pod "ca949bbc-e75e-48f4-80f2-825ec09184a3" (UID: "ca949bbc-e75e-48f4-80f2-825ec09184a3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:05:58 crc kubenswrapper[4998]: I0203 07:05:58.267740 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca949bbc-e75e-48f4-80f2-825ec09184a3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ca949bbc-e75e-48f4-80f2-825ec09184a3" (UID: "ca949bbc-e75e-48f4-80f2-825ec09184a3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:05:58 crc kubenswrapper[4998]: I0203 07:05:58.292346 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca949bbc-e75e-48f4-80f2-825ec09184a3-config-data" (OuterVolumeSpecName: "config-data") pod "ca949bbc-e75e-48f4-80f2-825ec09184a3" (UID: "ca949bbc-e75e-48f4-80f2-825ec09184a3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:05:58 crc kubenswrapper[4998]: I0203 07:05:58.336539 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca949bbc-e75e-48f4-80f2-825ec09184a3-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:58 crc kubenswrapper[4998]: I0203 07:05:58.336573 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca949bbc-e75e-48f4-80f2-825ec09184a3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:58 crc kubenswrapper[4998]: I0203 07:05:58.336585 4998 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ca949bbc-e75e-48f4-80f2-825ec09184a3-fernet-keys\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:58 crc kubenswrapper[4998]: I0203 07:05:58.336596 4998 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ca949bbc-e75e-48f4-80f2-825ec09184a3-credential-keys\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:58 crc kubenswrapper[4998]: I0203 07:05:58.336907 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jbmjl\" (UniqueName: \"kubernetes.io/projected/ca949bbc-e75e-48f4-80f2-825ec09184a3-kube-api-access-jbmjl\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:58 crc kubenswrapper[4998]: I0203 07:05:58.336921 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca949bbc-e75e-48f4-80f2-825ec09184a3-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:05:58 crc kubenswrapper[4998]: E0203 07:05:58.526481 4998 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podca949bbc_e75e_48f4_80f2_825ec09184a3.slice/crio-106195e9611ed3e3e54d6e04474be446c76494674225111a7afe122926773b9a\": RecentStats: unable to find data in memory cache]" Feb 03 07:05:58 crc kubenswrapper[4998]: I0203 07:05:58.747045 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Feb 03 07:05:59 crc 
kubenswrapper[4998]: I0203 07:05:59.138316 4998 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.138316 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-kl26j" Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.238712 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-6c7dff8f57-z8bvm"] Feb 03 07:05:59 crc kubenswrapper[4998]: E0203 07:05:59.239466 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca949bbc-e75e-48f4-80f2-825ec09184a3" containerName="keystone-bootstrap" Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.239486 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca949bbc-e75e-48f4-80f2-825ec09184a3" containerName="keystone-bootstrap" Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.239753 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca949bbc-e75e-48f4-80f2-825ec09184a3" containerName="keystone-bootstrap" Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.240599 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-6c7dff8f57-z8bvm" Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.243627 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.243867 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.245056 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.245102 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-k4sf9" Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.245190 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.245501 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.254510 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6c7dff8f57-z8bvm"] Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.360860 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-combined-ca-bundle\") pod \"keystone-6c7dff8f57-z8bvm\" (UID: \"4e13372a-d92b-4928-9e27-c1422d685e05\") " pod="openstack/keystone-6c7dff8f57-z8bvm" Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.360936 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-config-data\") pod \"keystone-6c7dff8f57-z8bvm\" (UID: \"4e13372a-d92b-4928-9e27-c1422d685e05\") " pod="openstack/keystone-6c7dff8f57-z8bvm" Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.360957 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-fernet-keys\") pod \"keystone-6c7dff8f57-z8bvm\" (UID: 
\"4e13372a-d92b-4928-9e27-c1422d685e05\") " pod="openstack/keystone-6c7dff8f57-z8bvm" Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.361005 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-internal-tls-certs\") pod \"keystone-6c7dff8f57-z8bvm\" (UID: \"4e13372a-d92b-4928-9e27-c1422d685e05\") " pod="openstack/keystone-6c7dff8f57-z8bvm" Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.361035 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-public-tls-certs\") pod \"keystone-6c7dff8f57-z8bvm\" (UID: \"4e13372a-d92b-4928-9e27-c1422d685e05\") " pod="openstack/keystone-6c7dff8f57-z8bvm" Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.361078 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-credential-keys\") pod \"keystone-6c7dff8f57-z8bvm\" (UID: \"4e13372a-d92b-4928-9e27-c1422d685e05\") " pod="openstack/keystone-6c7dff8f57-z8bvm" Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.361095 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-scripts\") pod \"keystone-6c7dff8f57-z8bvm\" (UID: \"4e13372a-d92b-4928-9e27-c1422d685e05\") " pod="openstack/keystone-6c7dff8f57-z8bvm" Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.361128 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mzss2\" (UniqueName: \"kubernetes.io/projected/4e13372a-d92b-4928-9e27-c1422d685e05-kube-api-access-mzss2\") pod \"keystone-6c7dff8f57-z8bvm\" (UID: \"4e13372a-d92b-4928-9e27-c1422d685e05\") " pod="openstack/keystone-6c7dff8f57-z8bvm" Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.387486 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.463069 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-internal-tls-certs\") pod \"keystone-6c7dff8f57-z8bvm\" (UID: \"4e13372a-d92b-4928-9e27-c1422d685e05\") " pod="openstack/keystone-6c7dff8f57-z8bvm" Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.463131 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-public-tls-certs\") pod \"keystone-6c7dff8f57-z8bvm\" (UID: \"4e13372a-d92b-4928-9e27-c1422d685e05\") " pod="openstack/keystone-6c7dff8f57-z8bvm" Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.463199 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-credential-keys\") pod \"keystone-6c7dff8f57-z8bvm\" (UID: \"4e13372a-d92b-4928-9e27-c1422d685e05\") " pod="openstack/keystone-6c7dff8f57-z8bvm" Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.463219 4998 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-scripts\") pod \"keystone-6c7dff8f57-z8bvm\" (UID: \"4e13372a-d92b-4928-9e27-c1422d685e05\") " pod="openstack/keystone-6c7dff8f57-z8bvm" Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.463337 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mzss2\" (UniqueName: \"kubernetes.io/projected/4e13372a-d92b-4928-9e27-c1422d685e05-kube-api-access-mzss2\") pod \"keystone-6c7dff8f57-z8bvm\" (UID: \"4e13372a-d92b-4928-9e27-c1422d685e05\") " pod="openstack/keystone-6c7dff8f57-z8bvm" Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.463461 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-combined-ca-bundle\") pod \"keystone-6c7dff8f57-z8bvm\" (UID: \"4e13372a-d92b-4928-9e27-c1422d685e05\") " pod="openstack/keystone-6c7dff8f57-z8bvm" Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.463505 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-config-data\") pod \"keystone-6c7dff8f57-z8bvm\" (UID: \"4e13372a-d92b-4928-9e27-c1422d685e05\") " pod="openstack/keystone-6c7dff8f57-z8bvm" Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.463521 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-fernet-keys\") pod \"keystone-6c7dff8f57-z8bvm\" (UID: \"4e13372a-d92b-4928-9e27-c1422d685e05\") " pod="openstack/keystone-6c7dff8f57-z8bvm" Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.469250 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-internal-tls-certs\") pod \"keystone-6c7dff8f57-z8bvm\" (UID: \"4e13372a-d92b-4928-9e27-c1422d685e05\") " pod="openstack/keystone-6c7dff8f57-z8bvm" Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.469708 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-public-tls-certs\") pod \"keystone-6c7dff8f57-z8bvm\" (UID: \"4e13372a-d92b-4928-9e27-c1422d685e05\") " pod="openstack/keystone-6c7dff8f57-z8bvm" Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.483854 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-scripts\") pod \"keystone-6c7dff8f57-z8bvm\" (UID: \"4e13372a-d92b-4928-9e27-c1422d685e05\") " pod="openstack/keystone-6c7dff8f57-z8bvm" Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.483987 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-combined-ca-bundle\") pod \"keystone-6c7dff8f57-z8bvm\" (UID: \"4e13372a-d92b-4928-9e27-c1422d685e05\") " pod="openstack/keystone-6c7dff8f57-z8bvm" Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.484530 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-fernet-keys\") pod 
\"keystone-6c7dff8f57-z8bvm\" (UID: \"4e13372a-d92b-4928-9e27-c1422d685e05\") " pod="openstack/keystone-6c7dff8f57-z8bvm" Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.492164 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-credential-keys\") pod \"keystone-6c7dff8f57-z8bvm\" (UID: \"4e13372a-d92b-4928-9e27-c1422d685e05\") " pod="openstack/keystone-6c7dff8f57-z8bvm" Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.500851 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-config-data\") pod \"keystone-6c7dff8f57-z8bvm\" (UID: \"4e13372a-d92b-4928-9e27-c1422d685e05\") " pod="openstack/keystone-6c7dff8f57-z8bvm" Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.505256 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mzss2\" (UniqueName: \"kubernetes.io/projected/4e13372a-d92b-4928-9e27-c1422d685e05-kube-api-access-mzss2\") pod \"keystone-6c7dff8f57-z8bvm\" (UID: \"4e13372a-d92b-4928-9e27-c1422d685e05\") " pod="openstack/keystone-6c7dff8f57-z8bvm" Feb 03 07:05:59 crc kubenswrapper[4998]: I0203 07:05:59.557151 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-6c7dff8f57-z8bvm" Feb 03 07:06:00 crc kubenswrapper[4998]: I0203 07:06:00.379652 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-jgnc7" Feb 03 07:06:00 crc kubenswrapper[4998]: I0203 07:06:00.481621 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/537b4446-c59e-4c79-9f65-2221ddb6783c-db-sync-config-data\") pod \"537b4446-c59e-4c79-9f65-2221ddb6783c\" (UID: \"537b4446-c59e-4c79-9f65-2221ddb6783c\") " Feb 03 07:06:00 crc kubenswrapper[4998]: I0203 07:06:00.482681 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/537b4446-c59e-4c79-9f65-2221ddb6783c-combined-ca-bundle\") pod \"537b4446-c59e-4c79-9f65-2221ddb6783c\" (UID: \"537b4446-c59e-4c79-9f65-2221ddb6783c\") " Feb 03 07:06:00 crc kubenswrapper[4998]: I0203 07:06:00.482759 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5b472\" (UniqueName: \"kubernetes.io/projected/537b4446-c59e-4c79-9f65-2221ddb6783c-kube-api-access-5b472\") pod \"537b4446-c59e-4c79-9f65-2221ddb6783c\" (UID: \"537b4446-c59e-4c79-9f65-2221ddb6783c\") " Feb 03 07:06:00 crc kubenswrapper[4998]: I0203 07:06:00.491208 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/537b4446-c59e-4c79-9f65-2221ddb6783c-kube-api-access-5b472" (OuterVolumeSpecName: "kube-api-access-5b472") pod "537b4446-c59e-4c79-9f65-2221ddb6783c" (UID: "537b4446-c59e-4c79-9f65-2221ddb6783c"). InnerVolumeSpecName "kube-api-access-5b472". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:06:00 crc kubenswrapper[4998]: I0203 07:06:00.502538 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/537b4446-c59e-4c79-9f65-2221ddb6783c-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "537b4446-c59e-4c79-9f65-2221ddb6783c" (UID: "537b4446-c59e-4c79-9f65-2221ddb6783c"). 
InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:06:00 crc kubenswrapper[4998]: I0203 07:06:00.527094 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/537b4446-c59e-4c79-9f65-2221ddb6783c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "537b4446-c59e-4c79-9f65-2221ddb6783c" (UID: "537b4446-c59e-4c79-9f65-2221ddb6783c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:06:00 crc kubenswrapper[4998]: I0203 07:06:00.587261 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5b472\" (UniqueName: \"kubernetes.io/projected/537b4446-c59e-4c79-9f65-2221ddb6783c-kube-api-access-5b472\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:00 crc kubenswrapper[4998]: I0203 07:06:00.587291 4998 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/537b4446-c59e-4c79-9f65-2221ddb6783c-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:00 crc kubenswrapper[4998]: I0203 07:06:00.587507 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/537b4446-c59e-4c79-9f65-2221ddb6783c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:00 crc kubenswrapper[4998]: I0203 07:06:00.961953 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-56ff8c5b4b-c52fv"] Feb 03 07:06:00 crc kubenswrapper[4998]: I0203 07:06:00.982186 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6b56d78fc-pz4b8"] Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.008400 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6c7dff8f57-z8bvm"] Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.156561 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-jgnc7" event={"ID":"537b4446-c59e-4c79-9f65-2221ddb6783c","Type":"ContainerDied","Data":"65e590cab571b3f3e33f594b9845c350650b69fef34e3e6c8418172f7de310de"} Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.156864 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="65e590cab571b3f3e33f594b9845c350650b69fef34e3e6c8418172f7de310de" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.156598 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-jgnc7" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.159236 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-56ff8c5b4b-c52fv" event={"ID":"4b05b57d-c23c-4f54-b6a2-f00fd972a645","Type":"ContainerStarted","Data":"14252a855cdaa683d962e21a2ec14dba71a07c75a9f969bdf7e7faddc4d626b2"} Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.161065 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"500ab31f-c75b-4a96-afa1-56f868909ecb","Type":"ContainerStarted","Data":"3e801fbf6cba0fb9db79a48ad21a7ce71845cdd7210b0c8221c56cba49b062a5"} Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.163111 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6b56d78fc-pz4b8" event={"ID":"279352fa-2ab8-4816-ae30-37c7cf2f97ec","Type":"ContainerStarted","Data":"a8c483f337f76684e42e564cab7e204c56a9f3950b1f997c59a7fcf99e259eba"} Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.164441 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6c7dff8f57-z8bvm" event={"ID":"4e13372a-d92b-4928-9e27-c1422d685e05","Type":"ContainerStarted","Data":"7d46b3dc28148a42a558f264dfe35b76a89f9cf679104f7bafabfce67f6febc6"} Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.641879 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-78755df597-h9t98"] Feb 03 07:06:01 crc kubenswrapper[4998]: E0203 07:06:01.642296 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="537b4446-c59e-4c79-9f65-2221ddb6783c" containerName="barbican-db-sync" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.642312 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="537b4446-c59e-4c79-9f65-2221ddb6783c" containerName="barbican-db-sync" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.642536 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="537b4446-c59e-4c79-9f65-2221ddb6783c" containerName="barbican-db-sync" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.643870 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-78755df597-h9t98" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.647765 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.647945 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-jfbpm" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.648081 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.694890 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-78755df597-h9t98"] Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.711691 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0df5b57a-e165-41ef-8e19-30b87b9566f3-combined-ca-bundle\") pod \"barbican-worker-78755df597-h9t98\" (UID: \"0df5b57a-e165-41ef-8e19-30b87b9566f3\") " pod="openstack/barbican-worker-78755df597-h9t98" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.712090 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfjfq\" (UniqueName: \"kubernetes.io/projected/0df5b57a-e165-41ef-8e19-30b87b9566f3-kube-api-access-jfjfq\") pod \"barbican-worker-78755df597-h9t98\" (UID: \"0df5b57a-e165-41ef-8e19-30b87b9566f3\") " pod="openstack/barbican-worker-78755df597-h9t98" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.712125 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0df5b57a-e165-41ef-8e19-30b87b9566f3-config-data\") pod \"barbican-worker-78755df597-h9t98\" (UID: \"0df5b57a-e165-41ef-8e19-30b87b9566f3\") " pod="openstack/barbican-worker-78755df597-h9t98" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.712163 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0df5b57a-e165-41ef-8e19-30b87b9566f3-config-data-custom\") pod \"barbican-worker-78755df597-h9t98\" (UID: \"0df5b57a-e165-41ef-8e19-30b87b9566f3\") " pod="openstack/barbican-worker-78755df597-h9t98" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.712195 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0df5b57a-e165-41ef-8e19-30b87b9566f3-logs\") pod \"barbican-worker-78755df597-h9t98\" (UID: \"0df5b57a-e165-41ef-8e19-30b87b9566f3\") " pod="openstack/barbican-worker-78755df597-h9t98" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.731042 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-5d94bbfff8-q2v5c"] Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.732826 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-5d94bbfff8-q2v5c" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.734899 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.748046 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-5d94bbfff8-q2v5c"] Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.771130 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86dd6887f5-rkh8v"] Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.771449 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86dd6887f5-rkh8v" podUID="2c32a76b-d239-4e97-a16a-23423046cbec" containerName="dnsmasq-dns" containerID="cri-o://f4bbf5f6172b802a40070734cdca3dd49669f9da168945d7336ed7ba5d49e1cd" gracePeriod=10 Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.773937 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-86dd6887f5-rkh8v" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.815264 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0df5b57a-e165-41ef-8e19-30b87b9566f3-combined-ca-bundle\") pod \"barbican-worker-78755df597-h9t98\" (UID: \"0df5b57a-e165-41ef-8e19-30b87b9566f3\") " pod="openstack/barbican-worker-78755df597-h9t98" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.815324 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6ft5n\" (UniqueName: \"kubernetes.io/projected/13caad28-67cf-4251-9a98-e324e6f9722a-kube-api-access-6ft5n\") pod \"barbican-keystone-listener-5d94bbfff8-q2v5c\" (UID: \"13caad28-67cf-4251-9a98-e324e6f9722a\") " pod="openstack/barbican-keystone-listener-5d94bbfff8-q2v5c" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.815361 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13caad28-67cf-4251-9a98-e324e6f9722a-config-data\") pod \"barbican-keystone-listener-5d94bbfff8-q2v5c\" (UID: \"13caad28-67cf-4251-9a98-e324e6f9722a\") " pod="openstack/barbican-keystone-listener-5d94bbfff8-q2v5c" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.815424 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfjfq\" (UniqueName: \"kubernetes.io/projected/0df5b57a-e165-41ef-8e19-30b87b9566f3-kube-api-access-jfjfq\") pod \"barbican-worker-78755df597-h9t98\" (UID: \"0df5b57a-e165-41ef-8e19-30b87b9566f3\") " pod="openstack/barbican-worker-78755df597-h9t98" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.815446 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0df5b57a-e165-41ef-8e19-30b87b9566f3-config-data\") pod \"barbican-worker-78755df597-h9t98\" (UID: \"0df5b57a-e165-41ef-8e19-30b87b9566f3\") " pod="openstack/barbican-worker-78755df597-h9t98" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.815475 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/13caad28-67cf-4251-9a98-e324e6f9722a-logs\") pod \"barbican-keystone-listener-5d94bbfff8-q2v5c\" (UID: 
\"13caad28-67cf-4251-9a98-e324e6f9722a\") " pod="openstack/barbican-keystone-listener-5d94bbfff8-q2v5c" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.815506 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0df5b57a-e165-41ef-8e19-30b87b9566f3-config-data-custom\") pod \"barbican-worker-78755df597-h9t98\" (UID: \"0df5b57a-e165-41ef-8e19-30b87b9566f3\") " pod="openstack/barbican-worker-78755df597-h9t98" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.815533 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13caad28-67cf-4251-9a98-e324e6f9722a-combined-ca-bundle\") pod \"barbican-keystone-listener-5d94bbfff8-q2v5c\" (UID: \"13caad28-67cf-4251-9a98-e324e6f9722a\") " pod="openstack/barbican-keystone-listener-5d94bbfff8-q2v5c" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.815552 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/13caad28-67cf-4251-9a98-e324e6f9722a-config-data-custom\") pod \"barbican-keystone-listener-5d94bbfff8-q2v5c\" (UID: \"13caad28-67cf-4251-9a98-e324e6f9722a\") " pod="openstack/barbican-keystone-listener-5d94bbfff8-q2v5c" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.815582 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0df5b57a-e165-41ef-8e19-30b87b9566f3-logs\") pod \"barbican-worker-78755df597-h9t98\" (UID: \"0df5b57a-e165-41ef-8e19-30b87b9566f3\") " pod="openstack/barbican-worker-78755df597-h9t98" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.827401 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0df5b57a-e165-41ef-8e19-30b87b9566f3-logs\") pod \"barbican-worker-78755df597-h9t98\" (UID: \"0df5b57a-e165-41ef-8e19-30b87b9566f3\") " pod="openstack/barbican-worker-78755df597-h9t98" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.831970 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0df5b57a-e165-41ef-8e19-30b87b9566f3-config-data\") pod \"barbican-worker-78755df597-h9t98\" (UID: \"0df5b57a-e165-41ef-8e19-30b87b9566f3\") " pod="openstack/barbican-worker-78755df597-h9t98" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.863515 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0df5b57a-e165-41ef-8e19-30b87b9566f3-config-data-custom\") pod \"barbican-worker-78755df597-h9t98\" (UID: \"0df5b57a-e165-41ef-8e19-30b87b9566f3\") " pod="openstack/barbican-worker-78755df597-h9t98" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.868686 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-54c675984c-nl7km"] Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.871458 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0df5b57a-e165-41ef-8e19-30b87b9566f3-combined-ca-bundle\") pod \"barbican-worker-78755df597-h9t98\" (UID: \"0df5b57a-e165-41ef-8e19-30b87b9566f3\") " pod="openstack/barbican-worker-78755df597-h9t98" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.874616 4998 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-54c675984c-nl7km" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.897582 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-54c675984c-nl7km"] Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.917350 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6ft5n\" (UniqueName: \"kubernetes.io/projected/13caad28-67cf-4251-9a98-e324e6f9722a-kube-api-access-6ft5n\") pod \"barbican-keystone-listener-5d94bbfff8-q2v5c\" (UID: \"13caad28-67cf-4251-9a98-e324e6f9722a\") " pod="openstack/barbican-keystone-listener-5d94bbfff8-q2v5c" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.917421 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13caad28-67cf-4251-9a98-e324e6f9722a-config-data\") pod \"barbican-keystone-listener-5d94bbfff8-q2v5c\" (UID: \"13caad28-67cf-4251-9a98-e324e6f9722a\") " pod="openstack/barbican-keystone-listener-5d94bbfff8-q2v5c" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.917528 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/13caad28-67cf-4251-9a98-e324e6f9722a-logs\") pod \"barbican-keystone-listener-5d94bbfff8-q2v5c\" (UID: \"13caad28-67cf-4251-9a98-e324e6f9722a\") " pod="openstack/barbican-keystone-listener-5d94bbfff8-q2v5c" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.917563 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13caad28-67cf-4251-9a98-e324e6f9722a-combined-ca-bundle\") pod \"barbican-keystone-listener-5d94bbfff8-q2v5c\" (UID: \"13caad28-67cf-4251-9a98-e324e6f9722a\") " pod="openstack/barbican-keystone-listener-5d94bbfff8-q2v5c" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.917586 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/13caad28-67cf-4251-9a98-e324e6f9722a-config-data-custom\") pod \"barbican-keystone-listener-5d94bbfff8-q2v5c\" (UID: \"13caad28-67cf-4251-9a98-e324e6f9722a\") " pod="openstack/barbican-keystone-listener-5d94bbfff8-q2v5c" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.951389 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfjfq\" (UniqueName: \"kubernetes.io/projected/0df5b57a-e165-41ef-8e19-30b87b9566f3-kube-api-access-jfjfq\") pod \"barbican-worker-78755df597-h9t98\" (UID: \"0df5b57a-e165-41ef-8e19-30b87b9566f3\") " pod="openstack/barbican-worker-78755df597-h9t98" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.959261 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/13caad28-67cf-4251-9a98-e324e6f9722a-logs\") pod \"barbican-keystone-listener-5d94bbfff8-q2v5c\" (UID: \"13caad28-67cf-4251-9a98-e324e6f9722a\") " pod="openstack/barbican-keystone-listener-5d94bbfff8-q2v5c" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.962079 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13caad28-67cf-4251-9a98-e324e6f9722a-combined-ca-bundle\") pod \"barbican-keystone-listener-5d94bbfff8-q2v5c\" (UID: \"13caad28-67cf-4251-9a98-e324e6f9722a\") " 
pod="openstack/barbican-keystone-listener-5d94bbfff8-q2v5c" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.969821 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/13caad28-67cf-4251-9a98-e324e6f9722a-config-data-custom\") pod \"barbican-keystone-listener-5d94bbfff8-q2v5c\" (UID: \"13caad28-67cf-4251-9a98-e324e6f9722a\") " pod="openstack/barbican-keystone-listener-5d94bbfff8-q2v5c" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.975192 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13caad28-67cf-4251-9a98-e324e6f9722a-config-data\") pod \"barbican-keystone-listener-5d94bbfff8-q2v5c\" (UID: \"13caad28-67cf-4251-9a98-e324e6f9722a\") " pod="openstack/barbican-keystone-listener-5d94bbfff8-q2v5c" Feb 03 07:06:01 crc kubenswrapper[4998]: I0203 07:06:01.987220 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-78755df597-h9t98" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.021040 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/00d2b544-495e-4a19-8e08-e8db2febc192-dns-swift-storage-0\") pod \"dnsmasq-dns-54c675984c-nl7km\" (UID: \"00d2b544-495e-4a19-8e08-e8db2febc192\") " pod="openstack/dnsmasq-dns-54c675984c-nl7km" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.021100 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/00d2b544-495e-4a19-8e08-e8db2febc192-ovsdbserver-nb\") pod \"dnsmasq-dns-54c675984c-nl7km\" (UID: \"00d2b544-495e-4a19-8e08-e8db2febc192\") " pod="openstack/dnsmasq-dns-54c675984c-nl7km" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.021151 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/00d2b544-495e-4a19-8e08-e8db2febc192-ovsdbserver-sb\") pod \"dnsmasq-dns-54c675984c-nl7km\" (UID: \"00d2b544-495e-4a19-8e08-e8db2febc192\") " pod="openstack/dnsmasq-dns-54c675984c-nl7km" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.021581 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j7nft\" (UniqueName: \"kubernetes.io/projected/00d2b544-495e-4a19-8e08-e8db2febc192-kube-api-access-j7nft\") pod \"dnsmasq-dns-54c675984c-nl7km\" (UID: \"00d2b544-495e-4a19-8e08-e8db2febc192\") " pod="openstack/dnsmasq-dns-54c675984c-nl7km" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.021648 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/00d2b544-495e-4a19-8e08-e8db2febc192-config\") pod \"dnsmasq-dns-54c675984c-nl7km\" (UID: \"00d2b544-495e-4a19-8e08-e8db2febc192\") " pod="openstack/dnsmasq-dns-54c675984c-nl7km" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.021706 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/00d2b544-495e-4a19-8e08-e8db2febc192-dns-svc\") pod \"dnsmasq-dns-54c675984c-nl7km\" (UID: \"00d2b544-495e-4a19-8e08-e8db2febc192\") " pod="openstack/dnsmasq-dns-54c675984c-nl7km" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 
07:06:02.024566 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6ft5n\" (UniqueName: \"kubernetes.io/projected/13caad28-67cf-4251-9a98-e324e6f9722a-kube-api-access-6ft5n\") pod \"barbican-keystone-listener-5d94bbfff8-q2v5c\" (UID: \"13caad28-67cf-4251-9a98-e324e6f9722a\") " pod="openstack/barbican-keystone-listener-5d94bbfff8-q2v5c" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.039684 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-54565899c6-9xvmq"] Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.041243 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-54565899c6-9xvmq" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.046135 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.064969 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-54565899c6-9xvmq"] Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.082085 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.083281 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.123222 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/00d2b544-495e-4a19-8e08-e8db2febc192-ovsdbserver-sb\") pod \"dnsmasq-dns-54c675984c-nl7km\" (UID: \"00d2b544-495e-4a19-8e08-e8db2febc192\") " pod="openstack/dnsmasq-dns-54c675984c-nl7km" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.123306 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/612d7bc1-c52d-4507-a8be-5e35d017be5d-config-data\") pod \"barbican-api-54565899c6-9xvmq\" (UID: \"612d7bc1-c52d-4507-a8be-5e35d017be5d\") " pod="openstack/barbican-api-54565899c6-9xvmq" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.123380 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75grb\" (UniqueName: \"kubernetes.io/projected/612d7bc1-c52d-4507-a8be-5e35d017be5d-kube-api-access-75grb\") pod \"barbican-api-54565899c6-9xvmq\" (UID: \"612d7bc1-c52d-4507-a8be-5e35d017be5d\") " pod="openstack/barbican-api-54565899c6-9xvmq" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.123414 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j7nft\" (UniqueName: \"kubernetes.io/projected/00d2b544-495e-4a19-8e08-e8db2febc192-kube-api-access-j7nft\") pod \"dnsmasq-dns-54c675984c-nl7km\" (UID: \"00d2b544-495e-4a19-8e08-e8db2febc192\") " pod="openstack/dnsmasq-dns-54c675984c-nl7km" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.123445 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/00d2b544-495e-4a19-8e08-e8db2febc192-config\") pod \"dnsmasq-dns-54c675984c-nl7km\" (UID: \"00d2b544-495e-4a19-8e08-e8db2febc192\") " pod="openstack/dnsmasq-dns-54c675984c-nl7km" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.123481 4998 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/00d2b544-495e-4a19-8e08-e8db2febc192-dns-svc\") pod \"dnsmasq-dns-54c675984c-nl7km\" (UID: \"00d2b544-495e-4a19-8e08-e8db2febc192\") " pod="openstack/dnsmasq-dns-54c675984c-nl7km" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.123525 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/00d2b544-495e-4a19-8e08-e8db2febc192-dns-swift-storage-0\") pod \"dnsmasq-dns-54c675984c-nl7km\" (UID: \"00d2b544-495e-4a19-8e08-e8db2febc192\") " pod="openstack/dnsmasq-dns-54c675984c-nl7km" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.123577 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/612d7bc1-c52d-4507-a8be-5e35d017be5d-config-data-custom\") pod \"barbican-api-54565899c6-9xvmq\" (UID: \"612d7bc1-c52d-4507-a8be-5e35d017be5d\") " pod="openstack/barbican-api-54565899c6-9xvmq" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.123631 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/00d2b544-495e-4a19-8e08-e8db2febc192-ovsdbserver-nb\") pod \"dnsmasq-dns-54c675984c-nl7km\" (UID: \"00d2b544-495e-4a19-8e08-e8db2febc192\") " pod="openstack/dnsmasq-dns-54c675984c-nl7km" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.123677 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/612d7bc1-c52d-4507-a8be-5e35d017be5d-logs\") pod \"barbican-api-54565899c6-9xvmq\" (UID: \"612d7bc1-c52d-4507-a8be-5e35d017be5d\") " pod="openstack/barbican-api-54565899c6-9xvmq" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.123719 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/612d7bc1-c52d-4507-a8be-5e35d017be5d-combined-ca-bundle\") pod \"barbican-api-54565899c6-9xvmq\" (UID: \"612d7bc1-c52d-4507-a8be-5e35d017be5d\") " pod="openstack/barbican-api-54565899c6-9xvmq" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.124817 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/00d2b544-495e-4a19-8e08-e8db2febc192-ovsdbserver-sb\") pod \"dnsmasq-dns-54c675984c-nl7km\" (UID: \"00d2b544-495e-4a19-8e08-e8db2febc192\") " pod="openstack/dnsmasq-dns-54c675984c-nl7km" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.126181 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/00d2b544-495e-4a19-8e08-e8db2febc192-config\") pod \"dnsmasq-dns-54c675984c-nl7km\" (UID: \"00d2b544-495e-4a19-8e08-e8db2febc192\") " pod="openstack/dnsmasq-dns-54c675984c-nl7km" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.126829 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/00d2b544-495e-4a19-8e08-e8db2febc192-dns-svc\") pod \"dnsmasq-dns-54c675984c-nl7km\" (UID: \"00d2b544-495e-4a19-8e08-e8db2febc192\") " pod="openstack/dnsmasq-dns-54c675984c-nl7km" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.127347 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/00d2b544-495e-4a19-8e08-e8db2febc192-dns-swift-storage-0\") pod \"dnsmasq-dns-54c675984c-nl7km\" (UID: \"00d2b544-495e-4a19-8e08-e8db2febc192\") " pod="openstack/dnsmasq-dns-54c675984c-nl7km" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.128225 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/00d2b544-495e-4a19-8e08-e8db2febc192-ovsdbserver-nb\") pod \"dnsmasq-dns-54c675984c-nl7km\" (UID: \"00d2b544-495e-4a19-8e08-e8db2febc192\") " pod="openstack/dnsmasq-dns-54c675984c-nl7km" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.137045 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.145949 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.150850 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j7nft\" (UniqueName: \"kubernetes.io/projected/00d2b544-495e-4a19-8e08-e8db2febc192-kube-api-access-j7nft\") pod \"dnsmasq-dns-54c675984c-nl7km\" (UID: \"00d2b544-495e-4a19-8e08-e8db2febc192\") " pod="openstack/dnsmasq-dns-54c675984c-nl7km" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.162301 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-5d94bbfff8-q2v5c" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.215205 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6b56d78fc-pz4b8" event={"ID":"279352fa-2ab8-4816-ae30-37c7cf2f97ec","Type":"ContainerStarted","Data":"8705db3ca6fa78c8dfd60501654b6690ab858f596ea50c8b59bd5797f6b8f4f7"} Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.215254 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6b56d78fc-pz4b8" event={"ID":"279352fa-2ab8-4816-ae30-37c7cf2f97ec","Type":"ContainerStarted","Data":"eec1ec0874728148f23cbae4eee334076a34779b6923eff41f7134e28944a52b"} Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.215339 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-6b56d78fc-pz4b8" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.226592 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/612d7bc1-c52d-4507-a8be-5e35d017be5d-config-data\") pod \"barbican-api-54565899c6-9xvmq\" (UID: \"612d7bc1-c52d-4507-a8be-5e35d017be5d\") " pod="openstack/barbican-api-54565899c6-9xvmq" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.226709 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75grb\" (UniqueName: \"kubernetes.io/projected/612d7bc1-c52d-4507-a8be-5e35d017be5d-kube-api-access-75grb\") pod \"barbican-api-54565899c6-9xvmq\" (UID: \"612d7bc1-c52d-4507-a8be-5e35d017be5d\") " pod="openstack/barbican-api-54565899c6-9xvmq" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.226789 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/612d7bc1-c52d-4507-a8be-5e35d017be5d-config-data-custom\") pod \"barbican-api-54565899c6-9xvmq\" (UID: \"612d7bc1-c52d-4507-a8be-5e35d017be5d\") " pod="openstack/barbican-api-54565899c6-9xvmq" Feb 03 07:06:02 crc 
Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.226946 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/612d7bc1-c52d-4507-a8be-5e35d017be5d-logs\") pod \"barbican-api-54565899c6-9xvmq\" (UID: \"612d7bc1-c52d-4507-a8be-5e35d017be5d\") " pod="openstack/barbican-api-54565899c6-9xvmq"
Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.226999 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/612d7bc1-c52d-4507-a8be-5e35d017be5d-combined-ca-bundle\") pod \"barbican-api-54565899c6-9xvmq\" (UID: \"612d7bc1-c52d-4507-a8be-5e35d017be5d\") " pod="openstack/barbican-api-54565899c6-9xvmq"
Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.234504 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/612d7bc1-c52d-4507-a8be-5e35d017be5d-config-data-custom\") pod \"barbican-api-54565899c6-9xvmq\" (UID: \"612d7bc1-c52d-4507-a8be-5e35d017be5d\") " pod="openstack/barbican-api-54565899c6-9xvmq"
Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.235245 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/612d7bc1-c52d-4507-a8be-5e35d017be5d-logs\") pod \"barbican-api-54565899c6-9xvmq\" (UID: \"612d7bc1-c52d-4507-a8be-5e35d017be5d\") " pod="openstack/barbican-api-54565899c6-9xvmq"
Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.236211 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/612d7bc1-c52d-4507-a8be-5e35d017be5d-config-data\") pod \"barbican-api-54565899c6-9xvmq\" (UID: \"612d7bc1-c52d-4507-a8be-5e35d017be5d\") " pod="openstack/barbican-api-54565899c6-9xvmq"
Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.237989 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6c7dff8f57-z8bvm" event={"ID":"4e13372a-d92b-4928-9e27-c1422d685e05","Type":"ContainerStarted","Data":"2c023f77ba3c53d6dba2a954909d4bfd4b6a41baa1708019bf6cafdf0e585565"}
Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.238391 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-6c7dff8f57-z8bvm"
Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.239911 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/612d7bc1-c52d-4507-a8be-5e35d017be5d-combined-ca-bundle\") pod \"barbican-api-54565899c6-9xvmq\" (UID: \"612d7bc1-c52d-4507-a8be-5e35d017be5d\") " pod="openstack/barbican-api-54565899c6-9xvmq"
Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.244179 4998 generic.go:334] "Generic (PLEG): container finished" podID="2c32a76b-d239-4e97-a16a-23423046cbec" containerID="f4bbf5f6172b802a40070734cdca3dd49669f9da168945d7336ed7ba5d49e1cd" exitCode=0
Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.244266 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86dd6887f5-rkh8v" event={"ID":"2c32a76b-d239-4e97-a16a-23423046cbec","Type":"ContainerDied","Data":"f4bbf5f6172b802a40070734cdca3dd49669f9da168945d7336ed7ba5d49e1cd"}
Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.254623 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-6b56d78fc-pz4b8" podStartSLOduration=7.254601829
podStartE2EDuration="7.254601829s" podCreationTimestamp="2026-02-03 07:05:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:06:02.244649203 +0000 UTC m=+1200.531343029" watchObservedRunningTime="2026-02-03 07:06:02.254601829 +0000 UTC m=+1200.541295635" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.265619 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75grb\" (UniqueName: \"kubernetes.io/projected/612d7bc1-c52d-4507-a8be-5e35d017be5d-kube-api-access-75grb\") pod \"barbican-api-54565899c6-9xvmq\" (UID: \"612d7bc1-c52d-4507-a8be-5e35d017be5d\") " pod="openstack/barbican-api-54565899c6-9xvmq" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.288833 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-56ff8c5b4b-c52fv" event={"ID":"4b05b57d-c23c-4f54-b6a2-f00fd972a645","Type":"ContainerStarted","Data":"1fccb81ac7e9f7c431a9e2be9488418a1b7289be69c45384369859d0ac6a6e32"} Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.288869 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-56ff8c5b4b-c52fv" event={"ID":"4b05b57d-c23c-4f54-b6a2-f00fd972a645","Type":"ContainerStarted","Data":"db9ecc90aaa6dd8decbfaa5d261b6869a3cc296d7722872bfc4021effd336a6b"} Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.288882 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.289011 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.289022 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-56ff8c5b4b-c52fv" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.289032 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-56ff8c5b4b-c52fv" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.317924 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-6c7dff8f57-z8bvm" podStartSLOduration=3.317907003 podStartE2EDuration="3.317907003s" podCreationTimestamp="2026-02-03 07:05:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:06:02.281423837 +0000 UTC m=+1200.568117663" watchObservedRunningTime="2026-02-03 07:06:02.317907003 +0000 UTC m=+1200.604600809" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.349874 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-54c675984c-nl7km" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.370835 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-54565899c6-9xvmq" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.430878 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86dd6887f5-rkh8v" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.488966 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-56ff8c5b4b-c52fv" podStartSLOduration=6.488945795 podStartE2EDuration="6.488945795s" podCreationTimestamp="2026-02-03 07:05:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:06:02.339214254 +0000 UTC m=+1200.625908070" watchObservedRunningTime="2026-02-03 07:06:02.488945795 +0000 UTC m=+1200.775639601" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.533337 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c32a76b-d239-4e97-a16a-23423046cbec-ovsdbserver-nb\") pod \"2c32a76b-d239-4e97-a16a-23423046cbec\" (UID: \"2c32a76b-d239-4e97-a16a-23423046cbec\") " Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.533402 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c32a76b-d239-4e97-a16a-23423046cbec-config\") pod \"2c32a76b-d239-4e97-a16a-23423046cbec\" (UID: \"2c32a76b-d239-4e97-a16a-23423046cbec\") " Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.533504 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c32a76b-d239-4e97-a16a-23423046cbec-dns-svc\") pod \"2c32a76b-d239-4e97-a16a-23423046cbec\" (UID: \"2c32a76b-d239-4e97-a16a-23423046cbec\") " Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.533537 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2c32a76b-d239-4e97-a16a-23423046cbec-ovsdbserver-sb\") pod \"2c32a76b-d239-4e97-a16a-23423046cbec\" (UID: \"2c32a76b-d239-4e97-a16a-23423046cbec\") " Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.533576 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zbpc7\" (UniqueName: \"kubernetes.io/projected/2c32a76b-d239-4e97-a16a-23423046cbec-kube-api-access-zbpc7\") pod \"2c32a76b-d239-4e97-a16a-23423046cbec\" (UID: \"2c32a76b-d239-4e97-a16a-23423046cbec\") " Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.533686 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2c32a76b-d239-4e97-a16a-23423046cbec-dns-swift-storage-0\") pod \"2c32a76b-d239-4e97-a16a-23423046cbec\" (UID: \"2c32a76b-d239-4e97-a16a-23423046cbec\") " Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.551854 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c32a76b-d239-4e97-a16a-23423046cbec-kube-api-access-zbpc7" (OuterVolumeSpecName: "kube-api-access-zbpc7") pod "2c32a76b-d239-4e97-a16a-23423046cbec" (UID: "2c32a76b-d239-4e97-a16a-23423046cbec"). InnerVolumeSpecName "kube-api-access-zbpc7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.607971 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c32a76b-d239-4e97-a16a-23423046cbec-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2c32a76b-d239-4e97-a16a-23423046cbec" (UID: "2c32a76b-d239-4e97-a16a-23423046cbec"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.609896 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c32a76b-d239-4e97-a16a-23423046cbec-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2c32a76b-d239-4e97-a16a-23423046cbec" (UID: "2c32a76b-d239-4e97-a16a-23423046cbec"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.610150 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c32a76b-d239-4e97-a16a-23423046cbec-config" (OuterVolumeSpecName: "config") pod "2c32a76b-d239-4e97-a16a-23423046cbec" (UID: "2c32a76b-d239-4e97-a16a-23423046cbec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.633385 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c32a76b-d239-4e97-a16a-23423046cbec-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2c32a76b-d239-4e97-a16a-23423046cbec" (UID: "2c32a76b-d239-4e97-a16a-23423046cbec"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.636013 4998 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c32a76b-d239-4e97-a16a-23423046cbec-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.636043 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2c32a76b-d239-4e97-a16a-23423046cbec-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.636055 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zbpc7\" (UniqueName: \"kubernetes.io/projected/2c32a76b-d239-4e97-a16a-23423046cbec-kube-api-access-zbpc7\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.636064 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c32a76b-d239-4e97-a16a-23423046cbec-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.636073 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c32a76b-d239-4e97-a16a-23423046cbec-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.636157 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c32a76b-d239-4e97-a16a-23423046cbec-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "2c32a76b-d239-4e97-a16a-23423046cbec" (UID: "2c32a76b-d239-4e97-a16a-23423046cbec"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.703698 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-78755df597-h9t98"] Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.741252 4998 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2c32a76b-d239-4e97-a16a-23423046cbec-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.808845 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-5d94bbfff8-q2v5c"] Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.941875 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-54c675984c-nl7km"] Feb 03 07:06:02 crc kubenswrapper[4998]: W0203 07:06:02.976137 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod00d2b544_495e_4a19_8e08_e8db2febc192.slice/crio-2a676f5971ff7673d2ac8c59959bbd70d088790340fde9aa331abf94d0f47ea2 WatchSource:0}: Error finding container 2a676f5971ff7673d2ac8c59959bbd70d088790340fde9aa331abf94d0f47ea2: Status 404 returned error can't find the container with id 2a676f5971ff7673d2ac8c59959bbd70d088790340fde9aa331abf94d0f47ea2 Feb 03 07:06:02 crc kubenswrapper[4998]: I0203 07:06:02.994683 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-54565899c6-9xvmq"] Feb 03 07:06:03 crc kubenswrapper[4998]: I0203 07:06:03.303273 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5d94bbfff8-q2v5c" event={"ID":"13caad28-67cf-4251-9a98-e324e6f9722a","Type":"ContainerStarted","Data":"ed5c7edf59f7b745ecdcf9391b4cc4768f798369d1d2c58753eec22b448e5b42"} Feb 03 07:06:03 crc kubenswrapper[4998]: I0203 07:06:03.311173 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-78755df597-h9t98" event={"ID":"0df5b57a-e165-41ef-8e19-30b87b9566f3","Type":"ContainerStarted","Data":"853d99251509041b74e3f3b997fb67cbc9cf4092af979f6b11aae04063d8580d"} Feb 03 07:06:03 crc kubenswrapper[4998]: I0203 07:06:03.314872 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-54565899c6-9xvmq" event={"ID":"612d7bc1-c52d-4507-a8be-5e35d017be5d","Type":"ContainerStarted","Data":"e6e565e7b2825f64b9d6b31d9cdfea00a9798fb078cc6b9306fd6159666192a4"} Feb 03 07:06:03 crc kubenswrapper[4998]: I0203 07:06:03.314914 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-54565899c6-9xvmq" event={"ID":"612d7bc1-c52d-4507-a8be-5e35d017be5d","Type":"ContainerStarted","Data":"fe0bea9ef3463fd3de58dfc5c344c558cd6043259f6ed5d16826828d149a8ffb"} Feb 03 07:06:03 crc kubenswrapper[4998]: I0203 07:06:03.321607 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86dd6887f5-rkh8v" event={"ID":"2c32a76b-d239-4e97-a16a-23423046cbec","Type":"ContainerDied","Data":"84917370cc27dbc3c0a3310d9f28b8946d30936ba5aa42fa7acfe761ff151b50"} Feb 03 07:06:03 crc kubenswrapper[4998]: I0203 07:06:03.321655 4998 scope.go:117] "RemoveContainer" containerID="f4bbf5f6172b802a40070734cdca3dd49669f9da168945d7336ed7ba5d49e1cd" Feb 03 07:06:03 crc kubenswrapper[4998]: I0203 07:06:03.321701 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86dd6887f5-rkh8v" Feb 03 07:06:03 crc kubenswrapper[4998]: I0203 07:06:03.333838 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54c675984c-nl7km" event={"ID":"00d2b544-495e-4a19-8e08-e8db2febc192","Type":"ContainerStarted","Data":"4131c8021bbec37dec0f55996271d827fd62a36b338125a98ea4b0973576292b"} Feb 03 07:06:03 crc kubenswrapper[4998]: I0203 07:06:03.333895 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54c675984c-nl7km" event={"ID":"00d2b544-495e-4a19-8e08-e8db2febc192","Type":"ContainerStarted","Data":"2a676f5971ff7673d2ac8c59959bbd70d088790340fde9aa331abf94d0f47ea2"} Feb 03 07:06:03 crc kubenswrapper[4998]: I0203 07:06:03.378093 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86dd6887f5-rkh8v"] Feb 03 07:06:03 crc kubenswrapper[4998]: I0203 07:06:03.384854 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86dd6887f5-rkh8v"] Feb 03 07:06:03 crc kubenswrapper[4998]: I0203 07:06:03.434547 4998 scope.go:117] "RemoveContainer" containerID="46b567c279100add9fdeae949e5c76a2d251f70543b418fd3c7e656effeac64d" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.348374 4998 generic.go:334] "Generic (PLEG): container finished" podID="00d2b544-495e-4a19-8e08-e8db2febc192" containerID="4131c8021bbec37dec0f55996271d827fd62a36b338125a98ea4b0973576292b" exitCode=0 Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.348542 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54c675984c-nl7km" event={"ID":"00d2b544-495e-4a19-8e08-e8db2febc192","Type":"ContainerDied","Data":"4131c8021bbec37dec0f55996271d827fd62a36b338125a98ea4b0973576292b"} Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.348730 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54c675984c-nl7km" event={"ID":"00d2b544-495e-4a19-8e08-e8db2febc192","Type":"ContainerStarted","Data":"b8da9999ae8f2365a3d69654fadb0686756b35412902d6e30a822d8dc46aa177"} Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.348783 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-54c675984c-nl7km" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.351582 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-54565899c6-9xvmq" event={"ID":"612d7bc1-c52d-4507-a8be-5e35d017be5d","Type":"ContainerStarted","Data":"c4b30d8a4a4c03acc7a63bc85d77639a31ae3f991e2dd9396a41b474190b9a3c"} Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.351730 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-54565899c6-9xvmq" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.352652 4998 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.352668 4998 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.364169 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-747cb48568-vkq22"] Feb 03 07:06:04 crc kubenswrapper[4998]: E0203 07:06:04.364525 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c32a76b-d239-4e97-a16a-23423046cbec" containerName="init" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.364541 4998 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="2c32a76b-d239-4e97-a16a-23423046cbec" containerName="init" Feb 03 07:06:04 crc kubenswrapper[4998]: E0203 07:06:04.364567 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c32a76b-d239-4e97-a16a-23423046cbec" containerName="dnsmasq-dns" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.364573 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c32a76b-d239-4e97-a16a-23423046cbec" containerName="dnsmasq-dns" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.364733 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c32a76b-d239-4e97-a16a-23423046cbec" containerName="dnsmasq-dns" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.365640 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-747cb48568-vkq22" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.368303 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.368583 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.390775 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-747cb48568-vkq22"] Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.392035 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-54c675984c-nl7km" podStartSLOduration=3.392023897 podStartE2EDuration="3.392023897s" podCreationTimestamp="2026-02-03 07:06:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:06:04.375901195 +0000 UTC m=+1202.662595021" watchObservedRunningTime="2026-02-03 07:06:04.392023897 +0000 UTC m=+1202.678717693" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.436445 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-54565899c6-9xvmq" podStartSLOduration=3.436400969 podStartE2EDuration="3.436400969s" podCreationTimestamp="2026-02-03 07:06:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:06:04.430015466 +0000 UTC m=+1202.716709272" watchObservedRunningTime="2026-02-03 07:06:04.436400969 +0000 UTC m=+1202.723094795" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.448034 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c32a76b-d239-4e97-a16a-23423046cbec" path="/var/lib/kubelet/pods/2c32a76b-d239-4e97-a16a-23423046cbec/volumes" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.474779 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/92b2a8f5-7091-4044-a057-3fc94b78439c-public-tls-certs\") pod \"barbican-api-747cb48568-vkq22\" (UID: \"92b2a8f5-7091-4044-a057-3fc94b78439c\") " pod="openstack/barbican-api-747cb48568-vkq22" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.474913 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/92b2a8f5-7091-4044-a057-3fc94b78439c-config-data-custom\") pod \"barbican-api-747cb48568-vkq22\" (UID: \"92b2a8f5-7091-4044-a057-3fc94b78439c\") " 
pod="openstack/barbican-api-747cb48568-vkq22" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.474944 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/92b2a8f5-7091-4044-a057-3fc94b78439c-logs\") pod \"barbican-api-747cb48568-vkq22\" (UID: \"92b2a8f5-7091-4044-a057-3fc94b78439c\") " pod="openstack/barbican-api-747cb48568-vkq22" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.475563 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92b2a8f5-7091-4044-a057-3fc94b78439c-combined-ca-bundle\") pod \"barbican-api-747cb48568-vkq22\" (UID: \"92b2a8f5-7091-4044-a057-3fc94b78439c\") " pod="openstack/barbican-api-747cb48568-vkq22" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.475601 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6t2n\" (UniqueName: \"kubernetes.io/projected/92b2a8f5-7091-4044-a057-3fc94b78439c-kube-api-access-h6t2n\") pod \"barbican-api-747cb48568-vkq22\" (UID: \"92b2a8f5-7091-4044-a057-3fc94b78439c\") " pod="openstack/barbican-api-747cb48568-vkq22" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.475635 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92b2a8f5-7091-4044-a057-3fc94b78439c-config-data\") pod \"barbican-api-747cb48568-vkq22\" (UID: \"92b2a8f5-7091-4044-a057-3fc94b78439c\") " pod="openstack/barbican-api-747cb48568-vkq22" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.475664 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/92b2a8f5-7091-4044-a057-3fc94b78439c-internal-tls-certs\") pod \"barbican-api-747cb48568-vkq22\" (UID: \"92b2a8f5-7091-4044-a057-3fc94b78439c\") " pod="openstack/barbican-api-747cb48568-vkq22" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.576641 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92b2a8f5-7091-4044-a057-3fc94b78439c-config-data\") pod \"barbican-api-747cb48568-vkq22\" (UID: \"92b2a8f5-7091-4044-a057-3fc94b78439c\") " pod="openstack/barbican-api-747cb48568-vkq22" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.576692 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/92b2a8f5-7091-4044-a057-3fc94b78439c-internal-tls-certs\") pod \"barbican-api-747cb48568-vkq22\" (UID: \"92b2a8f5-7091-4044-a057-3fc94b78439c\") " pod="openstack/barbican-api-747cb48568-vkq22" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.576735 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/92b2a8f5-7091-4044-a057-3fc94b78439c-public-tls-certs\") pod \"barbican-api-747cb48568-vkq22\" (UID: \"92b2a8f5-7091-4044-a057-3fc94b78439c\") " pod="openstack/barbican-api-747cb48568-vkq22" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.576780 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/92b2a8f5-7091-4044-a057-3fc94b78439c-config-data-custom\") pod 
\"barbican-api-747cb48568-vkq22\" (UID: \"92b2a8f5-7091-4044-a057-3fc94b78439c\") " pod="openstack/barbican-api-747cb48568-vkq22" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.576860 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/92b2a8f5-7091-4044-a057-3fc94b78439c-logs\") pod \"barbican-api-747cb48568-vkq22\" (UID: \"92b2a8f5-7091-4044-a057-3fc94b78439c\") " pod="openstack/barbican-api-747cb48568-vkq22" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.576965 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92b2a8f5-7091-4044-a057-3fc94b78439c-combined-ca-bundle\") pod \"barbican-api-747cb48568-vkq22\" (UID: \"92b2a8f5-7091-4044-a057-3fc94b78439c\") " pod="openstack/barbican-api-747cb48568-vkq22" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.576984 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6t2n\" (UniqueName: \"kubernetes.io/projected/92b2a8f5-7091-4044-a057-3fc94b78439c-kube-api-access-h6t2n\") pod \"barbican-api-747cb48568-vkq22\" (UID: \"92b2a8f5-7091-4044-a057-3fc94b78439c\") " pod="openstack/barbican-api-747cb48568-vkq22" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.578347 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/92b2a8f5-7091-4044-a057-3fc94b78439c-logs\") pod \"barbican-api-747cb48568-vkq22\" (UID: \"92b2a8f5-7091-4044-a057-3fc94b78439c\") " pod="openstack/barbican-api-747cb48568-vkq22" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.594147 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/92b2a8f5-7091-4044-a057-3fc94b78439c-internal-tls-certs\") pod \"barbican-api-747cb48568-vkq22\" (UID: \"92b2a8f5-7091-4044-a057-3fc94b78439c\") " pod="openstack/barbican-api-747cb48568-vkq22" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.594202 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92b2a8f5-7091-4044-a057-3fc94b78439c-combined-ca-bundle\") pod \"barbican-api-747cb48568-vkq22\" (UID: \"92b2a8f5-7091-4044-a057-3fc94b78439c\") " pod="openstack/barbican-api-747cb48568-vkq22" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.594787 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92b2a8f5-7091-4044-a057-3fc94b78439c-config-data\") pod \"barbican-api-747cb48568-vkq22\" (UID: \"92b2a8f5-7091-4044-a057-3fc94b78439c\") " pod="openstack/barbican-api-747cb48568-vkq22" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.596277 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/92b2a8f5-7091-4044-a057-3fc94b78439c-public-tls-certs\") pod \"barbican-api-747cb48568-vkq22\" (UID: \"92b2a8f5-7091-4044-a057-3fc94b78439c\") " pod="openstack/barbican-api-747cb48568-vkq22" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.599075 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/92b2a8f5-7091-4044-a057-3fc94b78439c-config-data-custom\") pod \"barbican-api-747cb48568-vkq22\" (UID: \"92b2a8f5-7091-4044-a057-3fc94b78439c\") " 
pod="openstack/barbican-api-747cb48568-vkq22" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.608491 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h6t2n\" (UniqueName: \"kubernetes.io/projected/92b2a8f5-7091-4044-a057-3fc94b78439c-kube-api-access-h6t2n\") pod \"barbican-api-747cb48568-vkq22\" (UID: \"92b2a8f5-7091-4044-a057-3fc94b78439c\") " pod="openstack/barbican-api-747cb48568-vkq22" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.690217 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-747cb48568-vkq22" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.833112 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Feb 03 07:06:04 crc kubenswrapper[4998]: I0203 07:06:04.841389 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Feb 03 07:06:05 crc kubenswrapper[4998]: I0203 07:06:05.363190 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-54565899c6-9xvmq" Feb 03 07:06:06 crc kubenswrapper[4998]: I0203 07:06:06.197588 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-747cb48568-vkq22"] Feb 03 07:06:06 crc kubenswrapper[4998]: I0203 07:06:06.455805 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5d94bbfff8-q2v5c" event={"ID":"13caad28-67cf-4251-9a98-e324e6f9722a","Type":"ContainerStarted","Data":"59b03e0b0b59b7355fdc4f5e7fb6fc2a865970667616376aeba4c7993df8fea4"} Feb 03 07:06:06 crc kubenswrapper[4998]: I0203 07:06:06.456125 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5d94bbfff8-q2v5c" event={"ID":"13caad28-67cf-4251-9a98-e324e6f9722a","Type":"ContainerStarted","Data":"cabb5051e92ea728b58b2e5f6c017812f04640a06b66da9b92064814d7881f01"} Feb 03 07:06:06 crc kubenswrapper[4998]: I0203 07:06:06.496458 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-5d94bbfff8-q2v5c" podStartSLOduration=2.679033173 podStartE2EDuration="5.496414889s" podCreationTimestamp="2026-02-03 07:06:01 +0000 UTC" firstStartedPulling="2026-02-03 07:06:02.82721693 +0000 UTC m=+1201.113910736" lastFinishedPulling="2026-02-03 07:06:05.644598656 +0000 UTC m=+1203.931292452" observedRunningTime="2026-02-03 07:06:06.493108085 +0000 UTC m=+1204.779801901" watchObservedRunningTime="2026-02-03 07:06:06.496414889 +0000 UTC m=+1204.783108715" Feb 03 07:06:06 crc kubenswrapper[4998]: I0203 07:06:06.508246 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-78755df597-h9t98" event={"ID":"0df5b57a-e165-41ef-8e19-30b87b9566f3","Type":"ContainerStarted","Data":"5b99f558d74ddfe530b75fa587287403c988c189202261dc57a486311499d5c9"} Feb 03 07:06:06 crc kubenswrapper[4998]: I0203 07:06:06.508299 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-78755df597-h9t98" event={"ID":"0df5b57a-e165-41ef-8e19-30b87b9566f3","Type":"ContainerStarted","Data":"e0270c6ae00fb3fd677409d836f450c36d50d559a6439b219567647b04b70ce8"} Feb 03 07:06:06 crc kubenswrapper[4998]: I0203 07:06:06.555418 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-78755df597-h9t98" podStartSLOduration=2.63218335 podStartE2EDuration="5.55540394s" 
podCreationTimestamp="2026-02-03 07:06:01 +0000 UTC" firstStartedPulling="2026-02-03 07:06:02.719727849 +0000 UTC m=+1201.006421655" lastFinishedPulling="2026-02-03 07:06:05.642948439 +0000 UTC m=+1203.929642245" observedRunningTime="2026-02-03 07:06:06.542430548 +0000 UTC m=+1204.829124354" watchObservedRunningTime="2026-02-03 07:06:06.55540394 +0000 UTC m=+1204.842097746" Feb 03 07:06:06 crc kubenswrapper[4998]: I0203 07:06:06.560423 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-747cb48568-vkq22" event={"ID":"92b2a8f5-7091-4044-a057-3fc94b78439c","Type":"ContainerStarted","Data":"47d1fe608f0400993fa03cdad2f24f3a0453bce91ace38179532262a167a10fa"} Feb 03 07:06:07 crc kubenswrapper[4998]: I0203 07:06:07.573696 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-47p8b" event={"ID":"0e86027e-f5e6-40ba-af5a-275b9087dcfd","Type":"ContainerStarted","Data":"9ced9e4baa28785964c8c8ba5e7e0a08637bc094e3f3ac77a9a28b2e25f37501"} Feb 03 07:06:07 crc kubenswrapper[4998]: I0203 07:06:07.577888 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-747cb48568-vkq22" event={"ID":"92b2a8f5-7091-4044-a057-3fc94b78439c","Type":"ContainerStarted","Data":"240cf161b75ce831815244c0f07efbf2f46a5553a467cc8c7c9190165768d96e"} Feb 03 07:06:07 crc kubenswrapper[4998]: I0203 07:06:07.577924 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-747cb48568-vkq22" event={"ID":"92b2a8f5-7091-4044-a057-3fc94b78439c","Type":"ContainerStarted","Data":"2796cd74bb8475cbb6d691b8b975cd135e78b04d8ba8f94e7349bbd27134e585"} Feb 03 07:06:07 crc kubenswrapper[4998]: I0203 07:06:07.577936 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-747cb48568-vkq22" Feb 03 07:06:07 crc kubenswrapper[4998]: I0203 07:06:07.578860 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-747cb48568-vkq22" Feb 03 07:06:07 crc kubenswrapper[4998]: I0203 07:06:07.599573 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-47p8b" podStartSLOduration=3.315231513 podStartE2EDuration="42.599550636s" podCreationTimestamp="2026-02-03 07:05:25 +0000 UTC" firstStartedPulling="2026-02-03 07:05:26.775685609 +0000 UTC m=+1165.062379415" lastFinishedPulling="2026-02-03 07:06:06.060004732 +0000 UTC m=+1204.346698538" observedRunningTime="2026-02-03 07:06:07.598678861 +0000 UTC m=+1205.885372687" watchObservedRunningTime="2026-02-03 07:06:07.599550636 +0000 UTC m=+1205.886244442" Feb 03 07:06:07 crc kubenswrapper[4998]: I0203 07:06:07.642664 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-747cb48568-vkq22" podStartSLOduration=3.642642811 podStartE2EDuration="3.642642811s" podCreationTimestamp="2026-02-03 07:06:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:06:07.626003394 +0000 UTC m=+1205.912697220" watchObservedRunningTime="2026-02-03 07:06:07.642642811 +0000 UTC m=+1205.929336617" Feb 03 07:06:10 crc kubenswrapper[4998]: I0203 07:06:10.006026 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-54565899c6-9xvmq" podUID="612d7bc1-c52d-4507-a8be-5e35d017be5d" containerName="barbican-api-log" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 03 07:06:11 crc kubenswrapper[4998]: I0203 07:06:11.613426 
Feb 03 07:06:11 crc kubenswrapper[4998]: I0203 07:06:11.613426 4998 generic.go:334] "Generic (PLEG): container finished" podID="0e86027e-f5e6-40ba-af5a-275b9087dcfd" containerID="9ced9e4baa28785964c8c8ba5e7e0a08637bc094e3f3ac77a9a28b2e25f37501" exitCode=0
Feb 03 07:06:11 crc kubenswrapper[4998]: I0203 07:06:11.613509 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-47p8b" event={"ID":"0e86027e-f5e6-40ba-af5a-275b9087dcfd","Type":"ContainerDied","Data":"9ced9e4baa28785964c8c8ba5e7e0a08637bc094e3f3ac77a9a28b2e25f37501"}
Feb 03 07:06:11 crc kubenswrapper[4998]: E0203 07:06:11.796856 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="500ab31f-c75b-4a96-afa1-56f868909ecb"
Feb 03 07:06:12 crc kubenswrapper[4998]: I0203 07:06:12.352884 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-54c675984c-nl7km"
Feb 03 07:06:12 crc kubenswrapper[4998]: I0203 07:06:12.403043 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6844ff7b9c-r9dfn"]
Feb 03 07:06:12 crc kubenswrapper[4998]: I0203 07:06:12.407194 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6844ff7b9c-r9dfn" podUID="478afdb4-cf55-4653-b5d3-1a81fbfd1833" containerName="dnsmasq-dns" containerID="cri-o://7b08ef25e1c94f34d90530a9b857312bd2d8527a7054c341dcb83d1f3b5a95f9" gracePeriod=10
Feb 03 07:06:12 crc kubenswrapper[4998]: I0203 07:06:12.641016 4998 generic.go:334] "Generic (PLEG): container finished" podID="478afdb4-cf55-4653-b5d3-1a81fbfd1833" containerID="7b08ef25e1c94f34d90530a9b857312bd2d8527a7054c341dcb83d1f3b5a95f9" exitCode=0
Feb 03 07:06:12 crc kubenswrapper[4998]: I0203 07:06:12.641155 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6844ff7b9c-r9dfn" event={"ID":"478afdb4-cf55-4653-b5d3-1a81fbfd1833","Type":"ContainerDied","Data":"7b08ef25e1c94f34d90530a9b857312bd2d8527a7054c341dcb83d1f3b5a95f9"}
Feb 03 07:06:12 crc kubenswrapper[4998]: I0203 07:06:12.649411 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="500ab31f-c75b-4a96-afa1-56f868909ecb" containerName="ceilometer-notification-agent" containerID="cri-o://9944aec61ca304a54ffb18464be33901737c98924ce176c77f6e3a6e417f0ecd" gracePeriod=30
Feb 03 07:06:12 crc kubenswrapper[4998]: I0203 07:06:12.649654 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"500ab31f-c75b-4a96-afa1-56f868909ecb","Type":"ContainerStarted","Data":"9851e0dbf72af679ac6715f0170289f2d037d02cdc54cd4630ff7d4cf5df39f4"}
Feb 03 07:06:12 crc kubenswrapper[4998]: I0203 07:06:12.649700 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Feb 03 07:06:12 crc kubenswrapper[4998]: I0203 07:06:12.649824 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="500ab31f-c75b-4a96-afa1-56f868909ecb" containerName="sg-core" containerID="cri-o://3e801fbf6cba0fb9db79a48ad21a7ce71845cdd7210b0c8221c56cba49b062a5" gracePeriod=30
Feb 03 07:06:12 crc kubenswrapper[4998]: I0203 07:06:12.649846 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="500ab31f-c75b-4a96-afa1-56f868909ecb" containerName="proxy-httpd"
containerID="cri-o://9851e0dbf72af679ac6715f0170289f2d037d02cdc54cd4630ff7d4cf5df39f4" gracePeriod=30 Feb 03 07:06:12 crc kubenswrapper[4998]: I0203 07:06:12.754171 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:06:12 crc kubenswrapper[4998]: I0203 07:06:12.754218 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:06:12 crc kubenswrapper[4998]: I0203 07:06:12.754252 4998 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" Feb 03 07:06:12 crc kubenswrapper[4998]: I0203 07:06:12.754951 4998 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"03cdfa2638a496a4e32ad344d4242ab7d52b707f81b68db6d7febb228a19986d"} pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 03 07:06:12 crc kubenswrapper[4998]: I0203 07:06:12.754995 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" containerID="cri-o://03cdfa2638a496a4e32ad344d4242ab7d52b707f81b68db6d7febb228a19986d" gracePeriod=600 Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.144034 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6844ff7b9c-r9dfn" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.176365 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-47p8b" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.288441 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/478afdb4-cf55-4653-b5d3-1a81fbfd1833-ovsdbserver-nb\") pod \"478afdb4-cf55-4653-b5d3-1a81fbfd1833\" (UID: \"478afdb4-cf55-4653-b5d3-1a81fbfd1833\") " Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.289984 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0e86027e-f5e6-40ba-af5a-275b9087dcfd-scripts\") pod \"0e86027e-f5e6-40ba-af5a-275b9087dcfd\" (UID: \"0e86027e-f5e6-40ba-af5a-275b9087dcfd\") " Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.290136 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/478afdb4-cf55-4653-b5d3-1a81fbfd1833-ovsdbserver-sb\") pod \"478afdb4-cf55-4653-b5d3-1a81fbfd1833\" (UID: \"478afdb4-cf55-4653-b5d3-1a81fbfd1833\") " Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.290259 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mwd2q\" (UniqueName: \"kubernetes.io/projected/0e86027e-f5e6-40ba-af5a-275b9087dcfd-kube-api-access-mwd2q\") pod \"0e86027e-f5e6-40ba-af5a-275b9087dcfd\" (UID: \"0e86027e-f5e6-40ba-af5a-275b9087dcfd\") " Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.290373 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/478afdb4-cf55-4653-b5d3-1a81fbfd1833-dns-svc\") pod \"478afdb4-cf55-4653-b5d3-1a81fbfd1833\" (UID: \"478afdb4-cf55-4653-b5d3-1a81fbfd1833\") " Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.290504 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/478afdb4-cf55-4653-b5d3-1a81fbfd1833-config\") pod \"478afdb4-cf55-4653-b5d3-1a81fbfd1833\" (UID: \"478afdb4-cf55-4653-b5d3-1a81fbfd1833\") " Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.290648 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e86027e-f5e6-40ba-af5a-275b9087dcfd-config-data\") pod \"0e86027e-f5e6-40ba-af5a-275b9087dcfd\" (UID: \"0e86027e-f5e6-40ba-af5a-275b9087dcfd\") " Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.290733 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0e86027e-f5e6-40ba-af5a-275b9087dcfd-etc-machine-id\") pod \"0e86027e-f5e6-40ba-af5a-275b9087dcfd\" (UID: \"0e86027e-f5e6-40ba-af5a-275b9087dcfd\") " Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.290838 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0e86027e-f5e6-40ba-af5a-275b9087dcfd-db-sync-config-data\") pod \"0e86027e-f5e6-40ba-af5a-275b9087dcfd\" (UID: \"0e86027e-f5e6-40ba-af5a-275b9087dcfd\") " Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.290975 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z9vk7\" (UniqueName: \"kubernetes.io/projected/478afdb4-cf55-4653-b5d3-1a81fbfd1833-kube-api-access-z9vk7\") pod \"478afdb4-cf55-4653-b5d3-1a81fbfd1833\" (UID: 
\"478afdb4-cf55-4653-b5d3-1a81fbfd1833\") " Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.291067 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e86027e-f5e6-40ba-af5a-275b9087dcfd-combined-ca-bundle\") pod \"0e86027e-f5e6-40ba-af5a-275b9087dcfd\" (UID: \"0e86027e-f5e6-40ba-af5a-275b9087dcfd\") " Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.292893 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0e86027e-f5e6-40ba-af5a-275b9087dcfd-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "0e86027e-f5e6-40ba-af5a-275b9087dcfd" (UID: "0e86027e-f5e6-40ba-af5a-275b9087dcfd"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.296929 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/478afdb4-cf55-4653-b5d3-1a81fbfd1833-kube-api-access-z9vk7" (OuterVolumeSpecName: "kube-api-access-z9vk7") pod "478afdb4-cf55-4653-b5d3-1a81fbfd1833" (UID: "478afdb4-cf55-4653-b5d3-1a81fbfd1833"). InnerVolumeSpecName "kube-api-access-z9vk7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.296919 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e86027e-f5e6-40ba-af5a-275b9087dcfd-scripts" (OuterVolumeSpecName: "scripts") pod "0e86027e-f5e6-40ba-af5a-275b9087dcfd" (UID: "0e86027e-f5e6-40ba-af5a-275b9087dcfd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.297242 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e86027e-f5e6-40ba-af5a-275b9087dcfd-kube-api-access-mwd2q" (OuterVolumeSpecName: "kube-api-access-mwd2q") pod "0e86027e-f5e6-40ba-af5a-275b9087dcfd" (UID: "0e86027e-f5e6-40ba-af5a-275b9087dcfd"). InnerVolumeSpecName "kube-api-access-mwd2q". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.298124 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e86027e-f5e6-40ba-af5a-275b9087dcfd-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "0e86027e-f5e6-40ba-af5a-275b9087dcfd" (UID: "0e86027e-f5e6-40ba-af5a-275b9087dcfd"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.344956 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e86027e-f5e6-40ba-af5a-275b9087dcfd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0e86027e-f5e6-40ba-af5a-275b9087dcfd" (UID: "0e86027e-f5e6-40ba-af5a-275b9087dcfd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.346186 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/478afdb4-cf55-4653-b5d3-1a81fbfd1833-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "478afdb4-cf55-4653-b5d3-1a81fbfd1833" (UID: "478afdb4-cf55-4653-b5d3-1a81fbfd1833"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.351227 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/478afdb4-cf55-4653-b5d3-1a81fbfd1833-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "478afdb4-cf55-4653-b5d3-1a81fbfd1833" (UID: "478afdb4-cf55-4653-b5d3-1a81fbfd1833"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.366002 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/478afdb4-cf55-4653-b5d3-1a81fbfd1833-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "478afdb4-cf55-4653-b5d3-1a81fbfd1833" (UID: "478afdb4-cf55-4653-b5d3-1a81fbfd1833"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.369455 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/478afdb4-cf55-4653-b5d3-1a81fbfd1833-config" (OuterVolumeSpecName: "config") pod "478afdb4-cf55-4653-b5d3-1a81fbfd1833" (UID: "478afdb4-cf55-4653-b5d3-1a81fbfd1833"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.390392 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e86027e-f5e6-40ba-af5a-275b9087dcfd-config-data" (OuterVolumeSpecName: "config-data") pod "0e86027e-f5e6-40ba-af5a-275b9087dcfd" (UID: "0e86027e-f5e6-40ba-af5a-275b9087dcfd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.393518 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/478afdb4-cf55-4653-b5d3-1a81fbfd1833-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.393631 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e86027e-f5e6-40ba-af5a-275b9087dcfd-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.393704 4998 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0e86027e-f5e6-40ba-af5a-275b9087dcfd-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.393803 4998 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0e86027e-f5e6-40ba-af5a-275b9087dcfd-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.393889 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z9vk7\" (UniqueName: \"kubernetes.io/projected/478afdb4-cf55-4653-b5d3-1a81fbfd1833-kube-api-access-z9vk7\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.393947 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e86027e-f5e6-40ba-af5a-275b9087dcfd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.394009 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/478afdb4-cf55-4653-b5d3-1a81fbfd1833-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.394069 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0e86027e-f5e6-40ba-af5a-275b9087dcfd-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.394120 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/478afdb4-cf55-4653-b5d3-1a81fbfd1833-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.394170 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mwd2q\" (UniqueName: \"kubernetes.io/projected/0e86027e-f5e6-40ba-af5a-275b9087dcfd-kube-api-access-mwd2q\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.394221 4998 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/478afdb4-cf55-4653-b5d3-1a81fbfd1833-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.661690 4998 generic.go:334] "Generic (PLEG): container finished" podID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerID="03cdfa2638a496a4e32ad344d4242ab7d52b707f81b68db6d7febb228a19986d" exitCode=0 Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.661750 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerDied","Data":"03cdfa2638a496a4e32ad344d4242ab7d52b707f81b68db6d7febb228a19986d"} Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.661774 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerStarted","Data":"9ab352b70406d28ffdcc38744a8dd4b0980c54ba576664d2d40fb907223c52c4"} Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.661811 4998 scope.go:117] "RemoveContainer" containerID="bb96b94f015f59d0a44787640ad68f2b0d9538506248aed8ff45ab4238aa5576" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.665058 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6844ff7b9c-r9dfn" event={"ID":"478afdb4-cf55-4653-b5d3-1a81fbfd1833","Type":"ContainerDied","Data":"50d8a2b2523f67ca51e04015e2f068c8ed3dc3660439a767c2d5c3abd3545eb9"} Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.665155 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6844ff7b9c-r9dfn" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.669643 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-47p8b" event={"ID":"0e86027e-f5e6-40ba-af5a-275b9087dcfd","Type":"ContainerDied","Data":"e1a84369220e245b316cabc375b526f990bb18ec9cec340ac25b3c8e6b5bfe0b"} Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.669680 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e1a84369220e245b316cabc375b526f990bb18ec9cec340ac25b3c8e6b5bfe0b" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.669736 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-47p8b" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.684602 4998 generic.go:334] "Generic (PLEG): container finished" podID="500ab31f-c75b-4a96-afa1-56f868909ecb" containerID="9851e0dbf72af679ac6715f0170289f2d037d02cdc54cd4630ff7d4cf5df39f4" exitCode=0 Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.684630 4998 generic.go:334] "Generic (PLEG): container finished" podID="500ab31f-c75b-4a96-afa1-56f868909ecb" containerID="3e801fbf6cba0fb9db79a48ad21a7ce71845cdd7210b0c8221c56cba49b062a5" exitCode=2 Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.684648 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"500ab31f-c75b-4a96-afa1-56f868909ecb","Type":"ContainerDied","Data":"9851e0dbf72af679ac6715f0170289f2d037d02cdc54cd4630ff7d4cf5df39f4"} Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.684673 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"500ab31f-c75b-4a96-afa1-56f868909ecb","Type":"ContainerDied","Data":"3e801fbf6cba0fb9db79a48ad21a7ce71845cdd7210b0c8221c56cba49b062a5"} Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.717730 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6844ff7b9c-r9dfn"] Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.731229 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6844ff7b9c-r9dfn"] Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.762267 4998 scope.go:117] "RemoveContainer" containerID="7b08ef25e1c94f34d90530a9b857312bd2d8527a7054c341dcb83d1f3b5a95f9" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.802597 4998 scope.go:117] "RemoveContainer" containerID="0404b0c2abdf867dc35bc044f80762eddbc1b305fa8acc2d4e9280149ceafc26" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.952779 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Feb 03 07:06:13 crc kubenswrapper[4998]: E0203 07:06:13.953227 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e86027e-f5e6-40ba-af5a-275b9087dcfd" containerName="cinder-db-sync" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.953246 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e86027e-f5e6-40ba-af5a-275b9087dcfd" containerName="cinder-db-sync" Feb 03 07:06:13 crc kubenswrapper[4998]: E0203 07:06:13.953264 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="478afdb4-cf55-4653-b5d3-1a81fbfd1833" containerName="init" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.953271 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="478afdb4-cf55-4653-b5d3-1a81fbfd1833" containerName="init" Feb 03 07:06:13 crc kubenswrapper[4998]: E0203 07:06:13.953293 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="478afdb4-cf55-4653-b5d3-1a81fbfd1833" containerName="dnsmasq-dns" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.953300 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="478afdb4-cf55-4653-b5d3-1a81fbfd1833" containerName="dnsmasq-dns" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.953516 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e86027e-f5e6-40ba-af5a-275b9087dcfd" containerName="cinder-db-sync" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.953542 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="478afdb4-cf55-4653-b5d3-1a81fbfd1833" 
containerName="dnsmasq-dns" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.955062 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.973055 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.979819 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.980081 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-kk4f4" Feb 03 07:06:13 crc kubenswrapper[4998]: I0203 07:06:13.996035 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.004331 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.062960 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-849fd69845-bz7pb"] Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.064872 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-849fd69845-bz7pb" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.113693 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f41c3046-76ec-4c0a-a3bb-1f48e6115507-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"f41c3046-76ec-4c0a-a3bb-1f48e6115507\") " pod="openstack/cinder-scheduler-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.113823 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f41c3046-76ec-4c0a-a3bb-1f48e6115507-config-data\") pod \"cinder-scheduler-0\" (UID: \"f41c3046-76ec-4c0a-a3bb-1f48e6115507\") " pod="openstack/cinder-scheduler-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.113853 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f41c3046-76ec-4c0a-a3bb-1f48e6115507-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"f41c3046-76ec-4c0a-a3bb-1f48e6115507\") " pod="openstack/cinder-scheduler-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.113892 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f41c3046-76ec-4c0a-a3bb-1f48e6115507-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"f41c3046-76ec-4c0a-a3bb-1f48e6115507\") " pod="openstack/cinder-scheduler-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.113943 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f41c3046-76ec-4c0a-a3bb-1f48e6115507-scripts\") pod \"cinder-scheduler-0\" (UID: \"f41c3046-76ec-4c0a-a3bb-1f48e6115507\") " pod="openstack/cinder-scheduler-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.113984 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldngz\" (UniqueName: 
\"kubernetes.io/projected/f41c3046-76ec-4c0a-a3bb-1f48e6115507-kube-api-access-ldngz\") pod \"cinder-scheduler-0\" (UID: \"f41c3046-76ec-4c0a-a3bb-1f48e6115507\") " pod="openstack/cinder-scheduler-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.116384 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-849fd69845-bz7pb"] Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.220671 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f41c3046-76ec-4c0a-a3bb-1f48e6115507-scripts\") pod \"cinder-scheduler-0\" (UID: \"f41c3046-76ec-4c0a-a3bb-1f48e6115507\") " pod="openstack/cinder-scheduler-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.220935 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldngz\" (UniqueName: \"kubernetes.io/projected/f41c3046-76ec-4c0a-a3bb-1f48e6115507-kube-api-access-ldngz\") pod \"cinder-scheduler-0\" (UID: \"f41c3046-76ec-4c0a-a3bb-1f48e6115507\") " pod="openstack/cinder-scheduler-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.221029 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/be1884cc-0fc6-4769-a362-5d66be382f11-ovsdbserver-nb\") pod \"dnsmasq-dns-849fd69845-bz7pb\" (UID: \"be1884cc-0fc6-4769-a362-5d66be382f11\") " pod="openstack/dnsmasq-dns-849fd69845-bz7pb" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.221057 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/be1884cc-0fc6-4769-a362-5d66be382f11-ovsdbserver-sb\") pod \"dnsmasq-dns-849fd69845-bz7pb\" (UID: \"be1884cc-0fc6-4769-a362-5d66be382f11\") " pod="openstack/dnsmasq-dns-849fd69845-bz7pb" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.221080 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f41c3046-76ec-4c0a-a3bb-1f48e6115507-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"f41c3046-76ec-4c0a-a3bb-1f48e6115507\") " pod="openstack/cinder-scheduler-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.221138 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/be1884cc-0fc6-4769-a362-5d66be382f11-dns-swift-storage-0\") pod \"dnsmasq-dns-849fd69845-bz7pb\" (UID: \"be1884cc-0fc6-4769-a362-5d66be382f11\") " pod="openstack/dnsmasq-dns-849fd69845-bz7pb" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.221195 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qtqm\" (UniqueName: \"kubernetes.io/projected/be1884cc-0fc6-4769-a362-5d66be382f11-kube-api-access-6qtqm\") pod \"dnsmasq-dns-849fd69845-bz7pb\" (UID: \"be1884cc-0fc6-4769-a362-5d66be382f11\") " pod="openstack/dnsmasq-dns-849fd69845-bz7pb" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.221257 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f41c3046-76ec-4c0a-a3bb-1f48e6115507-config-data\") pod \"cinder-scheduler-0\" (UID: \"f41c3046-76ec-4c0a-a3bb-1f48e6115507\") " pod="openstack/cinder-scheduler-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.221285 
4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f41c3046-76ec-4c0a-a3bb-1f48e6115507-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"f41c3046-76ec-4c0a-a3bb-1f48e6115507\") " pod="openstack/cinder-scheduler-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.221319 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/be1884cc-0fc6-4769-a362-5d66be382f11-config\") pod \"dnsmasq-dns-849fd69845-bz7pb\" (UID: \"be1884cc-0fc6-4769-a362-5d66be382f11\") " pod="openstack/dnsmasq-dns-849fd69845-bz7pb" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.221350 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f41c3046-76ec-4c0a-a3bb-1f48e6115507-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"f41c3046-76ec-4c0a-a3bb-1f48e6115507\") " pod="openstack/cinder-scheduler-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.221376 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/be1884cc-0fc6-4769-a362-5d66be382f11-dns-svc\") pod \"dnsmasq-dns-849fd69845-bz7pb\" (UID: \"be1884cc-0fc6-4769-a362-5d66be382f11\") " pod="openstack/dnsmasq-dns-849fd69845-bz7pb" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.221574 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f41c3046-76ec-4c0a-a3bb-1f48e6115507-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"f41c3046-76ec-4c0a-a3bb-1f48e6115507\") " pod="openstack/cinder-scheduler-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.238742 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f41c3046-76ec-4c0a-a3bb-1f48e6115507-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"f41c3046-76ec-4c0a-a3bb-1f48e6115507\") " pod="openstack/cinder-scheduler-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.243318 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f41c3046-76ec-4c0a-a3bb-1f48e6115507-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"f41c3046-76ec-4c0a-a3bb-1f48e6115507\") " pod="openstack/cinder-scheduler-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.246266 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f41c3046-76ec-4c0a-a3bb-1f48e6115507-scripts\") pod \"cinder-scheduler-0\" (UID: \"f41c3046-76ec-4c0a-a3bb-1f48e6115507\") " pod="openstack/cinder-scheduler-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.249564 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f41c3046-76ec-4c0a-a3bb-1f48e6115507-config-data\") pod \"cinder-scheduler-0\" (UID: \"f41c3046-76ec-4c0a-a3bb-1f48e6115507\") " pod="openstack/cinder-scheduler-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.270431 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldngz\" (UniqueName: \"kubernetes.io/projected/f41c3046-76ec-4c0a-a3bb-1f48e6115507-kube-api-access-ldngz\") pod \"cinder-scheduler-0\" (UID: 
\"f41c3046-76ec-4c0a-a3bb-1f48e6115507\") " pod="openstack/cinder-scheduler-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.290303 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.322687 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/be1884cc-0fc6-4769-a362-5d66be382f11-ovsdbserver-nb\") pod \"dnsmasq-dns-849fd69845-bz7pb\" (UID: \"be1884cc-0fc6-4769-a362-5d66be382f11\") " pod="openstack/dnsmasq-dns-849fd69845-bz7pb" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.322744 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/be1884cc-0fc6-4769-a362-5d66be382f11-ovsdbserver-sb\") pod \"dnsmasq-dns-849fd69845-bz7pb\" (UID: \"be1884cc-0fc6-4769-a362-5d66be382f11\") " pod="openstack/dnsmasq-dns-849fd69845-bz7pb" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.322782 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/be1884cc-0fc6-4769-a362-5d66be382f11-dns-swift-storage-0\") pod \"dnsmasq-dns-849fd69845-bz7pb\" (UID: \"be1884cc-0fc6-4769-a362-5d66be382f11\") " pod="openstack/dnsmasq-dns-849fd69845-bz7pb" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.322822 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qtqm\" (UniqueName: \"kubernetes.io/projected/be1884cc-0fc6-4769-a362-5d66be382f11-kube-api-access-6qtqm\") pod \"dnsmasq-dns-849fd69845-bz7pb\" (UID: \"be1884cc-0fc6-4769-a362-5d66be382f11\") " pod="openstack/dnsmasq-dns-849fd69845-bz7pb" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.322873 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/be1884cc-0fc6-4769-a362-5d66be382f11-config\") pod \"dnsmasq-dns-849fd69845-bz7pb\" (UID: \"be1884cc-0fc6-4769-a362-5d66be382f11\") " pod="openstack/dnsmasq-dns-849fd69845-bz7pb" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.322899 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/be1884cc-0fc6-4769-a362-5d66be382f11-dns-svc\") pod \"dnsmasq-dns-849fd69845-bz7pb\" (UID: \"be1884cc-0fc6-4769-a362-5d66be382f11\") " pod="openstack/dnsmasq-dns-849fd69845-bz7pb" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.323798 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/be1884cc-0fc6-4769-a362-5d66be382f11-dns-svc\") pod \"dnsmasq-dns-849fd69845-bz7pb\" (UID: \"be1884cc-0fc6-4769-a362-5d66be382f11\") " pod="openstack/dnsmasq-dns-849fd69845-bz7pb" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.324301 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/be1884cc-0fc6-4769-a362-5d66be382f11-ovsdbserver-nb\") pod \"dnsmasq-dns-849fd69845-bz7pb\" (UID: \"be1884cc-0fc6-4769-a362-5d66be382f11\") " pod="openstack/dnsmasq-dns-849fd69845-bz7pb" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.324801 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/be1884cc-0fc6-4769-a362-5d66be382f11-ovsdbserver-sb\") pod \"dnsmasq-dns-849fd69845-bz7pb\" (UID: \"be1884cc-0fc6-4769-a362-5d66be382f11\") " pod="openstack/dnsmasq-dns-849fd69845-bz7pb" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.325822 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/be1884cc-0fc6-4769-a362-5d66be382f11-dns-swift-storage-0\") pod \"dnsmasq-dns-849fd69845-bz7pb\" (UID: \"be1884cc-0fc6-4769-a362-5d66be382f11\") " pod="openstack/dnsmasq-dns-849fd69845-bz7pb" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.330387 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/be1884cc-0fc6-4769-a362-5d66be382f11-config\") pod \"dnsmasq-dns-849fd69845-bz7pb\" (UID: \"be1884cc-0fc6-4769-a362-5d66be382f11\") " pod="openstack/dnsmasq-dns-849fd69845-bz7pb" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.371615 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qtqm\" (UniqueName: \"kubernetes.io/projected/be1884cc-0fc6-4769-a362-5d66be382f11-kube-api-access-6qtqm\") pod \"dnsmasq-dns-849fd69845-bz7pb\" (UID: \"be1884cc-0fc6-4769-a362-5d66be382f11\") " pod="openstack/dnsmasq-dns-849fd69845-bz7pb" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.391918 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.393855 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.400092 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.410608 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.422231 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-849fd69845-bz7pb" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.450846 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="478afdb4-cf55-4653-b5d3-1a81fbfd1833" path="/var/lib/kubelet/pods/478afdb4-cf55-4653-b5d3-1a81fbfd1833/volumes" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.527001 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7cdf6753-a819-499c-ab8d-1b48d75424a6-config-data-custom\") pod \"cinder-api-0\" (UID: \"7cdf6753-a819-499c-ab8d-1b48d75424a6\") " pod="openstack/cinder-api-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.527126 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cdf6753-a819-499c-ab8d-1b48d75424a6-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"7cdf6753-a819-499c-ab8d-1b48d75424a6\") " pod="openstack/cinder-api-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.527217 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7cdf6753-a819-499c-ab8d-1b48d75424a6-etc-machine-id\") pod \"cinder-api-0\" (UID: \"7cdf6753-a819-499c-ab8d-1b48d75424a6\") " pod="openstack/cinder-api-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.527234 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7cdf6753-a819-499c-ab8d-1b48d75424a6-logs\") pod \"cinder-api-0\" (UID: \"7cdf6753-a819-499c-ab8d-1b48d75424a6\") " pod="openstack/cinder-api-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.527279 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7cdf6753-a819-499c-ab8d-1b48d75424a6-config-data\") pod \"cinder-api-0\" (UID: \"7cdf6753-a819-499c-ab8d-1b48d75424a6\") " pod="openstack/cinder-api-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.527293 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7cdf6753-a819-499c-ab8d-1b48d75424a6-scripts\") pod \"cinder-api-0\" (UID: \"7cdf6753-a819-499c-ab8d-1b48d75424a6\") " pod="openstack/cinder-api-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.527309 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rczdg\" (UniqueName: \"kubernetes.io/projected/7cdf6753-a819-499c-ab8d-1b48d75424a6-kube-api-access-rczdg\") pod \"cinder-api-0\" (UID: \"7cdf6753-a819-499c-ab8d-1b48d75424a6\") " pod="openstack/cinder-api-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.613737 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-54565899c6-9xvmq" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.629098 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7cdf6753-a819-499c-ab8d-1b48d75424a6-etc-machine-id\") pod \"cinder-api-0\" (UID: \"7cdf6753-a819-499c-ab8d-1b48d75424a6\") " pod="openstack/cinder-api-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.629164 4998 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7cdf6753-a819-499c-ab8d-1b48d75424a6-logs\") pod \"cinder-api-0\" (UID: \"7cdf6753-a819-499c-ab8d-1b48d75424a6\") " pod="openstack/cinder-api-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.629222 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7cdf6753-a819-499c-ab8d-1b48d75424a6-config-data\") pod \"cinder-api-0\" (UID: \"7cdf6753-a819-499c-ab8d-1b48d75424a6\") " pod="openstack/cinder-api-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.629238 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7cdf6753-a819-499c-ab8d-1b48d75424a6-scripts\") pod \"cinder-api-0\" (UID: \"7cdf6753-a819-499c-ab8d-1b48d75424a6\") " pod="openstack/cinder-api-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.629257 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczdg\" (UniqueName: \"kubernetes.io/projected/7cdf6753-a819-499c-ab8d-1b48d75424a6-kube-api-access-rczdg\") pod \"cinder-api-0\" (UID: \"7cdf6753-a819-499c-ab8d-1b48d75424a6\") " pod="openstack/cinder-api-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.629309 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7cdf6753-a819-499c-ab8d-1b48d75424a6-config-data-custom\") pod \"cinder-api-0\" (UID: \"7cdf6753-a819-499c-ab8d-1b48d75424a6\") " pod="openstack/cinder-api-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.629398 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cdf6753-a819-499c-ab8d-1b48d75424a6-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"7cdf6753-a819-499c-ab8d-1b48d75424a6\") " pod="openstack/cinder-api-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.630305 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7cdf6753-a819-499c-ab8d-1b48d75424a6-logs\") pod \"cinder-api-0\" (UID: \"7cdf6753-a819-499c-ab8d-1b48d75424a6\") " pod="openstack/cinder-api-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.630372 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7cdf6753-a819-499c-ab8d-1b48d75424a6-etc-machine-id\") pod \"cinder-api-0\" (UID: \"7cdf6753-a819-499c-ab8d-1b48d75424a6\") " pod="openstack/cinder-api-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.640404 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7cdf6753-a819-499c-ab8d-1b48d75424a6-config-data\") pod \"cinder-api-0\" (UID: \"7cdf6753-a819-499c-ab8d-1b48d75424a6\") " pod="openstack/cinder-api-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.641592 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7cdf6753-a819-499c-ab8d-1b48d75424a6-config-data-custom\") pod \"cinder-api-0\" (UID: \"7cdf6753-a819-499c-ab8d-1b48d75424a6\") " pod="openstack/cinder-api-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.654369 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cdf6753-a819-499c-ab8d-1b48d75424a6-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"7cdf6753-a819-499c-ab8d-1b48d75424a6\") " pod="openstack/cinder-api-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.656305 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7cdf6753-a819-499c-ab8d-1b48d75424a6-scripts\") pod \"cinder-api-0\" (UID: \"7cdf6753-a819-499c-ab8d-1b48d75424a6\") " pod="openstack/cinder-api-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.658680 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczdg\" (UniqueName: \"kubernetes.io/projected/7cdf6753-a819-499c-ab8d-1b48d75424a6-kube-api-access-rczdg\") pod \"cinder-api-0\" (UID: \"7cdf6753-a819-499c-ab8d-1b48d75424a6\") " pod="openstack/cinder-api-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.720936 4998 generic.go:334] "Generic (PLEG): container finished" podID="500ab31f-c75b-4a96-afa1-56f868909ecb" containerID="9944aec61ca304a54ffb18464be33901737c98924ce176c77f6e3a6e417f0ecd" exitCode=0 Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.720982 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"500ab31f-c75b-4a96-afa1-56f868909ecb","Type":"ContainerDied","Data":"9944aec61ca304a54ffb18464be33901737c98924ce176c77f6e3a6e417f0ecd"} Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.724243 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Feb 03 07:06:14 crc kubenswrapper[4998]: I0203 07:06:14.996627 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.028340 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-849fd69845-bz7pb"] Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.189975 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.355177 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/500ab31f-c75b-4a96-afa1-56f868909ecb-combined-ca-bundle\") pod \"500ab31f-c75b-4a96-afa1-56f868909ecb\" (UID: \"500ab31f-c75b-4a96-afa1-56f868909ecb\") " Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.355229 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/500ab31f-c75b-4a96-afa1-56f868909ecb-sg-core-conf-yaml\") pod \"500ab31f-c75b-4a96-afa1-56f868909ecb\" (UID: \"500ab31f-c75b-4a96-afa1-56f868909ecb\") " Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.355292 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/500ab31f-c75b-4a96-afa1-56f868909ecb-config-data\") pod \"500ab31f-c75b-4a96-afa1-56f868909ecb\" (UID: \"500ab31f-c75b-4a96-afa1-56f868909ecb\") " Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.355325 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/500ab31f-c75b-4a96-afa1-56f868909ecb-scripts\") pod \"500ab31f-c75b-4a96-afa1-56f868909ecb\" (UID: \"500ab31f-c75b-4a96-afa1-56f868909ecb\") " Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.355359 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/500ab31f-c75b-4a96-afa1-56f868909ecb-run-httpd\") pod \"500ab31f-c75b-4a96-afa1-56f868909ecb\" (UID: \"500ab31f-c75b-4a96-afa1-56f868909ecb\") " Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.355384 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/500ab31f-c75b-4a96-afa1-56f868909ecb-log-httpd\") pod \"500ab31f-c75b-4a96-afa1-56f868909ecb\" (UID: \"500ab31f-c75b-4a96-afa1-56f868909ecb\") " Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.355412 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rf82r\" (UniqueName: \"kubernetes.io/projected/500ab31f-c75b-4a96-afa1-56f868909ecb-kube-api-access-rf82r\") pod \"500ab31f-c75b-4a96-afa1-56f868909ecb\" (UID: \"500ab31f-c75b-4a96-afa1-56f868909ecb\") " Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.357077 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/500ab31f-c75b-4a96-afa1-56f868909ecb-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "500ab31f-c75b-4a96-afa1-56f868909ecb" (UID: "500ab31f-c75b-4a96-afa1-56f868909ecb"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.359116 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/500ab31f-c75b-4a96-afa1-56f868909ecb-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "500ab31f-c75b-4a96-afa1-56f868909ecb" (UID: "500ab31f-c75b-4a96-afa1-56f868909ecb"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.365989 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/500ab31f-c75b-4a96-afa1-56f868909ecb-kube-api-access-rf82r" (OuterVolumeSpecName: "kube-api-access-rf82r") pod "500ab31f-c75b-4a96-afa1-56f868909ecb" (UID: "500ab31f-c75b-4a96-afa1-56f868909ecb"). InnerVolumeSpecName "kube-api-access-rf82r". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.373262 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/500ab31f-c75b-4a96-afa1-56f868909ecb-scripts" (OuterVolumeSpecName: "scripts") pod "500ab31f-c75b-4a96-afa1-56f868909ecb" (UID: "500ab31f-c75b-4a96-afa1-56f868909ecb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.399531 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-54565899c6-9xvmq" Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.405855 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.411564 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/500ab31f-c75b-4a96-afa1-56f868909ecb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "500ab31f-c75b-4a96-afa1-56f868909ecb" (UID: "500ab31f-c75b-4a96-afa1-56f868909ecb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.412791 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/500ab31f-c75b-4a96-afa1-56f868909ecb-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "500ab31f-c75b-4a96-afa1-56f868909ecb" (UID: "500ab31f-c75b-4a96-afa1-56f868909ecb"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.472528 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/500ab31f-c75b-4a96-afa1-56f868909ecb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.473915 4998 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/500ab31f-c75b-4a96-afa1-56f868909ecb-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.473937 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/500ab31f-c75b-4a96-afa1-56f868909ecb-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.473949 4998 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/500ab31f-c75b-4a96-afa1-56f868909ecb-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.473961 4998 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/500ab31f-c75b-4a96-afa1-56f868909ecb-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.473972 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rf82r\" (UniqueName: \"kubernetes.io/projected/500ab31f-c75b-4a96-afa1-56f868909ecb-kube-api-access-rf82r\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.482138 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/500ab31f-c75b-4a96-afa1-56f868909ecb-config-data" (OuterVolumeSpecName: "config-data") pod "500ab31f-c75b-4a96-afa1-56f868909ecb" (UID: "500ab31f-c75b-4a96-afa1-56f868909ecb"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.574902 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/500ab31f-c75b-4a96-afa1-56f868909ecb-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.732732 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"7cdf6753-a819-499c-ab8d-1b48d75424a6","Type":"ContainerStarted","Data":"3402adf87548c1e53149adebc7d3a011a4628158f4276d0252cb9c2ab9fb66e5"} Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.736488 4998 generic.go:334] "Generic (PLEG): container finished" podID="be1884cc-0fc6-4769-a362-5d66be382f11" containerID="a15f2ca3a4307e219840bd2fea5f854f35d3e08e1aa50784744aa891be10aa54" exitCode=0 Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.736579 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-849fd69845-bz7pb" event={"ID":"be1884cc-0fc6-4769-a362-5d66be382f11","Type":"ContainerDied","Data":"a15f2ca3a4307e219840bd2fea5f854f35d3e08e1aa50784744aa891be10aa54"} Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.736630 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-849fd69845-bz7pb" event={"ID":"be1884cc-0fc6-4769-a362-5d66be382f11","Type":"ContainerStarted","Data":"0befc134c682f11b572ccd527b6e65ab83d3ff579ab7940871fbca0d84437032"} Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.740677 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"500ab31f-c75b-4a96-afa1-56f868909ecb","Type":"ContainerDied","Data":"4733dbedb9e09764ff35d83aa08770d01e8d249a6ff1bb9f7bcae1fb97503817"} Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.740715 4998 scope.go:117] "RemoveContainer" containerID="9851e0dbf72af679ac6715f0170289f2d037d02cdc54cd4630ff7d4cf5df39f4" Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.740821 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.768939 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"f41c3046-76ec-4c0a-a3bb-1f48e6115507","Type":"ContainerStarted","Data":"0b22735f7f0ba8bdb4331de1a8be023a2df81358865d1e1c03a0dd33b7357152"} Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.864375 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.960422 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.978475 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:06:15 crc kubenswrapper[4998]: E0203 07:06:15.979401 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="500ab31f-c75b-4a96-afa1-56f868909ecb" containerName="ceilometer-notification-agent" Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.979431 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="500ab31f-c75b-4a96-afa1-56f868909ecb" containerName="ceilometer-notification-agent" Feb 03 07:06:15 crc kubenswrapper[4998]: E0203 07:06:15.979475 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="500ab31f-c75b-4a96-afa1-56f868909ecb" containerName="proxy-httpd" Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.979485 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="500ab31f-c75b-4a96-afa1-56f868909ecb" containerName="proxy-httpd" Feb 03 07:06:15 crc kubenswrapper[4998]: E0203 07:06:15.979536 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="500ab31f-c75b-4a96-afa1-56f868909ecb" containerName="sg-core" Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.979551 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="500ab31f-c75b-4a96-afa1-56f868909ecb" containerName="sg-core" Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.980553 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="500ab31f-c75b-4a96-afa1-56f868909ecb" containerName="proxy-httpd" Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.980588 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="500ab31f-c75b-4a96-afa1-56f868909ecb" containerName="sg-core" Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.980618 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="500ab31f-c75b-4a96-afa1-56f868909ecb" containerName="ceilometer-notification-agent" Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.987076 4998 scope.go:117] "RemoveContainer" containerID="3e801fbf6cba0fb9db79a48ad21a7ce71845cdd7210b0c8221c56cba49b062a5" Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.987313 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:06:15 crc kubenswrapper[4998]: I0203 07:06:15.999009 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:06:16 crc kubenswrapper[4998]: I0203 07:06:16.010809 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 03 07:06:16 crc kubenswrapper[4998]: I0203 07:06:16.011013 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 03 07:06:16 crc kubenswrapper[4998]: I0203 07:06:16.053438 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2f02096-123d-472d-b9b8-0083f07a1c39-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c2f02096-123d-472d-b9b8-0083f07a1c39\") " pod="openstack/ceilometer-0" Feb 03 07:06:16 crc kubenswrapper[4998]: I0203 07:06:16.053510 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c2f02096-123d-472d-b9b8-0083f07a1c39-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c2f02096-123d-472d-b9b8-0083f07a1c39\") " pod="openstack/ceilometer-0" Feb 03 07:06:16 crc kubenswrapper[4998]: I0203 07:06:16.053559 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hdqlh\" (UniqueName: \"kubernetes.io/projected/c2f02096-123d-472d-b9b8-0083f07a1c39-kube-api-access-hdqlh\") pod \"ceilometer-0\" (UID: \"c2f02096-123d-472d-b9b8-0083f07a1c39\") " pod="openstack/ceilometer-0" Feb 03 07:06:16 crc kubenswrapper[4998]: I0203 07:06:16.053602 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c2f02096-123d-472d-b9b8-0083f07a1c39-log-httpd\") pod \"ceilometer-0\" (UID: \"c2f02096-123d-472d-b9b8-0083f07a1c39\") " pod="openstack/ceilometer-0" Feb 03 07:06:16 crc kubenswrapper[4998]: I0203 07:06:16.053680 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c2f02096-123d-472d-b9b8-0083f07a1c39-scripts\") pod \"ceilometer-0\" (UID: \"c2f02096-123d-472d-b9b8-0083f07a1c39\") " pod="openstack/ceilometer-0" Feb 03 07:06:16 crc kubenswrapper[4998]: I0203 07:06:16.053752 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2f02096-123d-472d-b9b8-0083f07a1c39-config-data\") pod \"ceilometer-0\" (UID: \"c2f02096-123d-472d-b9b8-0083f07a1c39\") " pod="openstack/ceilometer-0" Feb 03 07:06:16 crc kubenswrapper[4998]: I0203 07:06:16.053907 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c2f02096-123d-472d-b9b8-0083f07a1c39-run-httpd\") pod \"ceilometer-0\" (UID: \"c2f02096-123d-472d-b9b8-0083f07a1c39\") " pod="openstack/ceilometer-0" Feb 03 07:06:16 crc kubenswrapper[4998]: I0203 07:06:16.156632 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hdqlh\" (UniqueName: \"kubernetes.io/projected/c2f02096-123d-472d-b9b8-0083f07a1c39-kube-api-access-hdqlh\") pod \"ceilometer-0\" (UID: \"c2f02096-123d-472d-b9b8-0083f07a1c39\") " pod="openstack/ceilometer-0" Feb 03 07:06:16 crc kubenswrapper[4998]: 
I0203 07:06:16.157125 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c2f02096-123d-472d-b9b8-0083f07a1c39-log-httpd\") pod \"ceilometer-0\" (UID: \"c2f02096-123d-472d-b9b8-0083f07a1c39\") " pod="openstack/ceilometer-0" Feb 03 07:06:16 crc kubenswrapper[4998]: I0203 07:06:16.157206 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c2f02096-123d-472d-b9b8-0083f07a1c39-scripts\") pod \"ceilometer-0\" (UID: \"c2f02096-123d-472d-b9b8-0083f07a1c39\") " pod="openstack/ceilometer-0" Feb 03 07:06:16 crc kubenswrapper[4998]: I0203 07:06:16.157247 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2f02096-123d-472d-b9b8-0083f07a1c39-config-data\") pod \"ceilometer-0\" (UID: \"c2f02096-123d-472d-b9b8-0083f07a1c39\") " pod="openstack/ceilometer-0" Feb 03 07:06:16 crc kubenswrapper[4998]: I0203 07:06:16.157300 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c2f02096-123d-472d-b9b8-0083f07a1c39-run-httpd\") pod \"ceilometer-0\" (UID: \"c2f02096-123d-472d-b9b8-0083f07a1c39\") " pod="openstack/ceilometer-0" Feb 03 07:06:16 crc kubenswrapper[4998]: I0203 07:06:16.157323 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2f02096-123d-472d-b9b8-0083f07a1c39-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c2f02096-123d-472d-b9b8-0083f07a1c39\") " pod="openstack/ceilometer-0" Feb 03 07:06:16 crc kubenswrapper[4998]: I0203 07:06:16.157347 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c2f02096-123d-472d-b9b8-0083f07a1c39-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c2f02096-123d-472d-b9b8-0083f07a1c39\") " pod="openstack/ceilometer-0" Feb 03 07:06:16 crc kubenswrapper[4998]: I0203 07:06:16.160418 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c2f02096-123d-472d-b9b8-0083f07a1c39-run-httpd\") pod \"ceilometer-0\" (UID: \"c2f02096-123d-472d-b9b8-0083f07a1c39\") " pod="openstack/ceilometer-0" Feb 03 07:06:16 crc kubenswrapper[4998]: I0203 07:06:16.162016 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c2f02096-123d-472d-b9b8-0083f07a1c39-log-httpd\") pod \"ceilometer-0\" (UID: \"c2f02096-123d-472d-b9b8-0083f07a1c39\") " pod="openstack/ceilometer-0" Feb 03 07:06:16 crc kubenswrapper[4998]: I0203 07:06:16.162769 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c2f02096-123d-472d-b9b8-0083f07a1c39-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c2f02096-123d-472d-b9b8-0083f07a1c39\") " pod="openstack/ceilometer-0" Feb 03 07:06:16 crc kubenswrapper[4998]: I0203 07:06:16.171493 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c2f02096-123d-472d-b9b8-0083f07a1c39-scripts\") pod \"ceilometer-0\" (UID: \"c2f02096-123d-472d-b9b8-0083f07a1c39\") " pod="openstack/ceilometer-0" Feb 03 07:06:16 crc kubenswrapper[4998]: I0203 07:06:16.172474 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2f02096-123d-472d-b9b8-0083f07a1c39-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c2f02096-123d-472d-b9b8-0083f07a1c39\") " pod="openstack/ceilometer-0" Feb 03 07:06:16 crc kubenswrapper[4998]: I0203 07:06:16.172608 4998 scope.go:117] "RemoveContainer" containerID="9944aec61ca304a54ffb18464be33901737c98924ce176c77f6e3a6e417f0ecd" Feb 03 07:06:16 crc kubenswrapper[4998]: I0203 07:06:16.173969 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2f02096-123d-472d-b9b8-0083f07a1c39-config-data\") pod \"ceilometer-0\" (UID: \"c2f02096-123d-472d-b9b8-0083f07a1c39\") " pod="openstack/ceilometer-0" Feb 03 07:06:16 crc kubenswrapper[4998]: I0203 07:06:16.180399 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hdqlh\" (UniqueName: \"kubernetes.io/projected/c2f02096-123d-472d-b9b8-0083f07a1c39-kube-api-access-hdqlh\") pod \"ceilometer-0\" (UID: \"c2f02096-123d-472d-b9b8-0083f07a1c39\") " pod="openstack/ceilometer-0" Feb 03 07:06:16 crc kubenswrapper[4998]: I0203 07:06:16.440158 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:06:16 crc kubenswrapper[4998]: I0203 07:06:16.449765 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="500ab31f-c75b-4a96-afa1-56f868909ecb" path="/var/lib/kubelet/pods/500ab31f-c75b-4a96-afa1-56f868909ecb/volumes" Feb 03 07:06:16 crc kubenswrapper[4998]: I0203 07:06:16.452238 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-747cb48568-vkq22" Feb 03 07:06:16 crc kubenswrapper[4998]: I0203 07:06:16.797960 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"7cdf6753-a819-499c-ab8d-1b48d75424a6","Type":"ContainerStarted","Data":"385955e07e11190022b0f859b911e0b8ff400aad084228f79533cc60dedfe31c"} Feb 03 07:06:16 crc kubenswrapper[4998]: I0203 07:06:16.799253 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-849fd69845-bz7pb" event={"ID":"be1884cc-0fc6-4769-a362-5d66be382f11","Type":"ContainerStarted","Data":"6fe430dd90d58b91605ceada00d3f048616c4e1503817e5ff5e42627acce92af"} Feb 03 07:06:16 crc kubenswrapper[4998]: I0203 07:06:16.800436 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-849fd69845-bz7pb" Feb 03 07:06:17 crc kubenswrapper[4998]: I0203 07:06:17.127638 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-849fd69845-bz7pb" podStartSLOduration=4.127617301 podStartE2EDuration="4.127617301s" podCreationTimestamp="2026-02-03 07:06:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:06:16.82821394 +0000 UTC m=+1215.114907746" watchObservedRunningTime="2026-02-03 07:06:17.127617301 +0000 UTC m=+1215.414311107" Feb 03 07:06:17 crc kubenswrapper[4998]: I0203 07:06:17.146740 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:06:17 crc kubenswrapper[4998]: I0203 07:06:17.174926 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-747cb48568-vkq22" Feb 03 07:06:17 crc kubenswrapper[4998]: I0203 07:06:17.268074 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/barbican-api-54565899c6-9xvmq"] Feb 03 07:06:17 crc kubenswrapper[4998]: I0203 07:06:17.268496 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-54565899c6-9xvmq" podUID="612d7bc1-c52d-4507-a8be-5e35d017be5d" containerName="barbican-api-log" containerID="cri-o://e6e565e7b2825f64b9d6b31d9cdfea00a9798fb078cc6b9306fd6159666192a4" gracePeriod=30 Feb 03 07:06:17 crc kubenswrapper[4998]: I0203 07:06:17.268582 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-54565899c6-9xvmq" podUID="612d7bc1-c52d-4507-a8be-5e35d017be5d" containerName="barbican-api" containerID="cri-o://c4b30d8a4a4c03acc7a63bc85d77639a31ae3f991e2dd9396a41b474190b9a3c" gracePeriod=30 Feb 03 07:06:17 crc kubenswrapper[4998]: I0203 07:06:17.280705 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-54565899c6-9xvmq" podUID="612d7bc1-c52d-4507-a8be-5e35d017be5d" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.159:9311/healthcheck\": EOF" Feb 03 07:06:17 crc kubenswrapper[4998]: I0203 07:06:17.663400 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Feb 03 07:06:17 crc kubenswrapper[4998]: I0203 07:06:17.825424 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c2f02096-123d-472d-b9b8-0083f07a1c39","Type":"ContainerStarted","Data":"feeef7a04bccde203705fb90b95c9f4cee56a292e7f552ee550ae34f48057297"} Feb 03 07:06:17 crc kubenswrapper[4998]: I0203 07:06:17.831269 4998 generic.go:334] "Generic (PLEG): container finished" podID="612d7bc1-c52d-4507-a8be-5e35d017be5d" containerID="e6e565e7b2825f64b9d6b31d9cdfea00a9798fb078cc6b9306fd6159666192a4" exitCode=143 Feb 03 07:06:17 crc kubenswrapper[4998]: I0203 07:06:17.831330 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-54565899c6-9xvmq" event={"ID":"612d7bc1-c52d-4507-a8be-5e35d017be5d","Type":"ContainerDied","Data":"e6e565e7b2825f64b9d6b31d9cdfea00a9798fb078cc6b9306fd6159666192a4"} Feb 03 07:06:17 crc kubenswrapper[4998]: I0203 07:06:17.833220 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"7cdf6753-a819-499c-ab8d-1b48d75424a6","Type":"ContainerStarted","Data":"776cc65b766fa9c2731c5d100620695aca396af1086d0ae12b80b8674f4fc01e"} Feb 03 07:06:17 crc kubenswrapper[4998]: I0203 07:06:17.833608 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="7cdf6753-a819-499c-ab8d-1b48d75424a6" containerName="cinder-api-log" containerID="cri-o://385955e07e11190022b0f859b911e0b8ff400aad084228f79533cc60dedfe31c" gracePeriod=30 Feb 03 07:06:17 crc kubenswrapper[4998]: I0203 07:06:17.833697 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Feb 03 07:06:17 crc kubenswrapper[4998]: I0203 07:06:17.833973 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="7cdf6753-a819-499c-ab8d-1b48d75424a6" containerName="cinder-api" containerID="cri-o://776cc65b766fa9c2731c5d100620695aca396af1086d0ae12b80b8674f4fc01e" gracePeriod=30 Feb 03 07:06:17 crc kubenswrapper[4998]: I0203 07:06:17.848290 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" 
event={"ID":"f41c3046-76ec-4c0a-a3bb-1f48e6115507","Type":"ContainerStarted","Data":"09a3d5babb3a21ce940580118c5d861f90b5adb3f7d68c536fc501ee43ed649e"} Feb 03 07:06:17 crc kubenswrapper[4998]: I0203 07:06:17.859053 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.859034174 podStartE2EDuration="3.859034174s" podCreationTimestamp="2026-02-03 07:06:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:06:17.855206054 +0000 UTC m=+1216.141899870" watchObservedRunningTime="2026-02-03 07:06:17.859034174 +0000 UTC m=+1216.145727980" Feb 03 07:06:18 crc kubenswrapper[4998]: I0203 07:06:18.890412 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c2f02096-123d-472d-b9b8-0083f07a1c39","Type":"ContainerStarted","Data":"276b273455e2be4c64a0b5e8c8bc0ac251a5dc82b27ff29f590dd638ceae4342"} Feb 03 07:06:18 crc kubenswrapper[4998]: I0203 07:06:18.909750 4998 generic.go:334] "Generic (PLEG): container finished" podID="7cdf6753-a819-499c-ab8d-1b48d75424a6" containerID="385955e07e11190022b0f859b911e0b8ff400aad084228f79533cc60dedfe31c" exitCode=143 Feb 03 07:06:18 crc kubenswrapper[4998]: I0203 07:06:18.910187 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"7cdf6753-a819-499c-ab8d-1b48d75424a6","Type":"ContainerDied","Data":"385955e07e11190022b0f859b911e0b8ff400aad084228f79533cc60dedfe31c"} Feb 03 07:06:18 crc kubenswrapper[4998]: I0203 07:06:18.916328 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"f41c3046-76ec-4c0a-a3bb-1f48e6115507","Type":"ContainerStarted","Data":"37a88c371d600608a1c28bac5963c3f7e2116377ddc51da14cc9710f2759dcaf"} Feb 03 07:06:18 crc kubenswrapper[4998]: I0203 07:06:18.957412 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.990324075 podStartE2EDuration="5.957389702s" podCreationTimestamp="2026-02-03 07:06:13 +0000 UTC" firstStartedPulling="2026-02-03 07:06:15.026981965 +0000 UTC m=+1213.313675771" lastFinishedPulling="2026-02-03 07:06:15.994047592 +0000 UTC m=+1214.280741398" observedRunningTime="2026-02-03 07:06:18.94754016 +0000 UTC m=+1217.234233986" watchObservedRunningTime="2026-02-03 07:06:18.957389702 +0000 UTC m=+1217.244083508" Feb 03 07:06:19 crc kubenswrapper[4998]: I0203 07:06:19.291191 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Feb 03 07:06:20 crc kubenswrapper[4998]: I0203 07:06:20.762360 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-54565899c6-9xvmq" podUID="612d7bc1-c52d-4507-a8be-5e35d017be5d" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.159:9311/healthcheck\": read tcp 10.217.0.2:60106->10.217.0.159:9311: read: connection reset by peer" Feb 03 07:06:20 crc kubenswrapper[4998]: I0203 07:06:20.762361 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-54565899c6-9xvmq" podUID="612d7bc1-c52d-4507-a8be-5e35d017be5d" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.159:9311/healthcheck\": read tcp 10.217.0.2:60112->10.217.0.159:9311: read: connection reset by peer" Feb 03 07:06:20 crc kubenswrapper[4998]: I0203 07:06:20.940872 4998 generic.go:334] "Generic (PLEG): 
container finished" podID="612d7bc1-c52d-4507-a8be-5e35d017be5d" containerID="c4b30d8a4a4c03acc7a63bc85d77639a31ae3f991e2dd9396a41b474190b9a3c" exitCode=0 Feb 03 07:06:20 crc kubenswrapper[4998]: I0203 07:06:20.940946 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-54565899c6-9xvmq" event={"ID":"612d7bc1-c52d-4507-a8be-5e35d017be5d","Type":"ContainerDied","Data":"c4b30d8a4a4c03acc7a63bc85d77639a31ae3f991e2dd9396a41b474190b9a3c"} Feb 03 07:06:20 crc kubenswrapper[4998]: I0203 07:06:20.950539 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c2f02096-123d-472d-b9b8-0083f07a1c39","Type":"ContainerStarted","Data":"d9ed4bfead5f617e4d802af2a38ec102e9c352fa063909946be14e658336963b"} Feb 03 07:06:20 crc kubenswrapper[4998]: I0203 07:06:20.950572 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c2f02096-123d-472d-b9b8-0083f07a1c39","Type":"ContainerStarted","Data":"e7f498d8f6533ce735923bd13aec435c837c30da33742475080c9a11194c318d"} Feb 03 07:06:21 crc kubenswrapper[4998]: I0203 07:06:21.210342 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-54565899c6-9xvmq" Feb 03 07:06:21 crc kubenswrapper[4998]: I0203 07:06:21.363204 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/612d7bc1-c52d-4507-a8be-5e35d017be5d-config-data-custom\") pod \"612d7bc1-c52d-4507-a8be-5e35d017be5d\" (UID: \"612d7bc1-c52d-4507-a8be-5e35d017be5d\") " Feb 03 07:06:21 crc kubenswrapper[4998]: I0203 07:06:21.363574 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/612d7bc1-c52d-4507-a8be-5e35d017be5d-combined-ca-bundle\") pod \"612d7bc1-c52d-4507-a8be-5e35d017be5d\" (UID: \"612d7bc1-c52d-4507-a8be-5e35d017be5d\") " Feb 03 07:06:21 crc kubenswrapper[4998]: I0203 07:06:21.363591 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/612d7bc1-c52d-4507-a8be-5e35d017be5d-config-data\") pod \"612d7bc1-c52d-4507-a8be-5e35d017be5d\" (UID: \"612d7bc1-c52d-4507-a8be-5e35d017be5d\") " Feb 03 07:06:21 crc kubenswrapper[4998]: I0203 07:06:21.363628 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/612d7bc1-c52d-4507-a8be-5e35d017be5d-logs\") pod \"612d7bc1-c52d-4507-a8be-5e35d017be5d\" (UID: \"612d7bc1-c52d-4507-a8be-5e35d017be5d\") " Feb 03 07:06:21 crc kubenswrapper[4998]: I0203 07:06:21.363652 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-75grb\" (UniqueName: \"kubernetes.io/projected/612d7bc1-c52d-4507-a8be-5e35d017be5d-kube-api-access-75grb\") pod \"612d7bc1-c52d-4507-a8be-5e35d017be5d\" (UID: \"612d7bc1-c52d-4507-a8be-5e35d017be5d\") " Feb 03 07:06:21 crc kubenswrapper[4998]: I0203 07:06:21.364363 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/612d7bc1-c52d-4507-a8be-5e35d017be5d-logs" (OuterVolumeSpecName: "logs") pod "612d7bc1-c52d-4507-a8be-5e35d017be5d" (UID: "612d7bc1-c52d-4507-a8be-5e35d017be5d"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:06:21 crc kubenswrapper[4998]: I0203 07:06:21.374932 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/612d7bc1-c52d-4507-a8be-5e35d017be5d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "612d7bc1-c52d-4507-a8be-5e35d017be5d" (UID: "612d7bc1-c52d-4507-a8be-5e35d017be5d"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:06:21 crc kubenswrapper[4998]: I0203 07:06:21.377806 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/612d7bc1-c52d-4507-a8be-5e35d017be5d-kube-api-access-75grb" (OuterVolumeSpecName: "kube-api-access-75grb") pod "612d7bc1-c52d-4507-a8be-5e35d017be5d" (UID: "612d7bc1-c52d-4507-a8be-5e35d017be5d"). InnerVolumeSpecName "kube-api-access-75grb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:06:21 crc kubenswrapper[4998]: I0203 07:06:21.391271 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/612d7bc1-c52d-4507-a8be-5e35d017be5d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "612d7bc1-c52d-4507-a8be-5e35d017be5d" (UID: "612d7bc1-c52d-4507-a8be-5e35d017be5d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:06:21 crc kubenswrapper[4998]: I0203 07:06:21.420245 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/612d7bc1-c52d-4507-a8be-5e35d017be5d-config-data" (OuterVolumeSpecName: "config-data") pod "612d7bc1-c52d-4507-a8be-5e35d017be5d" (UID: "612d7bc1-c52d-4507-a8be-5e35d017be5d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:06:21 crc kubenswrapper[4998]: I0203 07:06:21.465809 4998 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/612d7bc1-c52d-4507-a8be-5e35d017be5d-logs\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:21 crc kubenswrapper[4998]: I0203 07:06:21.465842 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-75grb\" (UniqueName: \"kubernetes.io/projected/612d7bc1-c52d-4507-a8be-5e35d017be5d-kube-api-access-75grb\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:21 crc kubenswrapper[4998]: I0203 07:06:21.465853 4998 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/612d7bc1-c52d-4507-a8be-5e35d017be5d-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:21 crc kubenswrapper[4998]: I0203 07:06:21.465861 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/612d7bc1-c52d-4507-a8be-5e35d017be5d-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:21 crc kubenswrapper[4998]: I0203 07:06:21.465872 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/612d7bc1-c52d-4507-a8be-5e35d017be5d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:21 crc kubenswrapper[4998]: I0203 07:06:21.960960 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-54565899c6-9xvmq" event={"ID":"612d7bc1-c52d-4507-a8be-5e35d017be5d","Type":"ContainerDied","Data":"fe0bea9ef3463fd3de58dfc5c344c558cd6043259f6ed5d16826828d149a8ffb"} Feb 03 07:06:21 crc kubenswrapper[4998]: I0203 07:06:21.960999 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-54565899c6-9xvmq" Feb 03 07:06:21 crc kubenswrapper[4998]: I0203 07:06:21.961013 4998 scope.go:117] "RemoveContainer" containerID="c4b30d8a4a4c03acc7a63bc85d77639a31ae3f991e2dd9396a41b474190b9a3c" Feb 03 07:06:21 crc kubenswrapper[4998]: I0203 07:06:21.984375 4998 scope.go:117] "RemoveContainer" containerID="e6e565e7b2825f64b9d6b31d9cdfea00a9798fb078cc6b9306fd6159666192a4" Feb 03 07:06:21 crc kubenswrapper[4998]: I0203 07:06:21.996529 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-54565899c6-9xvmq"] Feb 03 07:06:22 crc kubenswrapper[4998]: I0203 07:06:22.024719 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-54565899c6-9xvmq"] Feb 03 07:06:22 crc kubenswrapper[4998]: I0203 07:06:22.440771 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="612d7bc1-c52d-4507-a8be-5e35d017be5d" path="/var/lib/kubelet/pods/612d7bc1-c52d-4507-a8be-5e35d017be5d/volumes" Feb 03 07:06:22 crc kubenswrapper[4998]: I0203 07:06:22.971432 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c2f02096-123d-472d-b9b8-0083f07a1c39","Type":"ContainerStarted","Data":"d06782cbd10ecb30fa6f586c2374c1f4483c16ae1abd0337db2c800abbcc66c9"} Feb 03 07:06:22 crc kubenswrapper[4998]: I0203 07:06:22.971973 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 03 07:06:23 crc kubenswrapper[4998]: I0203 07:06:23.541496 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-7787cd6574-rthd9" Feb 03 07:06:23 crc kubenswrapper[4998]: I0203 07:06:23.576287 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.236501032 podStartE2EDuration="8.576248129s" podCreationTimestamp="2026-02-03 07:06:15 +0000 UTC" firstStartedPulling="2026-02-03 07:06:17.159998399 +0000 UTC m=+1215.446692205" lastFinishedPulling="2026-02-03 07:06:22.499745496 +0000 UTC m=+1220.786439302" observedRunningTime="2026-02-03 07:06:23.003505314 +0000 UTC m=+1221.290199160" watchObservedRunningTime="2026-02-03 07:06:23.576248129 +0000 UTC m=+1221.862941935" Feb 03 07:06:23 crc kubenswrapper[4998]: I0203 07:06:23.796206 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-6b56d78fc-pz4b8"] Feb 03 07:06:23 crc kubenswrapper[4998]: I0203 07:06:23.796480 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-6b56d78fc-pz4b8" podUID="279352fa-2ab8-4816-ae30-37c7cf2f97ec" containerName="neutron-api" containerID="cri-o://eec1ec0874728148f23cbae4eee334076a34779b6923eff41f7134e28944a52b" gracePeriod=30 Feb 03 07:06:23 crc kubenswrapper[4998]: I0203 07:06:23.796918 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-6b56d78fc-pz4b8" podUID="279352fa-2ab8-4816-ae30-37c7cf2f97ec" containerName="neutron-httpd" containerID="cri-o://8705db3ca6fa78c8dfd60501654b6690ab858f596ea50c8b59bd5797f6b8f4f7" gracePeriod=30 Feb 03 07:06:23 crc kubenswrapper[4998]: I0203 07:06:23.827174 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-594c6c97c7-9bqhd"] Feb 03 07:06:23 crc kubenswrapper[4998]: E0203 07:06:23.827981 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="612d7bc1-c52d-4507-a8be-5e35d017be5d" containerName="barbican-api-log" Feb 03 07:06:23 crc kubenswrapper[4998]: I0203 07:06:23.828071 
4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="612d7bc1-c52d-4507-a8be-5e35d017be5d" containerName="barbican-api-log" Feb 03 07:06:23 crc kubenswrapper[4998]: E0203 07:06:23.828143 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="612d7bc1-c52d-4507-a8be-5e35d017be5d" containerName="barbican-api" Feb 03 07:06:23 crc kubenswrapper[4998]: I0203 07:06:23.828195 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="612d7bc1-c52d-4507-a8be-5e35d017be5d" containerName="barbican-api" Feb 03 07:06:23 crc kubenswrapper[4998]: I0203 07:06:23.828409 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="612d7bc1-c52d-4507-a8be-5e35d017be5d" containerName="barbican-api-log" Feb 03 07:06:23 crc kubenswrapper[4998]: I0203 07:06:23.828499 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="612d7bc1-c52d-4507-a8be-5e35d017be5d" containerName="barbican-api" Feb 03 07:06:23 crc kubenswrapper[4998]: I0203 07:06:23.829695 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-594c6c97c7-9bqhd" Feb 03 07:06:23 crc kubenswrapper[4998]: I0203 07:06:23.845718 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-594c6c97c7-9bqhd"] Feb 03 07:06:23 crc kubenswrapper[4998]: I0203 07:06:23.902419 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-6b56d78fc-pz4b8" podUID="279352fa-2ab8-4816-ae30-37c7cf2f97ec" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.153:9696/\": read tcp 10.217.0.2:58702->10.217.0.153:9696: read: connection reset by peer" Feb 03 07:06:23 crc kubenswrapper[4998]: I0203 07:06:23.992017 4998 generic.go:334] "Generic (PLEG): container finished" podID="279352fa-2ab8-4816-ae30-37c7cf2f97ec" containerID="8705db3ca6fa78c8dfd60501654b6690ab858f596ea50c8b59bd5797f6b8f4f7" exitCode=0 Feb 03 07:06:23 crc kubenswrapper[4998]: I0203 07:06:23.992125 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6b56d78fc-pz4b8" event={"ID":"279352fa-2ab8-4816-ae30-37c7cf2f97ec","Type":"ContainerDied","Data":"8705db3ca6fa78c8dfd60501654b6690ab858f596ea50c8b59bd5797f6b8f4f7"} Feb 03 07:06:24 crc kubenswrapper[4998]: I0203 07:06:24.022444 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-internal-tls-certs\") pod \"neutron-594c6c97c7-9bqhd\" (UID: \"e24d47fe-485b-4ceb-bdab-25b10ac92fa2\") " pod="openstack/neutron-594c6c97c7-9bqhd" Feb 03 07:06:24 crc kubenswrapper[4998]: I0203 07:06:24.022507 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vvgmm\" (UniqueName: \"kubernetes.io/projected/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-kube-api-access-vvgmm\") pod \"neutron-594c6c97c7-9bqhd\" (UID: \"e24d47fe-485b-4ceb-bdab-25b10ac92fa2\") " pod="openstack/neutron-594c6c97c7-9bqhd" Feb 03 07:06:24 crc kubenswrapper[4998]: I0203 07:06:24.022535 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-public-tls-certs\") pod \"neutron-594c6c97c7-9bqhd\" (UID: \"e24d47fe-485b-4ceb-bdab-25b10ac92fa2\") " pod="openstack/neutron-594c6c97c7-9bqhd" Feb 03 07:06:24 crc kubenswrapper[4998]: I0203 07:06:24.022556 4998 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-ovndb-tls-certs\") pod \"neutron-594c6c97c7-9bqhd\" (UID: \"e24d47fe-485b-4ceb-bdab-25b10ac92fa2\") " pod="openstack/neutron-594c6c97c7-9bqhd" Feb 03 07:06:24 crc kubenswrapper[4998]: I0203 07:06:24.022591 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-httpd-config\") pod \"neutron-594c6c97c7-9bqhd\" (UID: \"e24d47fe-485b-4ceb-bdab-25b10ac92fa2\") " pod="openstack/neutron-594c6c97c7-9bqhd" Feb 03 07:06:24 crc kubenswrapper[4998]: I0203 07:06:24.022604 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-combined-ca-bundle\") pod \"neutron-594c6c97c7-9bqhd\" (UID: \"e24d47fe-485b-4ceb-bdab-25b10ac92fa2\") " pod="openstack/neutron-594c6c97c7-9bqhd" Feb 03 07:06:24 crc kubenswrapper[4998]: I0203 07:06:24.022685 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-config\") pod \"neutron-594c6c97c7-9bqhd\" (UID: \"e24d47fe-485b-4ceb-bdab-25b10ac92fa2\") " pod="openstack/neutron-594c6c97c7-9bqhd" Feb 03 07:06:24 crc kubenswrapper[4998]: I0203 07:06:24.124935 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-internal-tls-certs\") pod \"neutron-594c6c97c7-9bqhd\" (UID: \"e24d47fe-485b-4ceb-bdab-25b10ac92fa2\") " pod="openstack/neutron-594c6c97c7-9bqhd" Feb 03 07:06:24 crc kubenswrapper[4998]: I0203 07:06:24.125018 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vvgmm\" (UniqueName: \"kubernetes.io/projected/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-kube-api-access-vvgmm\") pod \"neutron-594c6c97c7-9bqhd\" (UID: \"e24d47fe-485b-4ceb-bdab-25b10ac92fa2\") " pod="openstack/neutron-594c6c97c7-9bqhd" Feb 03 07:06:24 crc kubenswrapper[4998]: I0203 07:06:24.125063 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-public-tls-certs\") pod \"neutron-594c6c97c7-9bqhd\" (UID: \"e24d47fe-485b-4ceb-bdab-25b10ac92fa2\") " pod="openstack/neutron-594c6c97c7-9bqhd" Feb 03 07:06:24 crc kubenswrapper[4998]: I0203 07:06:24.125086 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-ovndb-tls-certs\") pod \"neutron-594c6c97c7-9bqhd\" (UID: \"e24d47fe-485b-4ceb-bdab-25b10ac92fa2\") " pod="openstack/neutron-594c6c97c7-9bqhd" Feb 03 07:06:24 crc kubenswrapper[4998]: I0203 07:06:24.125135 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-httpd-config\") pod \"neutron-594c6c97c7-9bqhd\" (UID: \"e24d47fe-485b-4ceb-bdab-25b10ac92fa2\") " pod="openstack/neutron-594c6c97c7-9bqhd" Feb 03 07:06:24 crc kubenswrapper[4998]: I0203 07:06:24.125157 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-combined-ca-bundle\") pod \"neutron-594c6c97c7-9bqhd\" (UID: \"e24d47fe-485b-4ceb-bdab-25b10ac92fa2\") " pod="openstack/neutron-594c6c97c7-9bqhd" Feb 03 07:06:24 crc kubenswrapper[4998]: I0203 07:06:24.125279 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-config\") pod \"neutron-594c6c97c7-9bqhd\" (UID: \"e24d47fe-485b-4ceb-bdab-25b10ac92fa2\") " pod="openstack/neutron-594c6c97c7-9bqhd" Feb 03 07:06:24 crc kubenswrapper[4998]: I0203 07:06:24.130899 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-httpd-config\") pod \"neutron-594c6c97c7-9bqhd\" (UID: \"e24d47fe-485b-4ceb-bdab-25b10ac92fa2\") " pod="openstack/neutron-594c6c97c7-9bqhd" Feb 03 07:06:24 crc kubenswrapper[4998]: I0203 07:06:24.131193 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-internal-tls-certs\") pod \"neutron-594c6c97c7-9bqhd\" (UID: \"e24d47fe-485b-4ceb-bdab-25b10ac92fa2\") " pod="openstack/neutron-594c6c97c7-9bqhd" Feb 03 07:06:24 crc kubenswrapper[4998]: I0203 07:06:24.131244 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-combined-ca-bundle\") pod \"neutron-594c6c97c7-9bqhd\" (UID: \"e24d47fe-485b-4ceb-bdab-25b10ac92fa2\") " pod="openstack/neutron-594c6c97c7-9bqhd" Feb 03 07:06:24 crc kubenswrapper[4998]: I0203 07:06:24.131353 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-ovndb-tls-certs\") pod \"neutron-594c6c97c7-9bqhd\" (UID: \"e24d47fe-485b-4ceb-bdab-25b10ac92fa2\") " pod="openstack/neutron-594c6c97c7-9bqhd" Feb 03 07:06:24 crc kubenswrapper[4998]: I0203 07:06:24.131570 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-config\") pod \"neutron-594c6c97c7-9bqhd\" (UID: \"e24d47fe-485b-4ceb-bdab-25b10ac92fa2\") " pod="openstack/neutron-594c6c97c7-9bqhd" Feb 03 07:06:24 crc kubenswrapper[4998]: I0203 07:06:24.146347 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-public-tls-certs\") pod \"neutron-594c6c97c7-9bqhd\" (UID: \"e24d47fe-485b-4ceb-bdab-25b10ac92fa2\") " pod="openstack/neutron-594c6c97c7-9bqhd" Feb 03 07:06:24 crc kubenswrapper[4998]: I0203 07:06:24.153167 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vvgmm\" (UniqueName: \"kubernetes.io/projected/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-kube-api-access-vvgmm\") pod \"neutron-594c6c97c7-9bqhd\" (UID: \"e24d47fe-485b-4ceb-bdab-25b10ac92fa2\") " pod="openstack/neutron-594c6c97c7-9bqhd" Feb 03 07:06:24 crc kubenswrapper[4998]: I0203 07:06:24.424019 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-849fd69845-bz7pb" Feb 03 07:06:24 crc kubenswrapper[4998]: I0203 07:06:24.457976 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-594c6c97c7-9bqhd" Feb 03 07:06:24 crc kubenswrapper[4998]: I0203 07:06:24.516190 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-54c675984c-nl7km"] Feb 03 07:06:24 crc kubenswrapper[4998]: I0203 07:06:24.516458 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-54c675984c-nl7km" podUID="00d2b544-495e-4a19-8e08-e8db2febc192" containerName="dnsmasq-dns" containerID="cri-o://b8da9999ae8f2365a3d69654fadb0686756b35412902d6e30a822d8dc46aa177" gracePeriod=10 Feb 03 07:06:24 crc kubenswrapper[4998]: I0203 07:06:24.518650 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Feb 03 07:06:24 crc kubenswrapper[4998]: I0203 07:06:24.604574 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 03 07:06:25 crc kubenswrapper[4998]: I0203 07:06:25.005312 4998 generic.go:334] "Generic (PLEG): container finished" podID="00d2b544-495e-4a19-8e08-e8db2febc192" containerID="b8da9999ae8f2365a3d69654fadb0686756b35412902d6e30a822d8dc46aa177" exitCode=0 Feb 03 07:06:25 crc kubenswrapper[4998]: I0203 07:06:25.005582 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="f41c3046-76ec-4c0a-a3bb-1f48e6115507" containerName="cinder-scheduler" containerID="cri-o://09a3d5babb3a21ce940580118c5d861f90b5adb3f7d68c536fc501ee43ed649e" gracePeriod=30 Feb 03 07:06:25 crc kubenswrapper[4998]: I0203 07:06:25.005748 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54c675984c-nl7km" event={"ID":"00d2b544-495e-4a19-8e08-e8db2febc192","Type":"ContainerDied","Data":"b8da9999ae8f2365a3d69654fadb0686756b35412902d6e30a822d8dc46aa177"} Feb 03 07:06:25 crc kubenswrapper[4998]: I0203 07:06:25.005810 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="f41c3046-76ec-4c0a-a3bb-1f48e6115507" containerName="probe" containerID="cri-o://37a88c371d600608a1c28bac5963c3f7e2116377ddc51da14cc9710f2759dcaf" gracePeriod=30 Feb 03 07:06:25 crc kubenswrapper[4998]: I0203 07:06:25.107370 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-594c6c97c7-9bqhd"] Feb 03 07:06:25 crc kubenswrapper[4998]: I0203 07:06:25.296636 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-54c675984c-nl7km" Feb 03 07:06:25 crc kubenswrapper[4998]: I0203 07:06:25.353351 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j7nft\" (UniqueName: \"kubernetes.io/projected/00d2b544-495e-4a19-8e08-e8db2febc192-kube-api-access-j7nft\") pod \"00d2b544-495e-4a19-8e08-e8db2febc192\" (UID: \"00d2b544-495e-4a19-8e08-e8db2febc192\") " Feb 03 07:06:25 crc kubenswrapper[4998]: I0203 07:06:25.353419 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/00d2b544-495e-4a19-8e08-e8db2febc192-dns-swift-storage-0\") pod \"00d2b544-495e-4a19-8e08-e8db2febc192\" (UID: \"00d2b544-495e-4a19-8e08-e8db2febc192\") " Feb 03 07:06:25 crc kubenswrapper[4998]: I0203 07:06:25.353503 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/00d2b544-495e-4a19-8e08-e8db2febc192-ovsdbserver-sb\") pod \"00d2b544-495e-4a19-8e08-e8db2febc192\" (UID: \"00d2b544-495e-4a19-8e08-e8db2febc192\") " Feb 03 07:06:25 crc kubenswrapper[4998]: I0203 07:06:25.353558 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/00d2b544-495e-4a19-8e08-e8db2febc192-dns-svc\") pod \"00d2b544-495e-4a19-8e08-e8db2febc192\" (UID: \"00d2b544-495e-4a19-8e08-e8db2febc192\") " Feb 03 07:06:25 crc kubenswrapper[4998]: I0203 07:06:25.353600 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/00d2b544-495e-4a19-8e08-e8db2febc192-ovsdbserver-nb\") pod \"00d2b544-495e-4a19-8e08-e8db2febc192\" (UID: \"00d2b544-495e-4a19-8e08-e8db2febc192\") " Feb 03 07:06:25 crc kubenswrapper[4998]: I0203 07:06:25.353629 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/00d2b544-495e-4a19-8e08-e8db2febc192-config\") pod \"00d2b544-495e-4a19-8e08-e8db2febc192\" (UID: \"00d2b544-495e-4a19-8e08-e8db2febc192\") " Feb 03 07:06:25 crc kubenswrapper[4998]: I0203 07:06:25.369105 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00d2b544-495e-4a19-8e08-e8db2febc192-kube-api-access-j7nft" (OuterVolumeSpecName: "kube-api-access-j7nft") pod "00d2b544-495e-4a19-8e08-e8db2febc192" (UID: "00d2b544-495e-4a19-8e08-e8db2febc192"). InnerVolumeSpecName "kube-api-access-j7nft". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:06:25 crc kubenswrapper[4998]: I0203 07:06:25.403456 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/00d2b544-495e-4a19-8e08-e8db2febc192-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "00d2b544-495e-4a19-8e08-e8db2febc192" (UID: "00d2b544-495e-4a19-8e08-e8db2febc192"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:06:25 crc kubenswrapper[4998]: I0203 07:06:25.405952 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/00d2b544-495e-4a19-8e08-e8db2febc192-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "00d2b544-495e-4a19-8e08-e8db2febc192" (UID: "00d2b544-495e-4a19-8e08-e8db2febc192"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:06:25 crc kubenswrapper[4998]: I0203 07:06:25.415325 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/00d2b544-495e-4a19-8e08-e8db2febc192-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "00d2b544-495e-4a19-8e08-e8db2febc192" (UID: "00d2b544-495e-4a19-8e08-e8db2febc192"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:06:25 crc kubenswrapper[4998]: I0203 07:06:25.425026 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/00d2b544-495e-4a19-8e08-e8db2febc192-config" (OuterVolumeSpecName: "config") pod "00d2b544-495e-4a19-8e08-e8db2febc192" (UID: "00d2b544-495e-4a19-8e08-e8db2febc192"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:06:25 crc kubenswrapper[4998]: I0203 07:06:25.429249 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/00d2b544-495e-4a19-8e08-e8db2febc192-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "00d2b544-495e-4a19-8e08-e8db2febc192" (UID: "00d2b544-495e-4a19-8e08-e8db2febc192"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:06:25 crc kubenswrapper[4998]: I0203 07:06:25.455078 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/00d2b544-495e-4a19-8e08-e8db2febc192-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:25 crc kubenswrapper[4998]: I0203 07:06:25.455120 4998 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/00d2b544-495e-4a19-8e08-e8db2febc192-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:25 crc kubenswrapper[4998]: I0203 07:06:25.455133 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/00d2b544-495e-4a19-8e08-e8db2febc192-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:25 crc kubenswrapper[4998]: I0203 07:06:25.455144 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/00d2b544-495e-4a19-8e08-e8db2febc192-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:25 crc kubenswrapper[4998]: I0203 07:06:25.455156 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j7nft\" (UniqueName: \"kubernetes.io/projected/00d2b544-495e-4a19-8e08-e8db2febc192-kube-api-access-j7nft\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:25 crc kubenswrapper[4998]: I0203 07:06:25.455170 4998 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/00d2b544-495e-4a19-8e08-e8db2febc192-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:26 crc kubenswrapper[4998]: I0203 07:06:26.014895 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-594c6c97c7-9bqhd" event={"ID":"e24d47fe-485b-4ceb-bdab-25b10ac92fa2","Type":"ContainerStarted","Data":"ac85fb57d382b091b0ec3df49b55d69c4fe4553fabf64a2bf4ffa916b408f777"} Feb 03 07:06:26 crc kubenswrapper[4998]: I0203 07:06:26.016280 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-594c6c97c7-9bqhd" 
event={"ID":"e24d47fe-485b-4ceb-bdab-25b10ac92fa2","Type":"ContainerStarted","Data":"f6359e2d805c5da84deb6d70b3a2cf1b151546db9cb67ce6157717737983ed9d"} Feb 03 07:06:26 crc kubenswrapper[4998]: I0203 07:06:26.016376 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-594c6c97c7-9bqhd" event={"ID":"e24d47fe-485b-4ceb-bdab-25b10ac92fa2","Type":"ContainerStarted","Data":"d866bdc02d6dd3fc8a64812f15757edbc7ccd910d2969ec706979857d66307c1"} Feb 03 07:06:26 crc kubenswrapper[4998]: I0203 07:06:26.016459 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-594c6c97c7-9bqhd" Feb 03 07:06:26 crc kubenswrapper[4998]: I0203 07:06:26.017235 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-54c675984c-nl7km" Feb 03 07:06:26 crc kubenswrapper[4998]: I0203 07:06:26.017244 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54c675984c-nl7km" event={"ID":"00d2b544-495e-4a19-8e08-e8db2febc192","Type":"ContainerDied","Data":"2a676f5971ff7673d2ac8c59959bbd70d088790340fde9aa331abf94d0f47ea2"} Feb 03 07:06:26 crc kubenswrapper[4998]: I0203 07:06:26.017452 4998 scope.go:117] "RemoveContainer" containerID="b8da9999ae8f2365a3d69654fadb0686756b35412902d6e30a822d8dc46aa177" Feb 03 07:06:26 crc kubenswrapper[4998]: I0203 07:06:26.019445 4998 generic.go:334] "Generic (PLEG): container finished" podID="f41c3046-76ec-4c0a-a3bb-1f48e6115507" containerID="37a88c371d600608a1c28bac5963c3f7e2116377ddc51da14cc9710f2759dcaf" exitCode=0 Feb 03 07:06:26 crc kubenswrapper[4998]: I0203 07:06:26.019479 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"f41c3046-76ec-4c0a-a3bb-1f48e6115507","Type":"ContainerDied","Data":"37a88c371d600608a1c28bac5963c3f7e2116377ddc51da14cc9710f2759dcaf"} Feb 03 07:06:26 crc kubenswrapper[4998]: I0203 07:06:26.042525 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-594c6c97c7-9bqhd" podStartSLOduration=3.042500162 podStartE2EDuration="3.042500162s" podCreationTimestamp="2026-02-03 07:06:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:06:26.031331951 +0000 UTC m=+1224.318025757" watchObservedRunningTime="2026-02-03 07:06:26.042500162 +0000 UTC m=+1224.329193968" Feb 03 07:06:26 crc kubenswrapper[4998]: I0203 07:06:26.046806 4998 scope.go:117] "RemoveContainer" containerID="4131c8021bbec37dec0f55996271d827fd62a36b338125a98ea4b0973576292b" Feb 03 07:06:26 crc kubenswrapper[4998]: I0203 07:06:26.064954 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-54c675984c-nl7km"] Feb 03 07:06:26 crc kubenswrapper[4998]: I0203 07:06:26.074311 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-54c675984c-nl7km"] Feb 03 07:06:26 crc kubenswrapper[4998]: I0203 07:06:26.366459 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-6b56d78fc-pz4b8" podUID="279352fa-2ab8-4816-ae30-37c7cf2f97ec" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.153:9696/\": dial tcp 10.217.0.153:9696: connect: connection refused" Feb 03 07:06:26 crc kubenswrapper[4998]: I0203 07:06:26.439420 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="00d2b544-495e-4a19-8e08-e8db2febc192" 
path="/var/lib/kubelet/pods/00d2b544-495e-4a19-8e08-e8db2febc192/volumes" Feb 03 07:06:27 crc kubenswrapper[4998]: I0203 07:06:27.214051 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Feb 03 07:06:27 crc kubenswrapper[4998]: I0203 07:06:27.720295 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6b56d78fc-pz4b8" Feb 03 07:06:27 crc kubenswrapper[4998]: I0203 07:06:27.801489 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/279352fa-2ab8-4816-ae30-37c7cf2f97ec-httpd-config\") pod \"279352fa-2ab8-4816-ae30-37c7cf2f97ec\" (UID: \"279352fa-2ab8-4816-ae30-37c7cf2f97ec\") " Feb 03 07:06:27 crc kubenswrapper[4998]: I0203 07:06:27.801556 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/279352fa-2ab8-4816-ae30-37c7cf2f97ec-ovndb-tls-certs\") pod \"279352fa-2ab8-4816-ae30-37c7cf2f97ec\" (UID: \"279352fa-2ab8-4816-ae30-37c7cf2f97ec\") " Feb 03 07:06:27 crc kubenswrapper[4998]: I0203 07:06:27.802740 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/279352fa-2ab8-4816-ae30-37c7cf2f97ec-public-tls-certs\") pod \"279352fa-2ab8-4816-ae30-37c7cf2f97ec\" (UID: \"279352fa-2ab8-4816-ae30-37c7cf2f97ec\") " Feb 03 07:06:27 crc kubenswrapper[4998]: I0203 07:06:27.802847 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/279352fa-2ab8-4816-ae30-37c7cf2f97ec-config\") pod \"279352fa-2ab8-4816-ae30-37c7cf2f97ec\" (UID: \"279352fa-2ab8-4816-ae30-37c7cf2f97ec\") " Feb 03 07:06:27 crc kubenswrapper[4998]: I0203 07:06:27.802898 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/279352fa-2ab8-4816-ae30-37c7cf2f97ec-internal-tls-certs\") pod \"279352fa-2ab8-4816-ae30-37c7cf2f97ec\" (UID: \"279352fa-2ab8-4816-ae30-37c7cf2f97ec\") " Feb 03 07:06:27 crc kubenswrapper[4998]: I0203 07:06:27.802975 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6nt4x\" (UniqueName: \"kubernetes.io/projected/279352fa-2ab8-4816-ae30-37c7cf2f97ec-kube-api-access-6nt4x\") pod \"279352fa-2ab8-4816-ae30-37c7cf2f97ec\" (UID: \"279352fa-2ab8-4816-ae30-37c7cf2f97ec\") " Feb 03 07:06:27 crc kubenswrapper[4998]: I0203 07:06:27.803055 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/279352fa-2ab8-4816-ae30-37c7cf2f97ec-combined-ca-bundle\") pod \"279352fa-2ab8-4816-ae30-37c7cf2f97ec\" (UID: \"279352fa-2ab8-4816-ae30-37c7cf2f97ec\") " Feb 03 07:06:27 crc kubenswrapper[4998]: I0203 07:06:27.809966 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/279352fa-2ab8-4816-ae30-37c7cf2f97ec-kube-api-access-6nt4x" (OuterVolumeSpecName: "kube-api-access-6nt4x") pod "279352fa-2ab8-4816-ae30-37c7cf2f97ec" (UID: "279352fa-2ab8-4816-ae30-37c7cf2f97ec"). InnerVolumeSpecName "kube-api-access-6nt4x". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:06:27 crc kubenswrapper[4998]: I0203 07:06:27.833951 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/279352fa-2ab8-4816-ae30-37c7cf2f97ec-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "279352fa-2ab8-4816-ae30-37c7cf2f97ec" (UID: "279352fa-2ab8-4816-ae30-37c7cf2f97ec"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:06:27 crc kubenswrapper[4998]: I0203 07:06:27.899074 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/279352fa-2ab8-4816-ae30-37c7cf2f97ec-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "279352fa-2ab8-4816-ae30-37c7cf2f97ec" (UID: "279352fa-2ab8-4816-ae30-37c7cf2f97ec"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:06:27 crc kubenswrapper[4998]: I0203 07:06:27.909325 4998 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/279352fa-2ab8-4816-ae30-37c7cf2f97ec-httpd-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:27 crc kubenswrapper[4998]: I0203 07:06:27.909351 4998 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/279352fa-2ab8-4816-ae30-37c7cf2f97ec-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:27 crc kubenswrapper[4998]: I0203 07:06:27.909363 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6nt4x\" (UniqueName: \"kubernetes.io/projected/279352fa-2ab8-4816-ae30-37c7cf2f97ec-kube-api-access-6nt4x\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:27 crc kubenswrapper[4998]: I0203 07:06:27.918161 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/279352fa-2ab8-4816-ae30-37c7cf2f97ec-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "279352fa-2ab8-4816-ae30-37c7cf2f97ec" (UID: "279352fa-2ab8-4816-ae30-37c7cf2f97ec"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:06:27 crc kubenswrapper[4998]: I0203 07:06:27.926869 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/279352fa-2ab8-4816-ae30-37c7cf2f97ec-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "279352fa-2ab8-4816-ae30-37c7cf2f97ec" (UID: "279352fa-2ab8-4816-ae30-37c7cf2f97ec"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:06:27 crc kubenswrapper[4998]: I0203 07:06:27.928877 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-56ff8c5b4b-c52fv" Feb 03 07:06:27 crc kubenswrapper[4998]: I0203 07:06:27.932897 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/279352fa-2ab8-4816-ae30-37c7cf2f97ec-config" (OuterVolumeSpecName: "config") pod "279352fa-2ab8-4816-ae30-37c7cf2f97ec" (UID: "279352fa-2ab8-4816-ae30-37c7cf2f97ec"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:06:27 crc kubenswrapper[4998]: I0203 07:06:27.993072 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/279352fa-2ab8-4816-ae30-37c7cf2f97ec-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "279352fa-2ab8-4816-ae30-37c7cf2f97ec" (UID: "279352fa-2ab8-4816-ae30-37c7cf2f97ec"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:06:28 crc kubenswrapper[4998]: I0203 07:06:28.011799 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/279352fa-2ab8-4816-ae30-37c7cf2f97ec-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:28 crc kubenswrapper[4998]: I0203 07:06:28.011836 4998 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/279352fa-2ab8-4816-ae30-37c7cf2f97ec-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:28 crc kubenswrapper[4998]: I0203 07:06:28.011849 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/279352fa-2ab8-4816-ae30-37c7cf2f97ec-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:28 crc kubenswrapper[4998]: I0203 07:06:28.011861 4998 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/279352fa-2ab8-4816-ae30-37c7cf2f97ec-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:28 crc kubenswrapper[4998]: I0203 07:06:28.052002 4998 generic.go:334] "Generic (PLEG): container finished" podID="279352fa-2ab8-4816-ae30-37c7cf2f97ec" containerID="eec1ec0874728148f23cbae4eee334076a34779b6923eff41f7134e28944a52b" exitCode=0 Feb 03 07:06:28 crc kubenswrapper[4998]: I0203 07:06:28.052972 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6b56d78fc-pz4b8" Feb 03 07:06:28 crc kubenswrapper[4998]: I0203 07:06:28.059860 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6b56d78fc-pz4b8" event={"ID":"279352fa-2ab8-4816-ae30-37c7cf2f97ec","Type":"ContainerDied","Data":"eec1ec0874728148f23cbae4eee334076a34779b6923eff41f7134e28944a52b"} Feb 03 07:06:28 crc kubenswrapper[4998]: I0203 07:06:28.059927 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6b56d78fc-pz4b8" event={"ID":"279352fa-2ab8-4816-ae30-37c7cf2f97ec","Type":"ContainerDied","Data":"a8c483f337f76684e42e564cab7e204c56a9f3950b1f997c59a7fcf99e259eba"} Feb 03 07:06:28 crc kubenswrapper[4998]: I0203 07:06:28.059948 4998 scope.go:117] "RemoveContainer" containerID="8705db3ca6fa78c8dfd60501654b6690ab858f596ea50c8b59bd5797f6b8f4f7" Feb 03 07:06:28 crc kubenswrapper[4998]: I0203 07:06:28.081314 4998 scope.go:117] "RemoveContainer" containerID="eec1ec0874728148f23cbae4eee334076a34779b6923eff41f7134e28944a52b" Feb 03 07:06:28 crc kubenswrapper[4998]: I0203 07:06:28.120866 4998 scope.go:117] "RemoveContainer" containerID="8705db3ca6fa78c8dfd60501654b6690ab858f596ea50c8b59bd5797f6b8f4f7" Feb 03 07:06:28 crc kubenswrapper[4998]: E0203 07:06:28.123184 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8705db3ca6fa78c8dfd60501654b6690ab858f596ea50c8b59bd5797f6b8f4f7\": container with ID starting with 8705db3ca6fa78c8dfd60501654b6690ab858f596ea50c8b59bd5797f6b8f4f7 not found: ID does not exist" containerID="8705db3ca6fa78c8dfd60501654b6690ab858f596ea50c8b59bd5797f6b8f4f7" Feb 03 07:06:28 crc kubenswrapper[4998]: I0203 07:06:28.123224 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8705db3ca6fa78c8dfd60501654b6690ab858f596ea50c8b59bd5797f6b8f4f7"} err="failed to get container status \"8705db3ca6fa78c8dfd60501654b6690ab858f596ea50c8b59bd5797f6b8f4f7\": rpc error: code = NotFound desc = could not find container \"8705db3ca6fa78c8dfd60501654b6690ab858f596ea50c8b59bd5797f6b8f4f7\": container with ID starting with 8705db3ca6fa78c8dfd60501654b6690ab858f596ea50c8b59bd5797f6b8f4f7 not found: ID does not exist" Feb 03 07:06:28 crc kubenswrapper[4998]: I0203 07:06:28.123253 4998 scope.go:117] "RemoveContainer" containerID="eec1ec0874728148f23cbae4eee334076a34779b6923eff41f7134e28944a52b" Feb 03 07:06:28 crc kubenswrapper[4998]: E0203 07:06:28.123474 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eec1ec0874728148f23cbae4eee334076a34779b6923eff41f7134e28944a52b\": container with ID starting with eec1ec0874728148f23cbae4eee334076a34779b6923eff41f7134e28944a52b not found: ID does not exist" containerID="eec1ec0874728148f23cbae4eee334076a34779b6923eff41f7134e28944a52b" Feb 03 07:06:28 crc kubenswrapper[4998]: I0203 07:06:28.123502 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eec1ec0874728148f23cbae4eee334076a34779b6923eff41f7134e28944a52b"} err="failed to get container status \"eec1ec0874728148f23cbae4eee334076a34779b6923eff41f7134e28944a52b\": rpc error: code = NotFound desc = could not find container \"eec1ec0874728148f23cbae4eee334076a34779b6923eff41f7134e28944a52b\": container with ID starting with eec1ec0874728148f23cbae4eee334076a34779b6923eff41f7134e28944a52b not found: ID does not exist" Feb 03 07:06:28 crc kubenswrapper[4998]: 
Feb 03 07:06:28 crc kubenswrapper[4998]: I0203 07:06:28.126834 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-6b56d78fc-pz4b8"]
Feb 03 07:06:28 crc kubenswrapper[4998]: I0203 07:06:28.144573 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-6b56d78fc-pz4b8"]
Feb 03 07:06:28 crc kubenswrapper[4998]: I0203 07:06:28.437760 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="279352fa-2ab8-4816-ae30-37c7cf2f97ec" path="/var/lib/kubelet/pods/279352fa-2ab8-4816-ae30-37c7cf2f97ec/volumes"
Feb 03 07:06:28 crc kubenswrapper[4998]: I0203 07:06:28.912458 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-56ff8c5b4b-c52fv"
Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.141388 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-55ccbc8794-9m7vc"]
Feb 03 07:06:29 crc kubenswrapper[4998]: E0203 07:06:29.141844 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00d2b544-495e-4a19-8e08-e8db2febc192" containerName="dnsmasq-dns"
Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.141866 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="00d2b544-495e-4a19-8e08-e8db2febc192" containerName="dnsmasq-dns"
Feb 03 07:06:29 crc kubenswrapper[4998]: E0203 07:06:29.141879 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="279352fa-2ab8-4816-ae30-37c7cf2f97ec" containerName="neutron-httpd"
Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.141886 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="279352fa-2ab8-4816-ae30-37c7cf2f97ec" containerName="neutron-httpd"
Feb 03 07:06:29 crc kubenswrapper[4998]: E0203 07:06:29.141900 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="279352fa-2ab8-4816-ae30-37c7cf2f97ec" containerName="neutron-api"
Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.141908 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="279352fa-2ab8-4816-ae30-37c7cf2f97ec" containerName="neutron-api"
Feb 03 07:06:29 crc kubenswrapper[4998]: E0203 07:06:29.141920 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00d2b544-495e-4a19-8e08-e8db2febc192" containerName="init"
Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.141927 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="00d2b544-495e-4a19-8e08-e8db2febc192" containerName="init"
Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.142118 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="279352fa-2ab8-4816-ae30-37c7cf2f97ec" containerName="neutron-httpd"
Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.142138 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="279352fa-2ab8-4816-ae30-37c7cf2f97ec" containerName="neutron-api"
Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.142163 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="00d2b544-495e-4a19-8e08-e8db2febc192" containerName="dnsmasq-dns"
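The RemoveStaleState and "Deleted CPUSet assignment" entries above show the cpu and memory managers dropping per-container bookkeeping for pods the API no longer knows about, before the replacement pod is admitted. A toy version of that bookkeeping, keyed the way the log entries are (podUID plus containerName); this is not kubelet's actual state store:

    // stalestate_sketch.go — toy per-container state cleanup, for illustration.
    package main

    import "fmt"

    type key struct{ podUID, container string }

    type assignments map[key]string // e.g. container -> CPU set

    // removeStaleState drops entries whose pod is no longer live.
    func removeStaleState(a assignments, livePods map[string]bool) {
        for k := range a {
            if !livePods[k.podUID] {
                fmt.Printf("removing stale container %s/%s\n", k.podUID, k.container)
                delete(a, k)
            }
        }
    }

    func main() {
        a := assignments{
            {"00d2b544-495e-4a19-8e08-e8db2febc192", "dnsmasq-dns"}: "0-3",
        }
        removeStaleState(a, map[string]bool{}) // no live pods: entry is stale
        fmt.Println(len(a))                    // 0
    }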
Need to start a new one" pod="openstack/placement-55ccbc8794-9m7vc" Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.169396 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-55ccbc8794-9m7vc"] Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.235029 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c122d5d6-c472-46c4-9baf-195893bff38a-combined-ca-bundle\") pod \"placement-55ccbc8794-9m7vc\" (UID: \"c122d5d6-c472-46c4-9baf-195893bff38a\") " pod="openstack/placement-55ccbc8794-9m7vc" Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.235071 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c122d5d6-c472-46c4-9baf-195893bff38a-logs\") pod \"placement-55ccbc8794-9m7vc\" (UID: \"c122d5d6-c472-46c4-9baf-195893bff38a\") " pod="openstack/placement-55ccbc8794-9m7vc" Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.235094 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c122d5d6-c472-46c4-9baf-195893bff38a-config-data\") pod \"placement-55ccbc8794-9m7vc\" (UID: \"c122d5d6-c472-46c4-9baf-195893bff38a\") " pod="openstack/placement-55ccbc8794-9m7vc" Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.235124 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c122d5d6-c472-46c4-9baf-195893bff38a-internal-tls-certs\") pod \"placement-55ccbc8794-9m7vc\" (UID: \"c122d5d6-c472-46c4-9baf-195893bff38a\") " pod="openstack/placement-55ccbc8794-9m7vc" Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.235310 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7z2fv\" (UniqueName: \"kubernetes.io/projected/c122d5d6-c472-46c4-9baf-195893bff38a-kube-api-access-7z2fv\") pod \"placement-55ccbc8794-9m7vc\" (UID: \"c122d5d6-c472-46c4-9baf-195893bff38a\") " pod="openstack/placement-55ccbc8794-9m7vc" Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.235533 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c122d5d6-c472-46c4-9baf-195893bff38a-scripts\") pod \"placement-55ccbc8794-9m7vc\" (UID: \"c122d5d6-c472-46c4-9baf-195893bff38a\") " pod="openstack/placement-55ccbc8794-9m7vc" Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.235554 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c122d5d6-c472-46c4-9baf-195893bff38a-public-tls-certs\") pod \"placement-55ccbc8794-9m7vc\" (UID: \"c122d5d6-c472-46c4-9baf-195893bff38a\") " pod="openstack/placement-55ccbc8794-9m7vc" Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.336903 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c122d5d6-c472-46c4-9baf-195893bff38a-combined-ca-bundle\") pod \"placement-55ccbc8794-9m7vc\" (UID: \"c122d5d6-c472-46c4-9baf-195893bff38a\") " pod="openstack/placement-55ccbc8794-9m7vc" Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.337272 4998 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c122d5d6-c472-46c4-9baf-195893bff38a-logs\") pod \"placement-55ccbc8794-9m7vc\" (UID: \"c122d5d6-c472-46c4-9baf-195893bff38a\") " pod="openstack/placement-55ccbc8794-9m7vc" Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.337301 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c122d5d6-c472-46c4-9baf-195893bff38a-config-data\") pod \"placement-55ccbc8794-9m7vc\" (UID: \"c122d5d6-c472-46c4-9baf-195893bff38a\") " pod="openstack/placement-55ccbc8794-9m7vc" Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.337340 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c122d5d6-c472-46c4-9baf-195893bff38a-internal-tls-certs\") pod \"placement-55ccbc8794-9m7vc\" (UID: \"c122d5d6-c472-46c4-9baf-195893bff38a\") " pod="openstack/placement-55ccbc8794-9m7vc" Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.337383 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7z2fv\" (UniqueName: \"kubernetes.io/projected/c122d5d6-c472-46c4-9baf-195893bff38a-kube-api-access-7z2fv\") pod \"placement-55ccbc8794-9m7vc\" (UID: \"c122d5d6-c472-46c4-9baf-195893bff38a\") " pod="openstack/placement-55ccbc8794-9m7vc" Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.337474 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c122d5d6-c472-46c4-9baf-195893bff38a-scripts\") pod \"placement-55ccbc8794-9m7vc\" (UID: \"c122d5d6-c472-46c4-9baf-195893bff38a\") " pod="openstack/placement-55ccbc8794-9m7vc" Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.337499 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c122d5d6-c472-46c4-9baf-195893bff38a-public-tls-certs\") pod \"placement-55ccbc8794-9m7vc\" (UID: \"c122d5d6-c472-46c4-9baf-195893bff38a\") " pod="openstack/placement-55ccbc8794-9m7vc" Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.337803 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c122d5d6-c472-46c4-9baf-195893bff38a-logs\") pod \"placement-55ccbc8794-9m7vc\" (UID: \"c122d5d6-c472-46c4-9baf-195893bff38a\") " pod="openstack/placement-55ccbc8794-9m7vc" Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.342093 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c122d5d6-c472-46c4-9baf-195893bff38a-scripts\") pod \"placement-55ccbc8794-9m7vc\" (UID: \"c122d5d6-c472-46c4-9baf-195893bff38a\") " pod="openstack/placement-55ccbc8794-9m7vc" Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.346003 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c122d5d6-c472-46c4-9baf-195893bff38a-internal-tls-certs\") pod \"placement-55ccbc8794-9m7vc\" (UID: \"c122d5d6-c472-46c4-9baf-195893bff38a\") " pod="openstack/placement-55ccbc8794-9m7vc" Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.349491 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c122d5d6-c472-46c4-9baf-195893bff38a-config-data\") pod \"placement-55ccbc8794-9m7vc\" (UID: 
\"c122d5d6-c472-46c4-9baf-195893bff38a\") " pod="openstack/placement-55ccbc8794-9m7vc" Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.350817 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c122d5d6-c472-46c4-9baf-195893bff38a-public-tls-certs\") pod \"placement-55ccbc8794-9m7vc\" (UID: \"c122d5d6-c472-46c4-9baf-195893bff38a\") " pod="openstack/placement-55ccbc8794-9m7vc" Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.352460 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c122d5d6-c472-46c4-9baf-195893bff38a-combined-ca-bundle\") pod \"placement-55ccbc8794-9m7vc\" (UID: \"c122d5d6-c472-46c4-9baf-195893bff38a\") " pod="openstack/placement-55ccbc8794-9m7vc" Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.354426 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7z2fv\" (UniqueName: \"kubernetes.io/projected/c122d5d6-c472-46c4-9baf-195893bff38a-kube-api-access-7z2fv\") pod \"placement-55ccbc8794-9m7vc\" (UID: \"c122d5d6-c472-46c4-9baf-195893bff38a\") " pod="openstack/placement-55ccbc8794-9m7vc" Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.546372 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-55ccbc8794-9m7vc" Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.633490 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.645175 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f41c3046-76ec-4c0a-a3bb-1f48e6115507-combined-ca-bundle\") pod \"f41c3046-76ec-4c0a-a3bb-1f48e6115507\" (UID: \"f41c3046-76ec-4c0a-a3bb-1f48e6115507\") " Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.645247 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f41c3046-76ec-4c0a-a3bb-1f48e6115507-config-data-custom\") pod \"f41c3046-76ec-4c0a-a3bb-1f48e6115507\" (UID: \"f41c3046-76ec-4c0a-a3bb-1f48e6115507\") " Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.645314 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f41c3046-76ec-4c0a-a3bb-1f48e6115507-etc-machine-id\") pod \"f41c3046-76ec-4c0a-a3bb-1f48e6115507\" (UID: \"f41c3046-76ec-4c0a-a3bb-1f48e6115507\") " Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.645433 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f41c3046-76ec-4c0a-a3bb-1f48e6115507-config-data\") pod \"f41c3046-76ec-4c0a-a3bb-1f48e6115507\" (UID: \"f41c3046-76ec-4c0a-a3bb-1f48e6115507\") " Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.645482 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ldngz\" (UniqueName: \"kubernetes.io/projected/f41c3046-76ec-4c0a-a3bb-1f48e6115507-kube-api-access-ldngz\") pod \"f41c3046-76ec-4c0a-a3bb-1f48e6115507\" (UID: \"f41c3046-76ec-4c0a-a3bb-1f48e6115507\") " Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.645506 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/f41c3046-76ec-4c0a-a3bb-1f48e6115507-scripts\") pod \"f41c3046-76ec-4c0a-a3bb-1f48e6115507\" (UID: \"f41c3046-76ec-4c0a-a3bb-1f48e6115507\") " Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.647145 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f41c3046-76ec-4c0a-a3bb-1f48e6115507-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "f41c3046-76ec-4c0a-a3bb-1f48e6115507" (UID: "f41c3046-76ec-4c0a-a3bb-1f48e6115507"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.654685 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f41c3046-76ec-4c0a-a3bb-1f48e6115507-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "f41c3046-76ec-4c0a-a3bb-1f48e6115507" (UID: "f41c3046-76ec-4c0a-a3bb-1f48e6115507"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.657521 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f41c3046-76ec-4c0a-a3bb-1f48e6115507-scripts" (OuterVolumeSpecName: "scripts") pod "f41c3046-76ec-4c0a-a3bb-1f48e6115507" (UID: "f41c3046-76ec-4c0a-a3bb-1f48e6115507"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.661009 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f41c3046-76ec-4c0a-a3bb-1f48e6115507-kube-api-access-ldngz" (OuterVolumeSpecName: "kube-api-access-ldngz") pod "f41c3046-76ec-4c0a-a3bb-1f48e6115507" (UID: "f41c3046-76ec-4c0a-a3bb-1f48e6115507"). InnerVolumeSpecName "kube-api-access-ldngz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.726708 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f41c3046-76ec-4c0a-a3bb-1f48e6115507-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f41c3046-76ec-4c0a-a3bb-1f48e6115507" (UID: "f41c3046-76ec-4c0a-a3bb-1f48e6115507"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.747140 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ldngz\" (UniqueName: \"kubernetes.io/projected/f41c3046-76ec-4c0a-a3bb-1f48e6115507-kube-api-access-ldngz\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.747176 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f41c3046-76ec-4c0a-a3bb-1f48e6115507-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.747188 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f41c3046-76ec-4c0a-a3bb-1f48e6115507-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.747196 4998 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f41c3046-76ec-4c0a-a3bb-1f48e6115507-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.747203 4998 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f41c3046-76ec-4c0a-a3bb-1f48e6115507-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.775996 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f41c3046-76ec-4c0a-a3bb-1f48e6115507-config-data" (OuterVolumeSpecName: "config-data") pod "f41c3046-76ec-4c0a-a3bb-1f48e6115507" (UID: "f41c3046-76ec-4c0a-a3bb-1f48e6115507"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:06:29 crc kubenswrapper[4998]: I0203 07:06:29.848236 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f41c3046-76ec-4c0a-a3bb-1f48e6115507-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.030955 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-55ccbc8794-9m7vc"] Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.072847 4998 generic.go:334] "Generic (PLEG): container finished" podID="f41c3046-76ec-4c0a-a3bb-1f48e6115507" containerID="09a3d5babb3a21ce940580118c5d861f90b5adb3f7d68c536fc501ee43ed649e" exitCode=0 Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.072916 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"f41c3046-76ec-4c0a-a3bb-1f48e6115507","Type":"ContainerDied","Data":"09a3d5babb3a21ce940580118c5d861f90b5adb3f7d68c536fc501ee43ed649e"} Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.072944 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"f41c3046-76ec-4c0a-a3bb-1f48e6115507","Type":"ContainerDied","Data":"0b22735f7f0ba8bdb4331de1a8be023a2df81358865d1e1c03a0dd33b7357152"} Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.072960 4998 scope.go:117] "RemoveContainer" containerID="37a88c371d600608a1c28bac5963c3f7e2116377ddc51da14cc9710f2759dcaf" Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.074170 4998 util.go:48] "No ready sandbox for pod can be found. 
Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.079941 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-55ccbc8794-9m7vc" event={"ID":"c122d5d6-c472-46c4-9baf-195893bff38a","Type":"ContainerStarted","Data":"fb5dd646f9bb3c40a0c2cd2e6254048d8003d080acb3ca4b420968ebd8b882ce"}
Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.116928 4998 scope.go:117] "RemoveContainer" containerID="09a3d5babb3a21ce940580118c5d861f90b5adb3f7d68c536fc501ee43ed649e"
Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.124427 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"]
Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.170848 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"]
Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.175820 4998 scope.go:117] "RemoveContainer" containerID="37a88c371d600608a1c28bac5963c3f7e2116377ddc51da14cc9710f2759dcaf"
Feb 03 07:06:30 crc kubenswrapper[4998]: E0203 07:06:30.179330 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37a88c371d600608a1c28bac5963c3f7e2116377ddc51da14cc9710f2759dcaf\": container with ID starting with 37a88c371d600608a1c28bac5963c3f7e2116377ddc51da14cc9710f2759dcaf not found: ID does not exist" containerID="37a88c371d600608a1c28bac5963c3f7e2116377ddc51da14cc9710f2759dcaf"
Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.179377 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37a88c371d600608a1c28bac5963c3f7e2116377ddc51da14cc9710f2759dcaf"} err="failed to get container status \"37a88c371d600608a1c28bac5963c3f7e2116377ddc51da14cc9710f2759dcaf\": rpc error: code = NotFound desc = could not find container \"37a88c371d600608a1c28bac5963c3f7e2116377ddc51da14cc9710f2759dcaf\": container with ID starting with 37a88c371d600608a1c28bac5963c3f7e2116377ddc51da14cc9710f2759dcaf not found: ID does not exist"
Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.179405 4998 scope.go:117] "RemoveContainer" containerID="09a3d5babb3a21ce940580118c5d861f90b5adb3f7d68c536fc501ee43ed649e"
Feb 03 07:06:30 crc kubenswrapper[4998]: E0203 07:06:30.181998 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"09a3d5babb3a21ce940580118c5d861f90b5adb3f7d68c536fc501ee43ed649e\": container with ID starting with 09a3d5babb3a21ce940580118c5d861f90b5adb3f7d68c536fc501ee43ed649e not found: ID does not exist" containerID="09a3d5babb3a21ce940580118c5d861f90b5adb3f7d68c536fc501ee43ed649e"
Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.182059 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"09a3d5babb3a21ce940580118c5d861f90b5adb3f7d68c536fc501ee43ed649e"} err="failed to get container status \"09a3d5babb3a21ce940580118c5d861f90b5adb3f7d68c536fc501ee43ed649e\": rpc error: code = NotFound desc = could not find container \"09a3d5babb3a21ce940580118c5d861f90b5adb3f7d68c536fc501ee43ed649e\": container with ID starting with 09a3d5babb3a21ce940580118c5d861f90b5adb3f7d68c536fc501ee43ed649e not found: ID does not exist"
Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.191059 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"]
Feb 03 07:06:30 crc kubenswrapper[4998]: E0203 07:06:30.191435 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f41c3046-76ec-4c0a-a3bb-1f48e6115507" containerName="cinder-scheduler"
Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.191450 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f41c3046-76ec-4c0a-a3bb-1f48e6115507" containerName="cinder-scheduler"
Feb 03 07:06:30 crc kubenswrapper[4998]: E0203 07:06:30.191467 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f41c3046-76ec-4c0a-a3bb-1f48e6115507" containerName="probe"
Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.191473 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f41c3046-76ec-4c0a-a3bb-1f48e6115507" containerName="probe"
Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.191630 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="f41c3046-76ec-4c0a-a3bb-1f48e6115507" containerName="cinder-scheduler"
Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.191655 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="f41c3046-76ec-4c0a-a3bb-1f48e6115507" containerName="probe"
Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.192732 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.195472 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data"
Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.201712 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.256656 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecd25f56-731a-4b58-837d-7d81dc9f595e-config-data\") pod \"cinder-scheduler-0\" (UID: \"ecd25f56-731a-4b58-837d-7d81dc9f595e\") " pod="openstack/cinder-scheduler-0"
Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.256807 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ecd25f56-731a-4b58-837d-7d81dc9f595e-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ecd25f56-731a-4b58-837d-7d81dc9f595e\") " pod="openstack/cinder-scheduler-0"
Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.256848 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecd25f56-731a-4b58-837d-7d81dc9f595e-scripts\") pod \"cinder-scheduler-0\" (UID: \"ecd25f56-731a-4b58-837d-7d81dc9f595e\") " pod="openstack/cinder-scheduler-0"
Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.257007 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-krqn4\" (UniqueName: \"kubernetes.io/projected/ecd25f56-731a-4b58-837d-7d81dc9f595e-kube-api-access-krqn4\") pod \"cinder-scheduler-0\" (UID: \"ecd25f56-731a-4b58-837d-7d81dc9f595e\") " pod="openstack/cinder-scheduler-0"
Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.257072 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ecd25f56-731a-4b58-837d-7d81dc9f595e-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ecd25f56-731a-4b58-837d-7d81dc9f595e\") " pod="openstack/cinder-scheduler-0"
Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.257151 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecd25f56-731a-4b58-837d-7d81dc9f595e-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ecd25f56-731a-4b58-837d-7d81dc9f595e\") " pod="openstack/cinder-scheduler-0"
Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.358994 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecd25f56-731a-4b58-837d-7d81dc9f595e-config-data\") pod \"cinder-scheduler-0\" (UID: \"ecd25f56-731a-4b58-837d-7d81dc9f595e\") " pod="openstack/cinder-scheduler-0"
Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.359088 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ecd25f56-731a-4b58-837d-7d81dc9f595e-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ecd25f56-731a-4b58-837d-7d81dc9f595e\") " pod="openstack/cinder-scheduler-0"
Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.359116 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecd25f56-731a-4b58-837d-7d81dc9f595e-scripts\") pod \"cinder-scheduler-0\" (UID: \"ecd25f56-731a-4b58-837d-7d81dc9f595e\") " pod="openstack/cinder-scheduler-0"
Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.359173 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-krqn4\" (UniqueName: \"kubernetes.io/projected/ecd25f56-731a-4b58-837d-7d81dc9f595e-kube-api-access-krqn4\") pod \"cinder-scheduler-0\" (UID: \"ecd25f56-731a-4b58-837d-7d81dc9f595e\") " pod="openstack/cinder-scheduler-0"
Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.359200 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ecd25f56-731a-4b58-837d-7d81dc9f595e-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ecd25f56-731a-4b58-837d-7d81dc9f595e\") " pod="openstack/cinder-scheduler-0"
Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.359234 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecd25f56-731a-4b58-837d-7d81dc9f595e-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ecd25f56-731a-4b58-837d-7d81dc9f595e\") " pod="openstack/cinder-scheduler-0"
Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.359906 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ecd25f56-731a-4b58-837d-7d81dc9f595e-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ecd25f56-731a-4b58-837d-7d81dc9f595e\") " pod="openstack/cinder-scheduler-0"
Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.364462 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecd25f56-731a-4b58-837d-7d81dc9f595e-scripts\") pod \"cinder-scheduler-0\" (UID: \"ecd25f56-731a-4b58-837d-7d81dc9f595e\") " pod="openstack/cinder-scheduler-0"
Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.364559 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecd25f56-731a-4b58-837d-7d81dc9f595e-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ecd25f56-731a-4b58-837d-7d81dc9f595e\") " pod="openstack/cinder-scheduler-0"
Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.376423 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-krqn4\" (UniqueName: \"kubernetes.io/projected/ecd25f56-731a-4b58-837d-7d81dc9f595e-kube-api-access-krqn4\") pod \"cinder-scheduler-0\" (UID: \"ecd25f56-731a-4b58-837d-7d81dc9f595e\") " pod="openstack/cinder-scheduler-0"
Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.382400 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ecd25f56-731a-4b58-837d-7d81dc9f595e-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ecd25f56-731a-4b58-837d-7d81dc9f595e\") " pod="openstack/cinder-scheduler-0"
Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.383416 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecd25f56-731a-4b58-837d-7d81dc9f595e-config-data\") pod \"cinder-scheduler-0\" (UID: \"ecd25f56-731a-4b58-837d-7d81dc9f595e\") " pod="openstack/cinder-scheduler-0"
Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.438903 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f41c3046-76ec-4c0a-a3bb-1f48e6115507" path="/var/lib/kubelet/pods/f41c3046-76ec-4c0a-a3bb-1f48e6115507/volumes"
Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.472198 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Feb 03 07:06:30 crc kubenswrapper[4998]: W0203 07:06:30.938185 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podecd25f56_731a_4b58_837d_7d81dc9f595e.slice/crio-276eff2f6bb6b7fb95a43ddaf758b53f20b3bd86a8e91a55f102a2e3396c6b9e WatchSource:0}: Error finding container 276eff2f6bb6b7fb95a43ddaf758b53f20b3bd86a8e91a55f102a2e3396c6b9e: Status 404 returned error can't find the container with id 276eff2f6bb6b7fb95a43ddaf758b53f20b3bd86a8e91a55f102a2e3396c6b9e
Feb 03 07:06:30 crc kubenswrapper[4998]: I0203 07:06:30.938463 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Feb 03 07:06:31 crc kubenswrapper[4998]: I0203 07:06:31.092827 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-55ccbc8794-9m7vc" event={"ID":"c122d5d6-c472-46c4-9baf-195893bff38a","Type":"ContainerStarted","Data":"115a3825a27cab7fdfb490f904a7aeed06539b9f2cfa910218ec08fed15d9afa"}
Feb 03 07:06:31 crc kubenswrapper[4998]: I0203 07:06:31.092871 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-55ccbc8794-9m7vc" event={"ID":"c122d5d6-c472-46c4-9baf-195893bff38a","Type":"ContainerStarted","Data":"dd8e69804bea83a06ecc142f3c827d453c26167985e7a893d560d1c6cc6142bb"}
Feb 03 07:06:31 crc kubenswrapper[4998]: I0203 07:06:31.093028 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-55ccbc8794-9m7vc"
Feb 03 07:06:31 crc kubenswrapper[4998]: I0203 07:06:31.093203 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-55ccbc8794-9m7vc"
Feb 03 07:06:31 crc kubenswrapper[4998]: I0203 07:06:31.095221 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ecd25f56-731a-4b58-837d-7d81dc9f595e","Type":"ContainerStarted","Data":"276eff2f6bb6b7fb95a43ddaf758b53f20b3bd86a8e91a55f102a2e3396c6b9e"}
Feb 03 07:06:31 crc kubenswrapper[4998]: I0203 07:06:31.130180 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-55ccbc8794-9m7vc" podStartSLOduration=2.130163295 podStartE2EDuration="2.130163295s" podCreationTimestamp="2026-02-03 07:06:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:06:31.123261137 +0000 UTC m=+1229.409954983" watchObservedRunningTime="2026-02-03 07:06:31.130163295 +0000 UTC m=+1229.416857101"
Feb 03 07:06:31 crc kubenswrapper[4998]: I0203 07:06:31.347654 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-6c7dff8f57-z8bvm"
Feb 03 07:06:31 crc kubenswrapper[4998]: I0203 07:06:31.950681 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"]
Feb 03 07:06:31 crc kubenswrapper[4998]: I0203 07:06:31.952630 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Feb 03 07:06:31 crc kubenswrapper[4998]: I0203 07:06:31.956057 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret"
Feb 03 07:06:31 crc kubenswrapper[4998]: I0203 07:06:31.956135 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config"
Feb 03 07:06:31 crc kubenswrapper[4998]: I0203 07:06:31.958235 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-slx49"
Feb 03 07:06:31 crc kubenswrapper[4998]: I0203 07:06:31.984727 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Feb 03 07:06:32 crc kubenswrapper[4998]: I0203 07:06:32.097941 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rv7gq\" (UniqueName: \"kubernetes.io/projected/e9fe061b-61bf-4fa2-88dd-a45fb2799369-kube-api-access-rv7gq\") pod \"openstackclient\" (UID: \"e9fe061b-61bf-4fa2-88dd-a45fb2799369\") " pod="openstack/openstackclient"
Feb 03 07:06:32 crc kubenswrapper[4998]: I0203 07:06:32.098253 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9fe061b-61bf-4fa2-88dd-a45fb2799369-combined-ca-bundle\") pod \"openstackclient\" (UID: \"e9fe061b-61bf-4fa2-88dd-a45fb2799369\") " pod="openstack/openstackclient"
Feb 03 07:06:32 crc kubenswrapper[4998]: I0203 07:06:32.098352 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e9fe061b-61bf-4fa2-88dd-a45fb2799369-openstack-config\") pod \"openstackclient\" (UID: \"e9fe061b-61bf-4fa2-88dd-a45fb2799369\") " pod="openstack/openstackclient"
Feb 03 07:06:32 crc kubenswrapper[4998]: I0203 07:06:32.098643 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e9fe061b-61bf-4fa2-88dd-a45fb2799369-openstack-config-secret\") pod \"openstackclient\" (UID: \"e9fe061b-61bf-4fa2-88dd-a45fb2799369\") " pod="openstack/openstackclient"
Feb 03 07:06:32 crc kubenswrapper[4998]: I0203 07:06:32.107165 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ecd25f56-731a-4b58-837d-7d81dc9f595e","Type":"ContainerStarted","Data":"b9cf13c79eeb6224fa7a06ecf85b9c0950a6c413aa4a0ee378c2547496f98817"}
Feb 03 07:06:32 crc kubenswrapper[4998]: I0203 07:06:32.200074 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9fe061b-61bf-4fa2-88dd-a45fb2799369-combined-ca-bundle\") pod \"openstackclient\" (UID: \"e9fe061b-61bf-4fa2-88dd-a45fb2799369\") " pod="openstack/openstackclient"
Feb 03 07:06:32 crc kubenswrapper[4998]: I0203 07:06:32.200428 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e9fe061b-61bf-4fa2-88dd-a45fb2799369-openstack-config\") pod \"openstackclient\" (UID: \"e9fe061b-61bf-4fa2-88dd-a45fb2799369\") " pod="openstack/openstackclient"
Feb 03 07:06:32 crc kubenswrapper[4998]: I0203 07:06:32.201172 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e9fe061b-61bf-4fa2-88dd-a45fb2799369-openstack-config\") pod \"openstackclient\" (UID: \"e9fe061b-61bf-4fa2-88dd-a45fb2799369\") " pod="openstack/openstackclient"
Feb 03 07:06:32 crc kubenswrapper[4998]: I0203 07:06:32.201246 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e9fe061b-61bf-4fa2-88dd-a45fb2799369-openstack-config-secret\") pod \"openstackclient\" (UID: \"e9fe061b-61bf-4fa2-88dd-a45fb2799369\") " pod="openstack/openstackclient"
Feb 03 07:06:32 crc kubenswrapper[4998]: I0203 07:06:32.201474 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rv7gq\" (UniqueName: \"kubernetes.io/projected/e9fe061b-61bf-4fa2-88dd-a45fb2799369-kube-api-access-rv7gq\") pod \"openstackclient\" (UID: \"e9fe061b-61bf-4fa2-88dd-a45fb2799369\") " pod="openstack/openstackclient"
Feb 03 07:06:32 crc kubenswrapper[4998]: I0203 07:06:32.205924 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9fe061b-61bf-4fa2-88dd-a45fb2799369-combined-ca-bundle\") pod \"openstackclient\" (UID: \"e9fe061b-61bf-4fa2-88dd-a45fb2799369\") " pod="openstack/openstackclient"
Feb 03 07:06:32 crc kubenswrapper[4998]: I0203 07:06:32.206034 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e9fe061b-61bf-4fa2-88dd-a45fb2799369-openstack-config-secret\") pod \"openstackclient\" (UID: \"e9fe061b-61bf-4fa2-88dd-a45fb2799369\") " pod="openstack/openstackclient"
Feb 03 07:06:32 crc kubenswrapper[4998]: I0203 07:06:32.226656 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rv7gq\" (UniqueName: \"kubernetes.io/projected/e9fe061b-61bf-4fa2-88dd-a45fb2799369-kube-api-access-rv7gq\") pod \"openstackclient\" (UID: \"e9fe061b-61bf-4fa2-88dd-a45fb2799369\") " pod="openstack/openstackclient"
Feb 03 07:06:32 crc kubenswrapper[4998]: I0203 07:06:32.293253 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"]
Feb 03 07:06:32 crc kubenswrapper[4998]: I0203 07:06:32.294061 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Feb 03 07:06:32 crc kubenswrapper[4998]: I0203 07:06:32.305868 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"]
Feb 03 07:06:32 crc kubenswrapper[4998]: I0203 07:06:32.395860 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"]
Feb 03 07:06:32 crc kubenswrapper[4998]: I0203 07:06:32.398103 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Feb 03 07:06:32 crc kubenswrapper[4998]: I0203 07:06:32.423688 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Feb 03 07:06:32 crc kubenswrapper[4998]: I0203 07:06:32.507357 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/58c44471-d442-4736-a649-c762a1c893fa-openstack-config\") pod \"openstackclient\" (UID: \"58c44471-d442-4736-a649-c762a1c893fa\") " pod="openstack/openstackclient"
Feb 03 07:06:32 crc kubenswrapper[4998]: I0203 07:06:32.507422 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58c44471-d442-4736-a649-c762a1c893fa-combined-ca-bundle\") pod \"openstackclient\" (UID: \"58c44471-d442-4736-a649-c762a1c893fa\") " pod="openstack/openstackclient"
Feb 03 07:06:32 crc kubenswrapper[4998]: I0203 07:06:32.507596 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9r2st\" (UniqueName: \"kubernetes.io/projected/58c44471-d442-4736-a649-c762a1c893fa-kube-api-access-9r2st\") pod \"openstackclient\" (UID: \"58c44471-d442-4736-a649-c762a1c893fa\") " pod="openstack/openstackclient"
Feb 03 07:06:32 crc kubenswrapper[4998]: I0203 07:06:32.507639 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/58c44471-d442-4736-a649-c762a1c893fa-openstack-config-secret\") pod \"openstackclient\" (UID: \"58c44471-d442-4736-a649-c762a1c893fa\") " pod="openstack/openstackclient"
Feb 03 07:06:32 crc kubenswrapper[4998]: E0203 07:06:32.510605 4998 log.go:32] "RunPodSandbox from runtime service failed" err=<
Feb 03 07:06:32 crc kubenswrapper[4998]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_openstack_e9fe061b-61bf-4fa2-88dd-a45fb2799369_0(c859afba4364b7497680692c0efafe30249c8164cd5bc1d3ff5d706e265ab0bc): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"c859afba4364b7497680692c0efafe30249c8164cd5bc1d3ff5d706e265ab0bc" Netns:"/var/run/netns/0bfe778e-d571-43a7-a9e2-e75a1cc6ef52" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=c859afba4364b7497680692c0efafe30249c8164cd5bc1d3ff5d706e265ab0bc;K8S_POD_UID=e9fe061b-61bf-4fa2-88dd-a45fb2799369" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: Multus: [openstack/openstackclient/e9fe061b-61bf-4fa2-88dd-a45fb2799369]: expected pod UID "e9fe061b-61bf-4fa2-88dd-a45fb2799369" but got "58c44471-d442-4736-a649-c762a1c893fa" from Kube API
Feb 03 07:06:32 crc kubenswrapper[4998]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"}
Feb 03 07:06:32 crc kubenswrapper[4998]: >
Feb 03 07:06:32 crc kubenswrapper[4998]: E0203 07:06:32.510700 4998 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=<
Feb 03 07:06:32 crc kubenswrapper[4998]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_openstack_e9fe061b-61bf-4fa2-88dd-a45fb2799369_0(c859afba4364b7497680692c0efafe30249c8164cd5bc1d3ff5d706e265ab0bc): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"c859afba4364b7497680692c0efafe30249c8164cd5bc1d3ff5d706e265ab0bc" Netns:"/var/run/netns/0bfe778e-d571-43a7-a9e2-e75a1cc6ef52" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=c859afba4364b7497680692c0efafe30249c8164cd5bc1d3ff5d706e265ab0bc;K8S_POD_UID=e9fe061b-61bf-4fa2-88dd-a45fb2799369" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: Multus: [openstack/openstackclient/e9fe061b-61bf-4fa2-88dd-a45fb2799369]: expected pod UID "e9fe061b-61bf-4fa2-88dd-a45fb2799369" but got "58c44471-d442-4736-a649-c762a1c893fa" from Kube API
Feb 03 07:06:32 crc kubenswrapper[4998]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"}
Feb 03 07:06:32 crc kubenswrapper[4998]: > pod="openstack/openstackclient"
Feb 03 07:06:32 crc kubenswrapper[4998]: I0203 07:06:32.609635 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9r2st\" (UniqueName: \"kubernetes.io/projected/58c44471-d442-4736-a649-c762a1c893fa-kube-api-access-9r2st\") pod \"openstackclient\" (UID: \"58c44471-d442-4736-a649-c762a1c893fa\") " pod="openstack/openstackclient"
Feb 03 07:06:32 crc kubenswrapper[4998]: I0203 07:06:32.609949 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/58c44471-d442-4736-a649-c762a1c893fa-openstack-config-secret\") pod \"openstackclient\" (UID: \"58c44471-d442-4736-a649-c762a1c893fa\") " pod="openstack/openstackclient"
Feb 03 07:06:32 crc kubenswrapper[4998]: I0203 07:06:32.610167 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/58c44471-d442-4736-a649-c762a1c893fa-openstack-config\") pod \"openstackclient\" (UID: \"58c44471-d442-4736-a649-c762a1c893fa\") " pod="openstack/openstackclient"
Feb 03 07:06:32 crc kubenswrapper[4998]: I0203 07:06:32.610257 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58c44471-d442-4736-a649-c762a1c893fa-combined-ca-bundle\") pod \"openstackclient\" (UID: \"58c44471-d442-4736-a649-c762a1c893fa\") " pod="openstack/openstackclient"
\"58c44471-d442-4736-a649-c762a1c893fa\") " pod="openstack/openstackclient" Feb 03 07:06:32 crc kubenswrapper[4998]: I0203 07:06:32.611870 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/58c44471-d442-4736-a649-c762a1c893fa-openstack-config\") pod \"openstackclient\" (UID: \"58c44471-d442-4736-a649-c762a1c893fa\") " pod="openstack/openstackclient" Feb 03 07:06:32 crc kubenswrapper[4998]: I0203 07:06:32.614393 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/58c44471-d442-4736-a649-c762a1c893fa-openstack-config-secret\") pod \"openstackclient\" (UID: \"58c44471-d442-4736-a649-c762a1c893fa\") " pod="openstack/openstackclient" Feb 03 07:06:32 crc kubenswrapper[4998]: I0203 07:06:32.615127 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58c44471-d442-4736-a649-c762a1c893fa-combined-ca-bundle\") pod \"openstackclient\" (UID: \"58c44471-d442-4736-a649-c762a1c893fa\") " pod="openstack/openstackclient" Feb 03 07:06:32 crc kubenswrapper[4998]: I0203 07:06:32.631244 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9r2st\" (UniqueName: \"kubernetes.io/projected/58c44471-d442-4736-a649-c762a1c893fa-kube-api-access-9r2st\") pod \"openstackclient\" (UID: \"58c44471-d442-4736-a649-c762a1c893fa\") " pod="openstack/openstackclient" Feb 03 07:06:32 crc kubenswrapper[4998]: I0203 07:06:32.749909 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Feb 03 07:06:33 crc kubenswrapper[4998]: I0203 07:06:33.119691 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Feb 03 07:06:33 crc kubenswrapper[4998]: I0203 07:06:33.120076 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ecd25f56-731a-4b58-837d-7d81dc9f595e","Type":"ContainerStarted","Data":"232bbb0f6faa06716e49e70604c13f8b54cf7576fa7c9e4b6b3cd621731e2ac5"} Feb 03 07:06:33 crc kubenswrapper[4998]: I0203 07:06:33.142287 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Feb 03 07:06:33 crc kubenswrapper[4998]: I0203 07:06:33.145265 4998 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="e9fe061b-61bf-4fa2-88dd-a45fb2799369" podUID="58c44471-d442-4736-a649-c762a1c893fa" Feb 03 07:06:33 crc kubenswrapper[4998]: I0203 07:06:33.147057 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.147041611 podStartE2EDuration="3.147041611s" podCreationTimestamp="2026-02-03 07:06:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:06:33.138120646 +0000 UTC m=+1231.424814472" watchObservedRunningTime="2026-02-03 07:06:33.147041611 +0000 UTC m=+1231.433735407" Feb 03 07:06:33 crc kubenswrapper[4998]: I0203 07:06:33.211836 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Feb 03 07:06:33 crc kubenswrapper[4998]: W0203 07:06:33.215585 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod58c44471_d442_4736_a649_c762a1c893fa.slice/crio-698e8214e182d5e029cff4fab0bf5177e2efe31408ba433e37cae205125efc01 WatchSource:0}: Error finding container 698e8214e182d5e029cff4fab0bf5177e2efe31408ba433e37cae205125efc01: Status 404 returned error can't find the container with id 698e8214e182d5e029cff4fab0bf5177e2efe31408ba433e37cae205125efc01 Feb 03 07:06:33 crc kubenswrapper[4998]: I0203 07:06:33.218486 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e9fe061b-61bf-4fa2-88dd-a45fb2799369-openstack-config-secret\") pod \"e9fe061b-61bf-4fa2-88dd-a45fb2799369\" (UID: \"e9fe061b-61bf-4fa2-88dd-a45fb2799369\") " Feb 03 07:06:33 crc kubenswrapper[4998]: I0203 07:06:33.218629 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rv7gq\" (UniqueName: \"kubernetes.io/projected/e9fe061b-61bf-4fa2-88dd-a45fb2799369-kube-api-access-rv7gq\") pod \"e9fe061b-61bf-4fa2-88dd-a45fb2799369\" (UID: \"e9fe061b-61bf-4fa2-88dd-a45fb2799369\") " Feb 03 07:06:33 crc kubenswrapper[4998]: I0203 07:06:33.218766 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e9fe061b-61bf-4fa2-88dd-a45fb2799369-openstack-config\") pod \"e9fe061b-61bf-4fa2-88dd-a45fb2799369\" (UID: \"e9fe061b-61bf-4fa2-88dd-a45fb2799369\") " Feb 03 07:06:33 crc kubenswrapper[4998]: I0203 07:06:33.218918 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9fe061b-61bf-4fa2-88dd-a45fb2799369-combined-ca-bundle\") pod \"e9fe061b-61bf-4fa2-88dd-a45fb2799369\" (UID: \"e9fe061b-61bf-4fa2-88dd-a45fb2799369\") " Feb 03 07:06:33 crc kubenswrapper[4998]: I0203 07:06:33.219274 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9fe061b-61bf-4fa2-88dd-a45fb2799369-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "e9fe061b-61bf-4fa2-88dd-a45fb2799369" (UID: "e9fe061b-61bf-4fa2-88dd-a45fb2799369"). InnerVolumeSpecName "openstack-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:06:33 crc kubenswrapper[4998]: I0203 07:06:33.219502 4998 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e9fe061b-61bf-4fa2-88dd-a45fb2799369-openstack-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:33 crc kubenswrapper[4998]: I0203 07:06:33.225852 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9fe061b-61bf-4fa2-88dd-a45fb2799369-kube-api-access-rv7gq" (OuterVolumeSpecName: "kube-api-access-rv7gq") pod "e9fe061b-61bf-4fa2-88dd-a45fb2799369" (UID: "e9fe061b-61bf-4fa2-88dd-a45fb2799369"). InnerVolumeSpecName "kube-api-access-rv7gq". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:06:33 crc kubenswrapper[4998]: I0203 07:06:33.225876 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9fe061b-61bf-4fa2-88dd-a45fb2799369-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e9fe061b-61bf-4fa2-88dd-a45fb2799369" (UID: "e9fe061b-61bf-4fa2-88dd-a45fb2799369"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:06:33 crc kubenswrapper[4998]: I0203 07:06:33.225982 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9fe061b-61bf-4fa2-88dd-a45fb2799369-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "e9fe061b-61bf-4fa2-88dd-a45fb2799369" (UID: "e9fe061b-61bf-4fa2-88dd-a45fb2799369"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:06:33 crc kubenswrapper[4998]: I0203 07:06:33.321543 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rv7gq\" (UniqueName: \"kubernetes.io/projected/e9fe061b-61bf-4fa2-88dd-a45fb2799369-kube-api-access-rv7gq\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:33 crc kubenswrapper[4998]: I0203 07:06:33.321595 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9fe061b-61bf-4fa2-88dd-a45fb2799369-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:33 crc kubenswrapper[4998]: I0203 07:06:33.321609 4998 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e9fe061b-61bf-4fa2-88dd-a45fb2799369-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:34 crc kubenswrapper[4998]: I0203 07:06:34.129505 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"58c44471-d442-4736-a649-c762a1c893fa","Type":"ContainerStarted","Data":"698e8214e182d5e029cff4fab0bf5177e2efe31408ba433e37cae205125efc01"} Feb 03 07:06:34 crc kubenswrapper[4998]: I0203 07:06:34.129721 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Feb 03 07:06:34 crc kubenswrapper[4998]: I0203 07:06:34.141408 4998 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="e9fe061b-61bf-4fa2-88dd-a45fb2799369" podUID="58c44471-d442-4736-a649-c762a1c893fa" Feb 03 07:06:34 crc kubenswrapper[4998]: I0203 07:06:34.447996 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9fe061b-61bf-4fa2-88dd-a45fb2799369" path="/var/lib/kubelet/pods/e9fe061b-61bf-4fa2-88dd-a45fb2799369/volumes" Feb 03 07:06:35 crc kubenswrapper[4998]: I0203 07:06:35.473655 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Feb 03 07:06:35 crc kubenswrapper[4998]: I0203 07:06:35.788482 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-87bbz"] Feb 03 07:06:35 crc kubenswrapper[4998]: I0203 07:06:35.789956 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-87bbz" Feb 03 07:06:35 crc kubenswrapper[4998]: I0203 07:06:35.801996 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-87bbz"] Feb 03 07:06:35 crc kubenswrapper[4998]: I0203 07:06:35.881347 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vjpw\" (UniqueName: \"kubernetes.io/projected/c3621768-9f07-459e-9d47-afd14d36900f-kube-api-access-7vjpw\") pod \"nova-api-db-create-87bbz\" (UID: \"c3621768-9f07-459e-9d47-afd14d36900f\") " pod="openstack/nova-api-db-create-87bbz" Feb 03 07:06:35 crc kubenswrapper[4998]: I0203 07:06:35.881537 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c3621768-9f07-459e-9d47-afd14d36900f-operator-scripts\") pod \"nova-api-db-create-87bbz\" (UID: \"c3621768-9f07-459e-9d47-afd14d36900f\") " pod="openstack/nova-api-db-create-87bbz" Feb 03 07:06:35 crc kubenswrapper[4998]: I0203 07:06:35.983033 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c3621768-9f07-459e-9d47-afd14d36900f-operator-scripts\") pod \"nova-api-db-create-87bbz\" (UID: \"c3621768-9f07-459e-9d47-afd14d36900f\") " pod="openstack/nova-api-db-create-87bbz" Feb 03 07:06:35 crc kubenswrapper[4998]: I0203 07:06:35.983109 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vjpw\" (UniqueName: \"kubernetes.io/projected/c3621768-9f07-459e-9d47-afd14d36900f-kube-api-access-7vjpw\") pod \"nova-api-db-create-87bbz\" (UID: \"c3621768-9f07-459e-9d47-afd14d36900f\") " pod="openstack/nova-api-db-create-87bbz" Feb 03 07:06:35 crc kubenswrapper[4998]: I0203 07:06:35.984095 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c3621768-9f07-459e-9d47-afd14d36900f-operator-scripts\") pod \"nova-api-db-create-87bbz\" (UID: \"c3621768-9f07-459e-9d47-afd14d36900f\") " pod="openstack/nova-api-db-create-87bbz" Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.014357 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vjpw\" (UniqueName: \"kubernetes.io/projected/c3621768-9f07-459e-9d47-afd14d36900f-kube-api-access-7vjpw\") pod \"nova-api-db-create-87bbz\" (UID: 
\"c3621768-9f07-459e-9d47-afd14d36900f\") " pod="openstack/nova-api-db-create-87bbz" Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.073970 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-d2c0-account-create-update-d7mgx"] Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.075526 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-d2c0-account-create-update-d7mgx" Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.077331 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.087241 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-d2c0-account-create-update-d7mgx"] Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.112340 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-88lhc"] Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.113807 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-88lhc" Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.113927 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-87bbz" Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.135246 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-88lhc"] Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.188903 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d815447-a2cd-470c-84d9-431e9971596a-operator-scripts\") pod \"nova-cell0-db-create-88lhc\" (UID: \"2d815447-a2cd-470c-84d9-431e9971596a\") " pod="openstack/nova-cell0-db-create-88lhc" Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.188966 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rpp7f\" (UniqueName: \"kubernetes.io/projected/2d815447-a2cd-470c-84d9-431e9971596a-kube-api-access-rpp7f\") pod \"nova-cell0-db-create-88lhc\" (UID: \"2d815447-a2cd-470c-84d9-431e9971596a\") " pod="openstack/nova-cell0-db-create-88lhc" Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.189002 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5888222d-3a6a-4e9c-8dd2-543d2029826e-operator-scripts\") pod \"nova-api-d2c0-account-create-update-d7mgx\" (UID: \"5888222d-3a6a-4e9c-8dd2-543d2029826e\") " pod="openstack/nova-api-d2c0-account-create-update-d7mgx" Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.189084 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bgw9x\" (UniqueName: \"kubernetes.io/projected/5888222d-3a6a-4e9c-8dd2-543d2029826e-kube-api-access-bgw9x\") pod \"nova-api-d2c0-account-create-update-d7mgx\" (UID: \"5888222d-3a6a-4e9c-8dd2-543d2029826e\") " pod="openstack/nova-api-d2c0-account-create-update-d7mgx" Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.198927 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-5hsmh"] Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.201941 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-5hsmh" Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.220070 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-5hsmh"] Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.290891 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bgw9x\" (UniqueName: \"kubernetes.io/projected/5888222d-3a6a-4e9c-8dd2-543d2029826e-kube-api-access-bgw9x\") pod \"nova-api-d2c0-account-create-update-d7mgx\" (UID: \"5888222d-3a6a-4e9c-8dd2-543d2029826e\") " pod="openstack/nova-api-d2c0-account-create-update-d7mgx" Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.291245 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58x8p\" (UniqueName: \"kubernetes.io/projected/f1ff745e-64b7-4f3d-8cf0-69dd338f1996-kube-api-access-58x8p\") pod \"nova-cell1-db-create-5hsmh\" (UID: \"f1ff745e-64b7-4f3d-8cf0-69dd338f1996\") " pod="openstack/nova-cell1-db-create-5hsmh" Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.291284 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d815447-a2cd-470c-84d9-431e9971596a-operator-scripts\") pod \"nova-cell0-db-create-88lhc\" (UID: \"2d815447-a2cd-470c-84d9-431e9971596a\") " pod="openstack/nova-cell0-db-create-88lhc" Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.291354 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rpp7f\" (UniqueName: \"kubernetes.io/projected/2d815447-a2cd-470c-84d9-431e9971596a-kube-api-access-rpp7f\") pod \"nova-cell0-db-create-88lhc\" (UID: \"2d815447-a2cd-470c-84d9-431e9971596a\") " pod="openstack/nova-cell0-db-create-88lhc" Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.291392 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5888222d-3a6a-4e9c-8dd2-543d2029826e-operator-scripts\") pod \"nova-api-d2c0-account-create-update-d7mgx\" (UID: \"5888222d-3a6a-4e9c-8dd2-543d2029826e\") " pod="openstack/nova-api-d2c0-account-create-update-d7mgx" Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.291434 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f1ff745e-64b7-4f3d-8cf0-69dd338f1996-operator-scripts\") pod \"nova-cell1-db-create-5hsmh\" (UID: \"f1ff745e-64b7-4f3d-8cf0-69dd338f1996\") " pod="openstack/nova-cell1-db-create-5hsmh" Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.292857 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5888222d-3a6a-4e9c-8dd2-543d2029826e-operator-scripts\") pod \"nova-api-d2c0-account-create-update-d7mgx\" (UID: \"5888222d-3a6a-4e9c-8dd2-543d2029826e\") " pod="openstack/nova-api-d2c0-account-create-update-d7mgx" Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.296833 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d815447-a2cd-470c-84d9-431e9971596a-operator-scripts\") pod \"nova-cell0-db-create-88lhc\" (UID: \"2d815447-a2cd-470c-84d9-431e9971596a\") " pod="openstack/nova-cell0-db-create-88lhc" Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.315514 4998 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-65da-account-create-update-6hsv7"] Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.316900 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-65da-account-create-update-6hsv7" Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.321196 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.324566 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rpp7f\" (UniqueName: \"kubernetes.io/projected/2d815447-a2cd-470c-84d9-431e9971596a-kube-api-access-rpp7f\") pod \"nova-cell0-db-create-88lhc\" (UID: \"2d815447-a2cd-470c-84d9-431e9971596a\") " pod="openstack/nova-cell0-db-create-88lhc" Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.328353 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bgw9x\" (UniqueName: \"kubernetes.io/projected/5888222d-3a6a-4e9c-8dd2-543d2029826e-kube-api-access-bgw9x\") pod \"nova-api-d2c0-account-create-update-d7mgx\" (UID: \"5888222d-3a6a-4e9c-8dd2-543d2029826e\") " pod="openstack/nova-api-d2c0-account-create-update-d7mgx" Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.347953 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-65da-account-create-update-6hsv7"] Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.393160 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f1ff745e-64b7-4f3d-8cf0-69dd338f1996-operator-scripts\") pod \"nova-cell1-db-create-5hsmh\" (UID: \"f1ff745e-64b7-4f3d-8cf0-69dd338f1996\") " pod="openstack/nova-cell1-db-create-5hsmh" Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.393928 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f1ff745e-64b7-4f3d-8cf0-69dd338f1996-operator-scripts\") pod \"nova-cell1-db-create-5hsmh\" (UID: \"f1ff745e-64b7-4f3d-8cf0-69dd338f1996\") " pod="openstack/nova-cell1-db-create-5hsmh" Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.393229 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q6rsc\" (UniqueName: \"kubernetes.io/projected/fa049756-1151-4b26-8f4a-669b4bf01ac3-kube-api-access-q6rsc\") pod \"nova-cell0-65da-account-create-update-6hsv7\" (UID: \"fa049756-1151-4b26-8f4a-669b4bf01ac3\") " pod="openstack/nova-cell0-65da-account-create-update-6hsv7" Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.394049 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fa049756-1151-4b26-8f4a-669b4bf01ac3-operator-scripts\") pod \"nova-cell0-65da-account-create-update-6hsv7\" (UID: \"fa049756-1151-4b26-8f4a-669b4bf01ac3\") " pod="openstack/nova-cell0-65da-account-create-update-6hsv7" Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.394126 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58x8p\" (UniqueName: \"kubernetes.io/projected/f1ff745e-64b7-4f3d-8cf0-69dd338f1996-kube-api-access-58x8p\") pod \"nova-cell1-db-create-5hsmh\" (UID: \"f1ff745e-64b7-4f3d-8cf0-69dd338f1996\") " pod="openstack/nova-cell1-db-create-5hsmh" Feb 03 07:06:36 crc 
Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.396495 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-d2c0-account-create-update-d7mgx"
Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.430003 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-88lhc"
Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.433992 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58x8p\" (UniqueName: \"kubernetes.io/projected/f1ff745e-64b7-4f3d-8cf0-69dd338f1996-kube-api-access-58x8p\") pod \"nova-cell1-db-create-5hsmh\" (UID: \"f1ff745e-64b7-4f3d-8cf0-69dd338f1996\") " pod="openstack/nova-cell1-db-create-5hsmh"
Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.496277 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q6rsc\" (UniqueName: \"kubernetes.io/projected/fa049756-1151-4b26-8f4a-669b4bf01ac3-kube-api-access-q6rsc\") pod \"nova-cell0-65da-account-create-update-6hsv7\" (UID: \"fa049756-1151-4b26-8f4a-669b4bf01ac3\") " pod="openstack/nova-cell0-65da-account-create-update-6hsv7"
Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.496720 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fa049756-1151-4b26-8f4a-669b4bf01ac3-operator-scripts\") pod \"nova-cell0-65da-account-create-update-6hsv7\" (UID: \"fa049756-1151-4b26-8f4a-669b4bf01ac3\") " pod="openstack/nova-cell0-65da-account-create-update-6hsv7"
Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.501578 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fa049756-1151-4b26-8f4a-669b4bf01ac3-operator-scripts\") pod \"nova-cell0-65da-account-create-update-6hsv7\" (UID: \"fa049756-1151-4b26-8f4a-669b4bf01ac3\") " pod="openstack/nova-cell0-65da-account-create-update-6hsv7"
Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.502545 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-9147-account-create-update-75s8j"]
Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.503891 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-9147-account-create-update-75s8j"
Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.506171 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret"
Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.529093 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q6rsc\" (UniqueName: \"kubernetes.io/projected/fa049756-1151-4b26-8f4a-669b4bf01ac3-kube-api-access-q6rsc\") pod \"nova-cell0-65da-account-create-update-6hsv7\" (UID: \"fa049756-1151-4b26-8f4a-669b4bf01ac3\") " pod="openstack/nova-cell0-65da-account-create-update-6hsv7"
Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.533874 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-9147-account-create-update-75s8j"]
Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.580093 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-5hsmh"
Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.598668 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5698\" (UniqueName: \"kubernetes.io/projected/5919579f-48cb-4d2f-94b6-6277af39d739-kube-api-access-b5698\") pod \"nova-cell1-9147-account-create-update-75s8j\" (UID: \"5919579f-48cb-4d2f-94b6-6277af39d739\") " pod="openstack/nova-cell1-9147-account-create-update-75s8j"
Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.598725 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5919579f-48cb-4d2f-94b6-6277af39d739-operator-scripts\") pod \"nova-cell1-9147-account-create-update-75s8j\" (UID: \"5919579f-48cb-4d2f-94b6-6277af39d739\") " pod="openstack/nova-cell1-9147-account-create-update-75s8j"
Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.701113 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5698\" (UniqueName: \"kubernetes.io/projected/5919579f-48cb-4d2f-94b6-6277af39d739-kube-api-access-b5698\") pod \"nova-cell1-9147-account-create-update-75s8j\" (UID: \"5919579f-48cb-4d2f-94b6-6277af39d739\") " pod="openstack/nova-cell1-9147-account-create-update-75s8j"
Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.701385 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5919579f-48cb-4d2f-94b6-6277af39d739-operator-scripts\") pod \"nova-cell1-9147-account-create-update-75s8j\" (UID: \"5919579f-48cb-4d2f-94b6-6277af39d739\") " pod="openstack/nova-cell1-9147-account-create-update-75s8j"
Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.702126 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5919579f-48cb-4d2f-94b6-6277af39d739-operator-scripts\") pod \"nova-cell1-9147-account-create-update-75s8j\" (UID: \"5919579f-48cb-4d2f-94b6-6277af39d739\") " pod="openstack/nova-cell1-9147-account-create-update-75s8j"
Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.726928 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5698\" (UniqueName: \"kubernetes.io/projected/5919579f-48cb-4d2f-94b6-6277af39d739-kube-api-access-b5698\") pod \"nova-cell1-9147-account-create-update-75s8j\" (UID: \"5919579f-48cb-4d2f-94b6-6277af39d739\") " pod="openstack/nova-cell1-9147-account-create-update-75s8j"
Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.731395 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-65da-account-create-update-6hsv7"
Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.731610 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-87bbz"]
Feb 03 07:06:36 crc kubenswrapper[4998]: W0203 07:06:36.757452 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc3621768_9f07_459e_9d47_afd14d36900f.slice/crio-d5f1ff6ef78b0f317bf45e9582d70b579832feb35229ef57fd14b98da71bed7d WatchSource:0}: Error finding container d5f1ff6ef78b0f317bf45e9582d70b579832feb35229ef57fd14b98da71bed7d: Status 404 returned error can't find the container with id d5f1ff6ef78b0f317bf45e9582d70b579832feb35229ef57fd14b98da71bed7d
Feb 03 07:06:36 crc kubenswrapper[4998]: I0203 07:06:36.829294 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-9147-account-create-update-75s8j"
Feb 03 07:06:37 crc kubenswrapper[4998]: I0203 07:06:37.090568 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-88lhc"]
Feb 03 07:06:37 crc kubenswrapper[4998]: W0203 07:06:37.097429 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2d815447_a2cd_470c_84d9_431e9971596a.slice/crio-f85c1e51bff5deaa720152544f9560ef0cf3a5fc66b3653228cdaa22a19d9fbb WatchSource:0}: Error finding container f85c1e51bff5deaa720152544f9560ef0cf3a5fc66b3653228cdaa22a19d9fbb: Status 404 returned error can't find the container with id f85c1e51bff5deaa720152544f9560ef0cf3a5fc66b3653228cdaa22a19d9fbb
Feb 03 07:06:37 crc kubenswrapper[4998]: I0203 07:06:37.145720 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-d2c0-account-create-update-d7mgx"]
Feb 03 07:06:37 crc kubenswrapper[4998]: I0203 07:06:37.163767 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-5hsmh"]
Feb 03 07:06:37 crc kubenswrapper[4998]: I0203 07:06:37.185978 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-87bbz" event={"ID":"c3621768-9f07-459e-9d47-afd14d36900f","Type":"ContainerStarted","Data":"edcf28cbacee4673a08a4cd9b9c43c8f3f2b423ab921a62e22cbcc434d14d66c"}
Feb 03 07:06:37 crc kubenswrapper[4998]: I0203 07:06:37.186018 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-87bbz" event={"ID":"c3621768-9f07-459e-9d47-afd14d36900f","Type":"ContainerStarted","Data":"d5f1ff6ef78b0f317bf45e9582d70b579832feb35229ef57fd14b98da71bed7d"}
Feb 03 07:06:37 crc kubenswrapper[4998]: I0203 07:06:37.223520 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-88lhc" event={"ID":"2d815447-a2cd-470c-84d9-431e9971596a","Type":"ContainerStarted","Data":"f85c1e51bff5deaa720152544f9560ef0cf3a5fc66b3653228cdaa22a19d9fbb"}
Feb 03 07:06:37 crc kubenswrapper[4998]: I0203 07:06:37.348399 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-db-create-87bbz" podStartSLOduration=2.34838178 podStartE2EDuration="2.34838178s" podCreationTimestamp="2026-02-03 07:06:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:06:37.217297474 +0000 UTC m=+1235.503991290" watchObservedRunningTime="2026-02-03 07:06:37.34838178 +0000 UTC m=+1235.635075586"
Feb 03 07:06:37 crc
kubenswrapper[4998]: I0203 07:06:37.354529 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-65da-account-create-update-6hsv7"] Feb 03 07:06:37 crc kubenswrapper[4998]: I0203 07:06:37.444493 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-9147-account-create-update-75s8j"] Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.161367 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-d9fff78f-v2ccx"] Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.163517 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-d9fff78f-v2ccx" Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.168640 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.168733 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.168868 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.179378 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-d9fff78f-v2ccx"] Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.232676 4998 generic.go:334] "Generic (PLEG): container finished" podID="2d815447-a2cd-470c-84d9-431e9971596a" containerID="97552a93fea48430bfcc329b6698e838a5b3ebfaa2c584142d4f3931b752d8a6" exitCode=0 Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.232744 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-88lhc" event={"ID":"2d815447-a2cd-470c-84d9-431e9971596a","Type":"ContainerDied","Data":"97552a93fea48430bfcc329b6698e838a5b3ebfaa2c584142d4f3931b752d8a6"} Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.234603 4998 generic.go:334] "Generic (PLEG): container finished" podID="c3621768-9f07-459e-9d47-afd14d36900f" containerID="edcf28cbacee4673a08a4cd9b9c43c8f3f2b423ab921a62e22cbcc434d14d66c" exitCode=0 Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.234650 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-87bbz" event={"ID":"c3621768-9f07-459e-9d47-afd14d36900f","Type":"ContainerDied","Data":"edcf28cbacee4673a08a4cd9b9c43c8f3f2b423ab921a62e22cbcc434d14d66c"} Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.236841 4998 generic.go:334] "Generic (PLEG): container finished" podID="f1ff745e-64b7-4f3d-8cf0-69dd338f1996" containerID="6a8eba1308322b6d1ba927de343d503c99e8ffd925e44e0eeea6bef5b588cd4e" exitCode=0 Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.236920 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-5hsmh" event={"ID":"f1ff745e-64b7-4f3d-8cf0-69dd338f1996","Type":"ContainerDied","Data":"6a8eba1308322b6d1ba927de343d503c99e8ffd925e44e0eeea6bef5b588cd4e"} Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.236945 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-5hsmh" event={"ID":"f1ff745e-64b7-4f3d-8cf0-69dd338f1996","Type":"ContainerStarted","Data":"f22b4f763f747fbf5f8553dec3cbbf25b9dd846120cb7895345102b5689981f6"} Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.243261 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/88a17ed1-2cc7-488d-a325-67f99d3a12d8-log-httpd\") pod \"swift-proxy-d9fff78f-v2ccx\" (UID: \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\") " pod="openstack/swift-proxy-d9fff78f-v2ccx" Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.243280 4998 generic.go:334] "Generic (PLEG): container finished" podID="5919579f-48cb-4d2f-94b6-6277af39d739" containerID="55db44c6ca814e57400d077a9a3a1ad5d68bd4413af30e653e178427453327c3" exitCode=0 Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.243353 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-9147-account-create-update-75s8j" event={"ID":"5919579f-48cb-4d2f-94b6-6277af39d739","Type":"ContainerDied","Data":"55db44c6ca814e57400d077a9a3a1ad5d68bd4413af30e653e178427453327c3"} Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.243299 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/88a17ed1-2cc7-488d-a325-67f99d3a12d8-public-tls-certs\") pod \"swift-proxy-d9fff78f-v2ccx\" (UID: \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\") " pod="openstack/swift-proxy-d9fff78f-v2ccx" Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.243378 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-9147-account-create-update-75s8j" event={"ID":"5919579f-48cb-4d2f-94b6-6277af39d739","Type":"ContainerStarted","Data":"ca975df52a83466ccaf4bbe1ca5fe24469e6717d900f88520f19d253b59166b2"} Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.243581 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/88a17ed1-2cc7-488d-a325-67f99d3a12d8-etc-swift\") pod \"swift-proxy-d9fff78f-v2ccx\" (UID: \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\") " pod="openstack/swift-proxy-d9fff78f-v2ccx" Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.243653 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5tjkz\" (UniqueName: \"kubernetes.io/projected/88a17ed1-2cc7-488d-a325-67f99d3a12d8-kube-api-access-5tjkz\") pod \"swift-proxy-d9fff78f-v2ccx\" (UID: \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\") " pod="openstack/swift-proxy-d9fff78f-v2ccx" Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.243702 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/88a17ed1-2cc7-488d-a325-67f99d3a12d8-internal-tls-certs\") pod \"swift-proxy-d9fff78f-v2ccx\" (UID: \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\") " pod="openstack/swift-proxy-d9fff78f-v2ccx" Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.243797 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88a17ed1-2cc7-488d-a325-67f99d3a12d8-combined-ca-bundle\") pod \"swift-proxy-d9fff78f-v2ccx\" (UID: \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\") " pod="openstack/swift-proxy-d9fff78f-v2ccx" Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.244510 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/88a17ed1-2cc7-488d-a325-67f99d3a12d8-run-httpd\") pod \"swift-proxy-d9fff78f-v2ccx\" (UID: \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\") " pod="openstack/swift-proxy-d9fff78f-v2ccx" Feb 03 
07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.244702 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88a17ed1-2cc7-488d-a325-67f99d3a12d8-config-data\") pod \"swift-proxy-d9fff78f-v2ccx\" (UID: \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\") " pod="openstack/swift-proxy-d9fff78f-v2ccx" Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.245517 4998 generic.go:334] "Generic (PLEG): container finished" podID="fa049756-1151-4b26-8f4a-669b4bf01ac3" containerID="d8080e0f2ff14ade20f4e6cadd0a135007e045e6b2a43803d336fb15c52053a1" exitCode=0 Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.245562 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-65da-account-create-update-6hsv7" event={"ID":"fa049756-1151-4b26-8f4a-669b4bf01ac3","Type":"ContainerDied","Data":"d8080e0f2ff14ade20f4e6cadd0a135007e045e6b2a43803d336fb15c52053a1"} Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.245589 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-65da-account-create-update-6hsv7" event={"ID":"fa049756-1151-4b26-8f4a-669b4bf01ac3","Type":"ContainerStarted","Data":"efd02664585361346757e0b4e5d7f4b6b66f239eb7ae57039deae4049ad2a994"} Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.255203 4998 generic.go:334] "Generic (PLEG): container finished" podID="5888222d-3a6a-4e9c-8dd2-543d2029826e" containerID="f564ff5de6b936819397c766913066c33690833aafd594b13d539dbbfd6cdeff" exitCode=0 Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.255261 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-d2c0-account-create-update-d7mgx" event={"ID":"5888222d-3a6a-4e9c-8dd2-543d2029826e","Type":"ContainerDied","Data":"f564ff5de6b936819397c766913066c33690833aafd594b13d539dbbfd6cdeff"} Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.255292 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-d2c0-account-create-update-d7mgx" event={"ID":"5888222d-3a6a-4e9c-8dd2-543d2029826e","Type":"ContainerStarted","Data":"700bf450c20d4d3fda24fb4e9c2760f5743ea62e5741c7ac9386560f63aceb68"} Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.346070 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88a17ed1-2cc7-488d-a325-67f99d3a12d8-combined-ca-bundle\") pod \"swift-proxy-d9fff78f-v2ccx\" (UID: \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\") " pod="openstack/swift-proxy-d9fff78f-v2ccx" Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.346154 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/88a17ed1-2cc7-488d-a325-67f99d3a12d8-run-httpd\") pod \"swift-proxy-d9fff78f-v2ccx\" (UID: \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\") " pod="openstack/swift-proxy-d9fff78f-v2ccx" Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.346237 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88a17ed1-2cc7-488d-a325-67f99d3a12d8-config-data\") pod \"swift-proxy-d9fff78f-v2ccx\" (UID: \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\") " pod="openstack/swift-proxy-d9fff78f-v2ccx" Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.346261 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/88a17ed1-2cc7-488d-a325-67f99d3a12d8-log-httpd\") pod \"swift-proxy-d9fff78f-v2ccx\" (UID: \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\") " pod="openstack/swift-proxy-d9fff78f-v2ccx" Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.346283 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/88a17ed1-2cc7-488d-a325-67f99d3a12d8-public-tls-certs\") pod \"swift-proxy-d9fff78f-v2ccx\" (UID: \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\") " pod="openstack/swift-proxy-d9fff78f-v2ccx" Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.346366 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/88a17ed1-2cc7-488d-a325-67f99d3a12d8-etc-swift\") pod \"swift-proxy-d9fff78f-v2ccx\" (UID: \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\") " pod="openstack/swift-proxy-d9fff78f-v2ccx" Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.346398 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5tjkz\" (UniqueName: \"kubernetes.io/projected/88a17ed1-2cc7-488d-a325-67f99d3a12d8-kube-api-access-5tjkz\") pod \"swift-proxy-d9fff78f-v2ccx\" (UID: \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\") " pod="openstack/swift-proxy-d9fff78f-v2ccx" Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.346423 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/88a17ed1-2cc7-488d-a325-67f99d3a12d8-internal-tls-certs\") pod \"swift-proxy-d9fff78f-v2ccx\" (UID: \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\") " pod="openstack/swift-proxy-d9fff78f-v2ccx" Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.346803 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/88a17ed1-2cc7-488d-a325-67f99d3a12d8-run-httpd\") pod \"swift-proxy-d9fff78f-v2ccx\" (UID: \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\") " pod="openstack/swift-proxy-d9fff78f-v2ccx" Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.346812 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/88a17ed1-2cc7-488d-a325-67f99d3a12d8-log-httpd\") pod \"swift-proxy-d9fff78f-v2ccx\" (UID: \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\") " pod="openstack/swift-proxy-d9fff78f-v2ccx" Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.355605 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/88a17ed1-2cc7-488d-a325-67f99d3a12d8-etc-swift\") pod \"swift-proxy-d9fff78f-v2ccx\" (UID: \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\") " pod="openstack/swift-proxy-d9fff78f-v2ccx" Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.356789 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88a17ed1-2cc7-488d-a325-67f99d3a12d8-config-data\") pod \"swift-proxy-d9fff78f-v2ccx\" (UID: \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\") " pod="openstack/swift-proxy-d9fff78f-v2ccx" Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.357552 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/88a17ed1-2cc7-488d-a325-67f99d3a12d8-public-tls-certs\") pod \"swift-proxy-d9fff78f-v2ccx\" (UID: 
\"88a17ed1-2cc7-488d-a325-67f99d3a12d8\") " pod="openstack/swift-proxy-d9fff78f-v2ccx" Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.358286 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/88a17ed1-2cc7-488d-a325-67f99d3a12d8-internal-tls-certs\") pod \"swift-proxy-d9fff78f-v2ccx\" (UID: \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\") " pod="openstack/swift-proxy-d9fff78f-v2ccx" Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.363652 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88a17ed1-2cc7-488d-a325-67f99d3a12d8-combined-ca-bundle\") pod \"swift-proxy-d9fff78f-v2ccx\" (UID: \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\") " pod="openstack/swift-proxy-d9fff78f-v2ccx" Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.370922 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5tjkz\" (UniqueName: \"kubernetes.io/projected/88a17ed1-2cc7-488d-a325-67f99d3a12d8-kube-api-access-5tjkz\") pod \"swift-proxy-d9fff78f-v2ccx\" (UID: \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\") " pod="openstack/swift-proxy-d9fff78f-v2ccx" Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.486340 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-d9fff78f-v2ccx" Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.778055 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.778650 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c2f02096-123d-472d-b9b8-0083f07a1c39" containerName="ceilometer-central-agent" containerID="cri-o://276b273455e2be4c64a0b5e8c8bc0ac251a5dc82b27ff29f590dd638ceae4342" gracePeriod=30 Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.779425 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c2f02096-123d-472d-b9b8-0083f07a1c39" containerName="proxy-httpd" containerID="cri-o://d06782cbd10ecb30fa6f586c2374c1f4483c16ae1abd0337db2c800abbcc66c9" gracePeriod=30 Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.779507 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c2f02096-123d-472d-b9b8-0083f07a1c39" containerName="sg-core" containerID="cri-o://d9ed4bfead5f617e4d802af2a38ec102e9c352fa063909946be14e658336963b" gracePeriod=30 Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.779559 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c2f02096-123d-472d-b9b8-0083f07a1c39" containerName="ceilometer-notification-agent" containerID="cri-o://e7f498d8f6533ce735923bd13aec435c837c30da33742475080c9a11194c318d" gracePeriod=30 Feb 03 07:06:38 crc kubenswrapper[4998]: I0203 07:06:38.791228 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Feb 03 07:06:39 crc kubenswrapper[4998]: I0203 07:06:39.080947 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-d9fff78f-v2ccx"] Feb 03 07:06:39 crc kubenswrapper[4998]: I0203 07:06:39.267520 4998 generic.go:334] "Generic (PLEG): container finished" podID="c2f02096-123d-472d-b9b8-0083f07a1c39" containerID="d06782cbd10ecb30fa6f586c2374c1f4483c16ae1abd0337db2c800abbcc66c9" exitCode=0 
Feb 03 07:06:39 crc kubenswrapper[4998]: I0203 07:06:39.267558 4998 generic.go:334] "Generic (PLEG): container finished" podID="c2f02096-123d-472d-b9b8-0083f07a1c39" containerID="d9ed4bfead5f617e4d802af2a38ec102e9c352fa063909946be14e658336963b" exitCode=2 Feb 03 07:06:39 crc kubenswrapper[4998]: I0203 07:06:39.267601 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c2f02096-123d-472d-b9b8-0083f07a1c39","Type":"ContainerDied","Data":"d06782cbd10ecb30fa6f586c2374c1f4483c16ae1abd0337db2c800abbcc66c9"} Feb 03 07:06:39 crc kubenswrapper[4998]: I0203 07:06:39.267629 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c2f02096-123d-472d-b9b8-0083f07a1c39","Type":"ContainerDied","Data":"d9ed4bfead5f617e4d802af2a38ec102e9c352fa063909946be14e658336963b"} Feb 03 07:06:39 crc kubenswrapper[4998]: I0203 07:06:39.269903 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-d9fff78f-v2ccx" event={"ID":"88a17ed1-2cc7-488d-a325-67f99d3a12d8","Type":"ContainerStarted","Data":"bf548f38c34190d5c5768d28a8c422c39447b2fdc2bef928978e7995fbe7ceec"} Feb 03 07:06:39 crc kubenswrapper[4998]: I0203 07:06:39.627375 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-65da-account-create-update-6hsv7" Feb 03 07:06:39 crc kubenswrapper[4998]: I0203 07:06:39.683722 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q6rsc\" (UniqueName: \"kubernetes.io/projected/fa049756-1151-4b26-8f4a-669b4bf01ac3-kube-api-access-q6rsc\") pod \"fa049756-1151-4b26-8f4a-669b4bf01ac3\" (UID: \"fa049756-1151-4b26-8f4a-669b4bf01ac3\") " Feb 03 07:06:39 crc kubenswrapper[4998]: I0203 07:06:39.683861 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fa049756-1151-4b26-8f4a-669b4bf01ac3-operator-scripts\") pod \"fa049756-1151-4b26-8f4a-669b4bf01ac3\" (UID: \"fa049756-1151-4b26-8f4a-669b4bf01ac3\") " Feb 03 07:06:39 crc kubenswrapper[4998]: I0203 07:06:39.684854 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa049756-1151-4b26-8f4a-669b4bf01ac3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fa049756-1151-4b26-8f4a-669b4bf01ac3" (UID: "fa049756-1151-4b26-8f4a-669b4bf01ac3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:06:39 crc kubenswrapper[4998]: I0203 07:06:39.689011 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa049756-1151-4b26-8f4a-669b4bf01ac3-kube-api-access-q6rsc" (OuterVolumeSpecName: "kube-api-access-q6rsc") pod "fa049756-1151-4b26-8f4a-669b4bf01ac3" (UID: "fa049756-1151-4b26-8f4a-669b4bf01ac3"). InnerVolumeSpecName "kube-api-access-q6rsc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:06:39 crc kubenswrapper[4998]: I0203 07:06:39.785742 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q6rsc\" (UniqueName: \"kubernetes.io/projected/fa049756-1151-4b26-8f4a-669b4bf01ac3-kube-api-access-q6rsc\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:39 crc kubenswrapper[4998]: I0203 07:06:39.786305 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fa049756-1151-4b26-8f4a-669b4bf01ac3-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:40 crc kubenswrapper[4998]: I0203 07:06:40.281497 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-65da-account-create-update-6hsv7" Feb 03 07:06:40 crc kubenswrapper[4998]: I0203 07:06:40.281497 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-65da-account-create-update-6hsv7" event={"ID":"fa049756-1151-4b26-8f4a-669b4bf01ac3","Type":"ContainerDied","Data":"efd02664585361346757e0b4e5d7f4b6b66f239eb7ae57039deae4049ad2a994"} Feb 03 07:06:40 crc kubenswrapper[4998]: I0203 07:06:40.281646 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="efd02664585361346757e0b4e5d7f4b6b66f239eb7ae57039deae4049ad2a994" Feb 03 07:06:40 crc kubenswrapper[4998]: I0203 07:06:40.285528 4998 generic.go:334] "Generic (PLEG): container finished" podID="c2f02096-123d-472d-b9b8-0083f07a1c39" containerID="276b273455e2be4c64a0b5e8c8bc0ac251a5dc82b27ff29f590dd638ceae4342" exitCode=0 Feb 03 07:06:40 crc kubenswrapper[4998]: I0203 07:06:40.285599 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c2f02096-123d-472d-b9b8-0083f07a1c39","Type":"ContainerDied","Data":"276b273455e2be4c64a0b5e8c8bc0ac251a5dc82b27ff29f590dd638ceae4342"} Feb 03 07:06:40 crc kubenswrapper[4998]: I0203 07:06:40.287123 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-d9fff78f-v2ccx" event={"ID":"88a17ed1-2cc7-488d-a325-67f99d3a12d8","Type":"ContainerStarted","Data":"7ac91621be4ad73c5875bb4f2e805c6f9619e9a22ac99cac808fd03dd2ffd425"} Feb 03 07:06:40 crc kubenswrapper[4998]: I0203 07:06:40.287157 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-d9fff78f-v2ccx" event={"ID":"88a17ed1-2cc7-488d-a325-67f99d3a12d8","Type":"ContainerStarted","Data":"8783d1fdb78c71bed87bdaf085a3d90306855381ef2f20915e65a6d24c97055f"} Feb 03 07:06:40 crc kubenswrapper[4998]: I0203 07:06:40.288462 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-d9fff78f-v2ccx" Feb 03 07:06:40 crc kubenswrapper[4998]: I0203 07:06:40.288503 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-d9fff78f-v2ccx" Feb 03 07:06:40 crc kubenswrapper[4998]: I0203 07:06:40.327395 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-d9fff78f-v2ccx" podStartSLOduration=2.327376891 podStartE2EDuration="2.327376891s" podCreationTimestamp="2026-02-03 07:06:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:06:40.311640791 +0000 UTC m=+1238.598334607" watchObservedRunningTime="2026-02-03 07:06:40.327376891 +0000 UTC m=+1238.614070697" Feb 03 07:06:40 crc kubenswrapper[4998]: I0203 07:06:40.701914 4998 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Feb 03 07:06:42 crc kubenswrapper[4998]: I0203 07:06:42.313520 4998 generic.go:334] "Generic (PLEG): container finished" podID="c2f02096-123d-472d-b9b8-0083f07a1c39" containerID="e7f498d8f6533ce735923bd13aec435c837c30da33742475080c9a11194c318d" exitCode=0 Feb 03 07:06:42 crc kubenswrapper[4998]: I0203 07:06:42.313579 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c2f02096-123d-472d-b9b8-0083f07a1c39","Type":"ContainerDied","Data":"e7f498d8f6533ce735923bd13aec435c837c30da33742475080c9a11194c318d"} Feb 03 07:06:45 crc kubenswrapper[4998]: I0203 07:06:45.764627 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-5hsmh" Feb 03 07:06:45 crc kubenswrapper[4998]: I0203 07:06:45.806898 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-87bbz" Feb 03 07:06:45 crc kubenswrapper[4998]: I0203 07:06:45.822666 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-9147-account-create-update-75s8j" Feb 03 07:06:45 crc kubenswrapper[4998]: I0203 07:06:45.843479 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-88lhc" Feb 03 07:06:45 crc kubenswrapper[4998]: I0203 07:06:45.855252 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-d2c0-account-create-update-d7mgx" Feb 03 07:06:45 crc kubenswrapper[4998]: I0203 07:06:45.874936 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:06:45 crc kubenswrapper[4998]: I0203 07:06:45.899661 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-58x8p\" (UniqueName: \"kubernetes.io/projected/f1ff745e-64b7-4f3d-8cf0-69dd338f1996-kube-api-access-58x8p\") pod \"f1ff745e-64b7-4f3d-8cf0-69dd338f1996\" (UID: \"f1ff745e-64b7-4f3d-8cf0-69dd338f1996\") " Feb 03 07:06:45 crc kubenswrapper[4998]: I0203 07:06:45.899839 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c3621768-9f07-459e-9d47-afd14d36900f-operator-scripts\") pod \"c3621768-9f07-459e-9d47-afd14d36900f\" (UID: \"c3621768-9f07-459e-9d47-afd14d36900f\") " Feb 03 07:06:45 crc kubenswrapper[4998]: I0203 07:06:45.899908 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bgw9x\" (UniqueName: \"kubernetes.io/projected/5888222d-3a6a-4e9c-8dd2-543d2029826e-kube-api-access-bgw9x\") pod \"5888222d-3a6a-4e9c-8dd2-543d2029826e\" (UID: \"5888222d-3a6a-4e9c-8dd2-543d2029826e\") " Feb 03 07:06:45 crc kubenswrapper[4998]: I0203 07:06:45.899954 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b5698\" (UniqueName: \"kubernetes.io/projected/5919579f-48cb-4d2f-94b6-6277af39d739-kube-api-access-b5698\") pod \"5919579f-48cb-4d2f-94b6-6277af39d739\" (UID: \"5919579f-48cb-4d2f-94b6-6277af39d739\") " Feb 03 07:06:45 crc kubenswrapper[4998]: I0203 07:06:45.900027 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d815447-a2cd-470c-84d9-431e9971596a-operator-scripts\") pod \"2d815447-a2cd-470c-84d9-431e9971596a\" (UID: 
\"2d815447-a2cd-470c-84d9-431e9971596a\") " Feb 03 07:06:45 crc kubenswrapper[4998]: I0203 07:06:45.900054 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rpp7f\" (UniqueName: \"kubernetes.io/projected/2d815447-a2cd-470c-84d9-431e9971596a-kube-api-access-rpp7f\") pod \"2d815447-a2cd-470c-84d9-431e9971596a\" (UID: \"2d815447-a2cd-470c-84d9-431e9971596a\") " Feb 03 07:06:45 crc kubenswrapper[4998]: I0203 07:06:45.900094 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5919579f-48cb-4d2f-94b6-6277af39d739-operator-scripts\") pod \"5919579f-48cb-4d2f-94b6-6277af39d739\" (UID: \"5919579f-48cb-4d2f-94b6-6277af39d739\") " Feb 03 07:06:45 crc kubenswrapper[4998]: I0203 07:06:45.900155 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f1ff745e-64b7-4f3d-8cf0-69dd338f1996-operator-scripts\") pod \"f1ff745e-64b7-4f3d-8cf0-69dd338f1996\" (UID: \"f1ff745e-64b7-4f3d-8cf0-69dd338f1996\") " Feb 03 07:06:45 crc kubenswrapper[4998]: I0203 07:06:45.900206 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5888222d-3a6a-4e9c-8dd2-543d2029826e-operator-scripts\") pod \"5888222d-3a6a-4e9c-8dd2-543d2029826e\" (UID: \"5888222d-3a6a-4e9c-8dd2-543d2029826e\") " Feb 03 07:06:45 crc kubenswrapper[4998]: I0203 07:06:45.900243 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7vjpw\" (UniqueName: \"kubernetes.io/projected/c3621768-9f07-459e-9d47-afd14d36900f-kube-api-access-7vjpw\") pod \"c3621768-9f07-459e-9d47-afd14d36900f\" (UID: \"c3621768-9f07-459e-9d47-afd14d36900f\") " Feb 03 07:06:45 crc kubenswrapper[4998]: I0203 07:06:45.901562 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d815447-a2cd-470c-84d9-431e9971596a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2d815447-a2cd-470c-84d9-431e9971596a" (UID: "2d815447-a2cd-470c-84d9-431e9971596a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:06:45 crc kubenswrapper[4998]: I0203 07:06:45.905613 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f1ff745e-64b7-4f3d-8cf0-69dd338f1996-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f1ff745e-64b7-4f3d-8cf0-69dd338f1996" (UID: "f1ff745e-64b7-4f3d-8cf0-69dd338f1996"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:06:45 crc kubenswrapper[4998]: I0203 07:06:45.905664 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5919579f-48cb-4d2f-94b6-6277af39d739-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5919579f-48cb-4d2f-94b6-6277af39d739" (UID: "5919579f-48cb-4d2f-94b6-6277af39d739"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:06:45 crc kubenswrapper[4998]: I0203 07:06:45.905747 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5888222d-3a6a-4e9c-8dd2-543d2029826e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5888222d-3a6a-4e9c-8dd2-543d2029826e" (UID: "5888222d-3a6a-4e9c-8dd2-543d2029826e"). 
InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:06:45 crc kubenswrapper[4998]: I0203 07:06:45.906200 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c3621768-9f07-459e-9d47-afd14d36900f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c3621768-9f07-459e-9d47-afd14d36900f" (UID: "c3621768-9f07-459e-9d47-afd14d36900f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:06:45 crc kubenswrapper[4998]: I0203 07:06:45.913924 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1ff745e-64b7-4f3d-8cf0-69dd338f1996-kube-api-access-58x8p" (OuterVolumeSpecName: "kube-api-access-58x8p") pod "f1ff745e-64b7-4f3d-8cf0-69dd338f1996" (UID: "f1ff745e-64b7-4f3d-8cf0-69dd338f1996"). InnerVolumeSpecName "kube-api-access-58x8p". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:06:45 crc kubenswrapper[4998]: I0203 07:06:45.916945 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5919579f-48cb-4d2f-94b6-6277af39d739-kube-api-access-b5698" (OuterVolumeSpecName: "kube-api-access-b5698") pod "5919579f-48cb-4d2f-94b6-6277af39d739" (UID: "5919579f-48cb-4d2f-94b6-6277af39d739"). InnerVolumeSpecName "kube-api-access-b5698". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:06:45 crc kubenswrapper[4998]: I0203 07:06:45.917034 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3621768-9f07-459e-9d47-afd14d36900f-kube-api-access-7vjpw" (OuterVolumeSpecName: "kube-api-access-7vjpw") pod "c3621768-9f07-459e-9d47-afd14d36900f" (UID: "c3621768-9f07-459e-9d47-afd14d36900f"). InnerVolumeSpecName "kube-api-access-7vjpw". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:06:45 crc kubenswrapper[4998]: I0203 07:06:45.917099 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5888222d-3a6a-4e9c-8dd2-543d2029826e-kube-api-access-bgw9x" (OuterVolumeSpecName: "kube-api-access-bgw9x") pod "5888222d-3a6a-4e9c-8dd2-543d2029826e" (UID: "5888222d-3a6a-4e9c-8dd2-543d2029826e"). InnerVolumeSpecName "kube-api-access-bgw9x". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:06:45 crc kubenswrapper[4998]: I0203 07:06:45.923134 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d815447-a2cd-470c-84d9-431e9971596a-kube-api-access-rpp7f" (OuterVolumeSpecName: "kube-api-access-rpp7f") pod "2d815447-a2cd-470c-84d9-431e9971596a" (UID: "2d815447-a2cd-470c-84d9-431e9971596a"). InnerVolumeSpecName "kube-api-access-rpp7f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.001451 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2f02096-123d-472d-b9b8-0083f07a1c39-combined-ca-bundle\") pod \"c2f02096-123d-472d-b9b8-0083f07a1c39\" (UID: \"c2f02096-123d-472d-b9b8-0083f07a1c39\") " Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.001517 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c2f02096-123d-472d-b9b8-0083f07a1c39-log-httpd\") pod \"c2f02096-123d-472d-b9b8-0083f07a1c39\" (UID: \"c2f02096-123d-472d-b9b8-0083f07a1c39\") " Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.001552 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hdqlh\" (UniqueName: \"kubernetes.io/projected/c2f02096-123d-472d-b9b8-0083f07a1c39-kube-api-access-hdqlh\") pod \"c2f02096-123d-472d-b9b8-0083f07a1c39\" (UID: \"c2f02096-123d-472d-b9b8-0083f07a1c39\") " Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.001611 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c2f02096-123d-472d-b9b8-0083f07a1c39-scripts\") pod \"c2f02096-123d-472d-b9b8-0083f07a1c39\" (UID: \"c2f02096-123d-472d-b9b8-0083f07a1c39\") " Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.001755 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c2f02096-123d-472d-b9b8-0083f07a1c39-sg-core-conf-yaml\") pod \"c2f02096-123d-472d-b9b8-0083f07a1c39\" (UID: \"c2f02096-123d-472d-b9b8-0083f07a1c39\") " Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.001807 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2f02096-123d-472d-b9b8-0083f07a1c39-config-data\") pod \"c2f02096-123d-472d-b9b8-0083f07a1c39\" (UID: \"c2f02096-123d-472d-b9b8-0083f07a1c39\") " Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.001831 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c2f02096-123d-472d-b9b8-0083f07a1c39-run-httpd\") pod \"c2f02096-123d-472d-b9b8-0083f07a1c39\" (UID: \"c2f02096-123d-472d-b9b8-0083f07a1c39\") " Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.002179 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bgw9x\" (UniqueName: \"kubernetes.io/projected/5888222d-3a6a-4e9c-8dd2-543d2029826e-kube-api-access-bgw9x\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.002196 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b5698\" (UniqueName: \"kubernetes.io/projected/5919579f-48cb-4d2f-94b6-6277af39d739-kube-api-access-b5698\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.002205 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d815447-a2cd-470c-84d9-431e9971596a-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.002213 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rpp7f\" (UniqueName: 
\"kubernetes.io/projected/2d815447-a2cd-470c-84d9-431e9971596a-kube-api-access-rpp7f\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.002222 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5919579f-48cb-4d2f-94b6-6277af39d739-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.002231 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f1ff745e-64b7-4f3d-8cf0-69dd338f1996-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.002239 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5888222d-3a6a-4e9c-8dd2-543d2029826e-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.002249 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7vjpw\" (UniqueName: \"kubernetes.io/projected/c3621768-9f07-459e-9d47-afd14d36900f-kube-api-access-7vjpw\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.002257 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-58x8p\" (UniqueName: \"kubernetes.io/projected/f1ff745e-64b7-4f3d-8cf0-69dd338f1996-kube-api-access-58x8p\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.002265 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c3621768-9f07-459e-9d47-afd14d36900f-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.002432 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c2f02096-123d-472d-b9b8-0083f07a1c39-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "c2f02096-123d-472d-b9b8-0083f07a1c39" (UID: "c2f02096-123d-472d-b9b8-0083f07a1c39"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.002771 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c2f02096-123d-472d-b9b8-0083f07a1c39-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "c2f02096-123d-472d-b9b8-0083f07a1c39" (UID: "c2f02096-123d-472d-b9b8-0083f07a1c39"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.005085 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2f02096-123d-472d-b9b8-0083f07a1c39-kube-api-access-hdqlh" (OuterVolumeSpecName: "kube-api-access-hdqlh") pod "c2f02096-123d-472d-b9b8-0083f07a1c39" (UID: "c2f02096-123d-472d-b9b8-0083f07a1c39"). InnerVolumeSpecName "kube-api-access-hdqlh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.006065 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2f02096-123d-472d-b9b8-0083f07a1c39-scripts" (OuterVolumeSpecName: "scripts") pod "c2f02096-123d-472d-b9b8-0083f07a1c39" (UID: "c2f02096-123d-472d-b9b8-0083f07a1c39"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.028358 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2f02096-123d-472d-b9b8-0083f07a1c39-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "c2f02096-123d-472d-b9b8-0083f07a1c39" (UID: "c2f02096-123d-472d-b9b8-0083f07a1c39"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.079562 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2f02096-123d-472d-b9b8-0083f07a1c39-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c2f02096-123d-472d-b9b8-0083f07a1c39" (UID: "c2f02096-123d-472d-b9b8-0083f07a1c39"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.103762 4998 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c2f02096-123d-472d-b9b8-0083f07a1c39-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.103820 4998 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c2f02096-123d-472d-b9b8-0083f07a1c39-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.103832 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2f02096-123d-472d-b9b8-0083f07a1c39-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.103849 4998 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c2f02096-123d-472d-b9b8-0083f07a1c39-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.103862 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hdqlh\" (UniqueName: \"kubernetes.io/projected/c2f02096-123d-472d-b9b8-0083f07a1c39-kube-api-access-hdqlh\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.103873 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c2f02096-123d-472d-b9b8-0083f07a1c39-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.105396 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2f02096-123d-472d-b9b8-0083f07a1c39-config-data" (OuterVolumeSpecName: "config-data") pod "c2f02096-123d-472d-b9b8-0083f07a1c39" (UID: "c2f02096-123d-472d-b9b8-0083f07a1c39"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.206205 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2f02096-123d-472d-b9b8-0083f07a1c39-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.347864 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-88lhc" event={"ID":"2d815447-a2cd-470c-84d9-431e9971596a","Type":"ContainerDied","Data":"f85c1e51bff5deaa720152544f9560ef0cf3a5fc66b3653228cdaa22a19d9fbb"} Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.347923 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f85c1e51bff5deaa720152544f9560ef0cf3a5fc66b3653228cdaa22a19d9fbb" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.348724 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-88lhc" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.349929 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-87bbz" event={"ID":"c3621768-9f07-459e-9d47-afd14d36900f","Type":"ContainerDied","Data":"d5f1ff6ef78b0f317bf45e9582d70b579832feb35229ef57fd14b98da71bed7d"} Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.349973 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d5f1ff6ef78b0f317bf45e9582d70b579832feb35229ef57fd14b98da71bed7d" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.350043 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-87bbz" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.357927 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-5hsmh" event={"ID":"f1ff745e-64b7-4f3d-8cf0-69dd338f1996","Type":"ContainerDied","Data":"f22b4f763f747fbf5f8553dec3cbbf25b9dd846120cb7895345102b5689981f6"} Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.357976 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f22b4f763f747fbf5f8553dec3cbbf25b9dd846120cb7895345102b5689981f6" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.358042 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-5hsmh" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.361235 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-9147-account-create-update-75s8j" event={"ID":"5919579f-48cb-4d2f-94b6-6277af39d739","Type":"ContainerDied","Data":"ca975df52a83466ccaf4bbe1ca5fe24469e6717d900f88520f19d253b59166b2"} Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.361267 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ca975df52a83466ccaf4bbe1ca5fe24469e6717d900f88520f19d253b59166b2" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.361310 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-9147-account-create-update-75s8j" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.364716 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"58c44471-d442-4736-a649-c762a1c893fa","Type":"ContainerStarted","Data":"254b248ad815a3de6f3228e0bf342af87d6cff65d3356b30e77a551b768ab805"} Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.368361 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-d2c0-account-create-update-d7mgx" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.368426 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-d2c0-account-create-update-d7mgx" event={"ID":"5888222d-3a6a-4e9c-8dd2-543d2029826e","Type":"ContainerDied","Data":"700bf450c20d4d3fda24fb4e9c2760f5743ea62e5741c7ac9386560f63aceb68"} Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.368645 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="700bf450c20d4d3fda24fb4e9c2760f5743ea62e5741c7ac9386560f63aceb68" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.371431 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c2f02096-123d-472d-b9b8-0083f07a1c39","Type":"ContainerDied","Data":"feeef7a04bccde203705fb90b95c9f4cee56a292e7f552ee550ae34f48057297"} Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.371475 4998 scope.go:117] "RemoveContainer" containerID="d06782cbd10ecb30fa6f586c2374c1f4483c16ae1abd0337db2c800abbcc66c9" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.371611 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.388605 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.036551419 podStartE2EDuration="14.388582773s" podCreationTimestamp="2026-02-03 07:06:32 +0000 UTC" firstStartedPulling="2026-02-03 07:06:33.218231696 +0000 UTC m=+1231.504925502" lastFinishedPulling="2026-02-03 07:06:45.57026305 +0000 UTC m=+1243.856956856" observedRunningTime="2026-02-03 07:06:46.382266142 +0000 UTC m=+1244.668959948" watchObservedRunningTime="2026-02-03 07:06:46.388582773 +0000 UTC m=+1244.675276599" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.420627 4998 scope.go:117] "RemoveContainer" containerID="d9ed4bfead5f617e4d802af2a38ec102e9c352fa063909946be14e658336963b" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.452026 4998 scope.go:117] "RemoveContainer" containerID="e7f498d8f6533ce735923bd13aec435c837c30da33742475080c9a11194c318d" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.488891 4998 scope.go:117] "RemoveContainer" containerID="276b273455e2be4c64a0b5e8c8bc0ac251a5dc82b27ff29f590dd638ceae4342" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.505949 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.526056 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.553504 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:06:46 crc kubenswrapper[4998]: E0203 07:06:46.554273 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5919579f-48cb-4d2f-94b6-6277af39d739" 
containerName="mariadb-account-create-update" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.554292 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="5919579f-48cb-4d2f-94b6-6277af39d739" containerName="mariadb-account-create-update" Feb 03 07:06:46 crc kubenswrapper[4998]: E0203 07:06:46.554304 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2f02096-123d-472d-b9b8-0083f07a1c39" containerName="ceilometer-central-agent" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.554312 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2f02096-123d-472d-b9b8-0083f07a1c39" containerName="ceilometer-central-agent" Feb 03 07:06:46 crc kubenswrapper[4998]: E0203 07:06:46.554320 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2f02096-123d-472d-b9b8-0083f07a1c39" containerName="sg-core" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.554326 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2f02096-123d-472d-b9b8-0083f07a1c39" containerName="sg-core" Feb 03 07:06:46 crc kubenswrapper[4998]: E0203 07:06:46.554339 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2f02096-123d-472d-b9b8-0083f07a1c39" containerName="ceilometer-notification-agent" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.554345 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2f02096-123d-472d-b9b8-0083f07a1c39" containerName="ceilometer-notification-agent" Feb 03 07:06:46 crc kubenswrapper[4998]: E0203 07:06:46.554351 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5888222d-3a6a-4e9c-8dd2-543d2029826e" containerName="mariadb-account-create-update" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.554356 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="5888222d-3a6a-4e9c-8dd2-543d2029826e" containerName="mariadb-account-create-update" Feb 03 07:06:46 crc kubenswrapper[4998]: E0203 07:06:46.554368 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2f02096-123d-472d-b9b8-0083f07a1c39" containerName="proxy-httpd" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.554373 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2f02096-123d-472d-b9b8-0083f07a1c39" containerName="proxy-httpd" Feb 03 07:06:46 crc kubenswrapper[4998]: E0203 07:06:46.554384 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa049756-1151-4b26-8f4a-669b4bf01ac3" containerName="mariadb-account-create-update" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.554394 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa049756-1151-4b26-8f4a-669b4bf01ac3" containerName="mariadb-account-create-update" Feb 03 07:06:46 crc kubenswrapper[4998]: E0203 07:06:46.554406 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d815447-a2cd-470c-84d9-431e9971596a" containerName="mariadb-database-create" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.554414 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d815447-a2cd-470c-84d9-431e9971596a" containerName="mariadb-database-create" Feb 03 07:06:46 crc kubenswrapper[4998]: E0203 07:06:46.554429 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1ff745e-64b7-4f3d-8cf0-69dd338f1996" containerName="mariadb-database-create" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.554448 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1ff745e-64b7-4f3d-8cf0-69dd338f1996" containerName="mariadb-database-create" Feb 03 07:06:46 
crc kubenswrapper[4998]: E0203 07:06:46.554462 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3621768-9f07-459e-9d47-afd14d36900f" containerName="mariadb-database-create" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.554469 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3621768-9f07-459e-9d47-afd14d36900f" containerName="mariadb-database-create" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.554656 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3621768-9f07-459e-9d47-afd14d36900f" containerName="mariadb-database-create" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.554673 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d815447-a2cd-470c-84d9-431e9971596a" containerName="mariadb-database-create" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.554683 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2f02096-123d-472d-b9b8-0083f07a1c39" containerName="sg-core" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.554691 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="5919579f-48cb-4d2f-94b6-6277af39d739" containerName="mariadb-account-create-update" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.554700 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2f02096-123d-472d-b9b8-0083f07a1c39" containerName="ceilometer-central-agent" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.554712 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="5888222d-3a6a-4e9c-8dd2-543d2029826e" containerName="mariadb-account-create-update" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.554735 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1ff745e-64b7-4f3d-8cf0-69dd338f1996" containerName="mariadb-database-create" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.554745 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa049756-1151-4b26-8f4a-669b4bf01ac3" containerName="mariadb-account-create-update" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.554753 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2f02096-123d-472d-b9b8-0083f07a1c39" containerName="ceilometer-notification-agent" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.554760 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2f02096-123d-472d-b9b8-0083f07a1c39" containerName="proxy-httpd" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.556195 4998 util.go:30] "No sandbox for pod can be found. 
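The paired cpu_manager/state_mem/memory_manager entries above show per-container resource-manager state being dropped for pods that no longer exist, keyed by pod UID plus container name. A rough analogy of that keyed cleanup, assuming a plain map as the state store; this is an illustration of the pattern, not kubelet's state code:

    package main

    import "fmt"

    // key mirrors how the log identifies an assignment: pod UID plus container name.
    type key struct{ podUID, container string }

    // stale drops every assignment whose pod UID is no longer in the active set.
    // Illustrative analogy of "RemoveStaleState", not the kubelet implementation.
    func stale(assignments map[key]string, active map[string]bool) {
    	for k := range assignments {
    		if !active[k.podUID] {
    			fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n", k.podUID, k.container)
    			delete(assignments, k)
    		}
    	}
    }

    func main() {
    	a := map[key]string{
    		{"c2f02096-123d-472d-b9b8-0083f07a1c39", "sg-core"}:                       "cpuset 0-1",
    		{"5919579f-48cb-4d2f-94b6-6277af39d739", "mariadb-account-create-update"}: "cpuset 2",
    	}
    	stale(a, map[string]bool{}) // none of these pods is active any more
    }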
Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.558503 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.558825 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.576596 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.613521 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5f716ae2-1189-4011-9cdd-2e0fcecd2001-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5f716ae2-1189-4011-9cdd-2e0fcecd2001\") " pod="openstack/ceilometer-0" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.613567 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnwsp\" (UniqueName: \"kubernetes.io/projected/5f716ae2-1189-4011-9cdd-2e0fcecd2001-kube-api-access-vnwsp\") pod \"ceilometer-0\" (UID: \"5f716ae2-1189-4011-9cdd-2e0fcecd2001\") " pod="openstack/ceilometer-0" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.613665 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f716ae2-1189-4011-9cdd-2e0fcecd2001-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5f716ae2-1189-4011-9cdd-2e0fcecd2001\") " pod="openstack/ceilometer-0" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.613722 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f716ae2-1189-4011-9cdd-2e0fcecd2001-config-data\") pod \"ceilometer-0\" (UID: \"5f716ae2-1189-4011-9cdd-2e0fcecd2001\") " pod="openstack/ceilometer-0" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.613741 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f716ae2-1189-4011-9cdd-2e0fcecd2001-scripts\") pod \"ceilometer-0\" (UID: \"5f716ae2-1189-4011-9cdd-2e0fcecd2001\") " pod="openstack/ceilometer-0" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.613803 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5f716ae2-1189-4011-9cdd-2e0fcecd2001-run-httpd\") pod \"ceilometer-0\" (UID: \"5f716ae2-1189-4011-9cdd-2e0fcecd2001\") " pod="openstack/ceilometer-0" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.613822 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5f716ae2-1189-4011-9cdd-2e0fcecd2001-log-httpd\") pod \"ceilometer-0\" (UID: \"5f716ae2-1189-4011-9cdd-2e0fcecd2001\") " pod="openstack/ceilometer-0" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.715144 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f716ae2-1189-4011-9cdd-2e0fcecd2001-scripts\") pod \"ceilometer-0\" (UID: \"5f716ae2-1189-4011-9cdd-2e0fcecd2001\") " pod="openstack/ceilometer-0" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.715408 4998 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5f716ae2-1189-4011-9cdd-2e0fcecd2001-run-httpd\") pod \"ceilometer-0\" (UID: \"5f716ae2-1189-4011-9cdd-2e0fcecd2001\") " pod="openstack/ceilometer-0" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.715478 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5f716ae2-1189-4011-9cdd-2e0fcecd2001-log-httpd\") pod \"ceilometer-0\" (UID: \"5f716ae2-1189-4011-9cdd-2e0fcecd2001\") " pod="openstack/ceilometer-0" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.715553 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnwsp\" (UniqueName: \"kubernetes.io/projected/5f716ae2-1189-4011-9cdd-2e0fcecd2001-kube-api-access-vnwsp\") pod \"ceilometer-0\" (UID: \"5f716ae2-1189-4011-9cdd-2e0fcecd2001\") " pod="openstack/ceilometer-0" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.715626 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5f716ae2-1189-4011-9cdd-2e0fcecd2001-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5f716ae2-1189-4011-9cdd-2e0fcecd2001\") " pod="openstack/ceilometer-0" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.715756 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f716ae2-1189-4011-9cdd-2e0fcecd2001-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5f716ae2-1189-4011-9cdd-2e0fcecd2001\") " pod="openstack/ceilometer-0" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.715873 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f716ae2-1189-4011-9cdd-2e0fcecd2001-config-data\") pod \"ceilometer-0\" (UID: \"5f716ae2-1189-4011-9cdd-2e0fcecd2001\") " pod="openstack/ceilometer-0" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.717404 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5f716ae2-1189-4011-9cdd-2e0fcecd2001-log-httpd\") pod \"ceilometer-0\" (UID: \"5f716ae2-1189-4011-9cdd-2e0fcecd2001\") " pod="openstack/ceilometer-0" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.718338 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5f716ae2-1189-4011-9cdd-2e0fcecd2001-run-httpd\") pod \"ceilometer-0\" (UID: \"5f716ae2-1189-4011-9cdd-2e0fcecd2001\") " pod="openstack/ceilometer-0" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.722364 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5f716ae2-1189-4011-9cdd-2e0fcecd2001-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5f716ae2-1189-4011-9cdd-2e0fcecd2001\") " pod="openstack/ceilometer-0" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.722518 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f716ae2-1189-4011-9cdd-2e0fcecd2001-scripts\") pod \"ceilometer-0\" (UID: \"5f716ae2-1189-4011-9cdd-2e0fcecd2001\") " pod="openstack/ceilometer-0" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.724252 4998 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f716ae2-1189-4011-9cdd-2e0fcecd2001-config-data\") pod \"ceilometer-0\" (UID: \"5f716ae2-1189-4011-9cdd-2e0fcecd2001\") " pod="openstack/ceilometer-0" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.724555 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f716ae2-1189-4011-9cdd-2e0fcecd2001-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5f716ae2-1189-4011-9cdd-2e0fcecd2001\") " pod="openstack/ceilometer-0" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.738076 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vnwsp\" (UniqueName: \"kubernetes.io/projected/5f716ae2-1189-4011-9cdd-2e0fcecd2001-kube-api-access-vnwsp\") pod \"ceilometer-0\" (UID: \"5f716ae2-1189-4011-9cdd-2e0fcecd2001\") " pod="openstack/ceilometer-0" Feb 03 07:06:46 crc kubenswrapper[4998]: I0203 07:06:46.873461 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:06:47 crc kubenswrapper[4998]: I0203 07:06:47.324731 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:06:47 crc kubenswrapper[4998]: W0203 07:06:47.326581 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5f716ae2_1189_4011_9cdd_2e0fcecd2001.slice/crio-13bfb1a46cf2d14e17033b47bc86b616f830e4c97fdb2e0f510a618e4a15707a WatchSource:0}: Error finding container 13bfb1a46cf2d14e17033b47bc86b616f830e4c97fdb2e0f510a618e4a15707a: Status 404 returned error can't find the container with id 13bfb1a46cf2d14e17033b47bc86b616f830e4c97fdb2e0f510a618e4a15707a Feb 03 07:06:47 crc kubenswrapper[4998]: I0203 07:06:47.381618 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5f716ae2-1189-4011-9cdd-2e0fcecd2001","Type":"ContainerStarted","Data":"13bfb1a46cf2d14e17033b47bc86b616f830e4c97fdb2e0f510a618e4a15707a"} Feb 03 07:06:47 crc kubenswrapper[4998]: I0203 07:06:47.809057 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.369673 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.394124 4998 generic.go:334] "Generic (PLEG): container finished" podID="7cdf6753-a819-499c-ab8d-1b48d75424a6" containerID="776cc65b766fa9c2731c5d100620695aca396af1086d0ae12b80b8674f4fc01e" exitCode=137 Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.394197 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"7cdf6753-a819-499c-ab8d-1b48d75424a6","Type":"ContainerDied","Data":"776cc65b766fa9c2731c5d100620695aca396af1086d0ae12b80b8674f4fc01e"} Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.394232 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"7cdf6753-a819-499c-ab8d-1b48d75424a6","Type":"ContainerDied","Data":"3402adf87548c1e53149adebc7d3a011a4628158f4276d0252cb9c2ab9fb66e5"} Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.394253 4998 scope.go:117] "RemoveContainer" containerID="776cc65b766fa9c2731c5d100620695aca396af1086d0ae12b80b8674f4fc01e" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.394368 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.413888 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5f716ae2-1189-4011-9cdd-2e0fcecd2001","Type":"ContainerStarted","Data":"b0e03c6903fb418fe5ef193a6318a6cfcd2bc6374a0d3dba67065d83f231d952"} Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.433589 4998 scope.go:117] "RemoveContainer" containerID="385955e07e11190022b0f859b911e0b8ff400aad084228f79533cc60dedfe31c" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.444820 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cdf6753-a819-499c-ab8d-1b48d75424a6-combined-ca-bundle\") pod \"7cdf6753-a819-499c-ab8d-1b48d75424a6\" (UID: \"7cdf6753-a819-499c-ab8d-1b48d75424a6\") " Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.444868 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7cdf6753-a819-499c-ab8d-1b48d75424a6-config-data\") pod \"7cdf6753-a819-499c-ab8d-1b48d75424a6\" (UID: \"7cdf6753-a819-499c-ab8d-1b48d75424a6\") " Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.444949 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7cdf6753-a819-499c-ab8d-1b48d75424a6-logs\") pod \"7cdf6753-a819-499c-ab8d-1b48d75424a6\" (UID: \"7cdf6753-a819-499c-ab8d-1b48d75424a6\") " Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.445046 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7cdf6753-a819-499c-ab8d-1b48d75424a6-etc-machine-id\") pod \"7cdf6753-a819-499c-ab8d-1b48d75424a6\" (UID: \"7cdf6753-a819-499c-ab8d-1b48d75424a6\") " Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.445124 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7cdf6753-a819-499c-ab8d-1b48d75424a6-config-data-custom\") pod \"7cdf6753-a819-499c-ab8d-1b48d75424a6\" (UID: \"7cdf6753-a819-499c-ab8d-1b48d75424a6\") " Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.445156 4998 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rczdg\" (UniqueName: \"kubernetes.io/projected/7cdf6753-a819-499c-ab8d-1b48d75424a6-kube-api-access-rczdg\") pod \"7cdf6753-a819-499c-ab8d-1b48d75424a6\" (UID: \"7cdf6753-a819-499c-ab8d-1b48d75424a6\") " Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.445251 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7cdf6753-a819-499c-ab8d-1b48d75424a6-scripts\") pod \"7cdf6753-a819-499c-ab8d-1b48d75424a6\" (UID: \"7cdf6753-a819-499c-ab8d-1b48d75424a6\") " Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.447026 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2f02096-123d-472d-b9b8-0083f07a1c39" path="/var/lib/kubelet/pods/c2f02096-123d-472d-b9b8-0083f07a1c39/volumes" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.450285 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7cdf6753-a819-499c-ab8d-1b48d75424a6-logs" (OuterVolumeSpecName: "logs") pod "7cdf6753-a819-499c-ab8d-1b48d75424a6" (UID: "7cdf6753-a819-499c-ab8d-1b48d75424a6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.455100 4998 scope.go:117] "RemoveContainer" containerID="776cc65b766fa9c2731c5d100620695aca396af1086d0ae12b80b8674f4fc01e" Feb 03 07:06:48 crc kubenswrapper[4998]: E0203 07:06:48.455568 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"776cc65b766fa9c2731c5d100620695aca396af1086d0ae12b80b8674f4fc01e\": container with ID starting with 776cc65b766fa9c2731c5d100620695aca396af1086d0ae12b80b8674f4fc01e not found: ID does not exist" containerID="776cc65b766fa9c2731c5d100620695aca396af1086d0ae12b80b8674f4fc01e" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.455617 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"776cc65b766fa9c2731c5d100620695aca396af1086d0ae12b80b8674f4fc01e"} err="failed to get container status \"776cc65b766fa9c2731c5d100620695aca396af1086d0ae12b80b8674f4fc01e\": rpc error: code = NotFound desc = could not find container \"776cc65b766fa9c2731c5d100620695aca396af1086d0ae12b80b8674f4fc01e\": container with ID starting with 776cc65b766fa9c2731c5d100620695aca396af1086d0ae12b80b8674f4fc01e not found: ID does not exist" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.455644 4998 scope.go:117] "RemoveContainer" containerID="385955e07e11190022b0f859b911e0b8ff400aad084228f79533cc60dedfe31c" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.455757 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7cdf6753-a819-499c-ab8d-1b48d75424a6-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "7cdf6753-a819-499c-ab8d-1b48d75424a6" (UID: "7cdf6753-a819-499c-ab8d-1b48d75424a6"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:06:48 crc kubenswrapper[4998]: E0203 07:06:48.457201 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"385955e07e11190022b0f859b911e0b8ff400aad084228f79533cc60dedfe31c\": container with ID starting with 385955e07e11190022b0f859b911e0b8ff400aad084228f79533cc60dedfe31c not found: ID does not exist" containerID="385955e07e11190022b0f859b911e0b8ff400aad084228f79533cc60dedfe31c" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.457296 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"385955e07e11190022b0f859b911e0b8ff400aad084228f79533cc60dedfe31c"} err="failed to get container status \"385955e07e11190022b0f859b911e0b8ff400aad084228f79533cc60dedfe31c\": rpc error: code = NotFound desc = could not find container \"385955e07e11190022b0f859b911e0b8ff400aad084228f79533cc60dedfe31c\": container with ID starting with 385955e07e11190022b0f859b911e0b8ff400aad084228f79533cc60dedfe31c not found: ID does not exist" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.458471 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7cdf6753-a819-499c-ab8d-1b48d75424a6-kube-api-access-rczdg" (OuterVolumeSpecName: "kube-api-access-rczdg") pod "7cdf6753-a819-499c-ab8d-1b48d75424a6" (UID: "7cdf6753-a819-499c-ab8d-1b48d75424a6"). InnerVolumeSpecName "kube-api-access-rczdg". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.459061 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7cdf6753-a819-499c-ab8d-1b48d75424a6-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "7cdf6753-a819-499c-ab8d-1b48d75424a6" (UID: "7cdf6753-a819-499c-ab8d-1b48d75424a6"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.461500 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7cdf6753-a819-499c-ab8d-1b48d75424a6-scripts" (OuterVolumeSpecName: "scripts") pod "7cdf6753-a819-499c-ab8d-1b48d75424a6" (UID: "7cdf6753-a819-499c-ab8d-1b48d75424a6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.492705 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7cdf6753-a819-499c-ab8d-1b48d75424a6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7cdf6753-a819-499c-ab8d-1b48d75424a6" (UID: "7cdf6753-a819-499c-ab8d-1b48d75424a6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.497304 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-d9fff78f-v2ccx" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.500319 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-d9fff78f-v2ccx" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.532472 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7cdf6753-a819-499c-ab8d-1b48d75424a6-config-data" (OuterVolumeSpecName: "config-data") pod "7cdf6753-a819-499c-ab8d-1b48d75424a6" (UID: "7cdf6753-a819-499c-ab8d-1b48d75424a6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.546920 4998 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7cdf6753-a819-499c-ab8d-1b48d75424a6-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.546956 4998 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7cdf6753-a819-499c-ab8d-1b48d75424a6-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.546966 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rczdg\" (UniqueName: \"kubernetes.io/projected/7cdf6753-a819-499c-ab8d-1b48d75424a6-kube-api-access-rczdg\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.546976 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7cdf6753-a819-499c-ab8d-1b48d75424a6-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.546985 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cdf6753-a819-499c-ab8d-1b48d75424a6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.546992 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7cdf6753-a819-499c-ab8d-1b48d75424a6-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.547001 4998 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7cdf6753-a819-499c-ab8d-1b48d75424a6-logs\") on node \"crc\" DevicePath \"\"" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.790976 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.843926 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.857232 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Feb 03 07:06:48 crc kubenswrapper[4998]: E0203 07:06:48.857934 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cdf6753-a819-499c-ab8d-1b48d75424a6" containerName="cinder-api-log" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.857958 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cdf6753-a819-499c-ab8d-1b48d75424a6" containerName="cinder-api-log" Feb 03 07:06:48 crc 
kubenswrapper[4998]: E0203 07:06:48.857976 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cdf6753-a819-499c-ab8d-1b48d75424a6" containerName="cinder-api" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.857984 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cdf6753-a819-499c-ab8d-1b48d75424a6" containerName="cinder-api" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.858285 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="7cdf6753-a819-499c-ab8d-1b48d75424a6" containerName="cinder-api-log" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.858302 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="7cdf6753-a819-499c-ab8d-1b48d75424a6" containerName="cinder-api" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.859414 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.863375 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.863415 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.863707 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.865553 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.957096 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/59162297-8dd9-4ddd-a18b-8045d2f6c610-etc-machine-id\") pod \"cinder-api-0\" (UID: \"59162297-8dd9-4ddd-a18b-8045d2f6c610\") " pod="openstack/cinder-api-0" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.957211 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59162297-8dd9-4ddd-a18b-8045d2f6c610-config-data\") pod \"cinder-api-0\" (UID: \"59162297-8dd9-4ddd-a18b-8045d2f6c610\") " pod="openstack/cinder-api-0" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.957308 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59162297-8dd9-4ddd-a18b-8045d2f6c610-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"59162297-8dd9-4ddd-a18b-8045d2f6c610\") " pod="openstack/cinder-api-0" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.957328 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/59162297-8dd9-4ddd-a18b-8045d2f6c610-config-data-custom\") pod \"cinder-api-0\" (UID: \"59162297-8dd9-4ddd-a18b-8045d2f6c610\") " pod="openstack/cinder-api-0" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.957371 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/59162297-8dd9-4ddd-a18b-8045d2f6c610-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"59162297-8dd9-4ddd-a18b-8045d2f6c610\") " pod="openstack/cinder-api-0" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.957403 4998 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/59162297-8dd9-4ddd-a18b-8045d2f6c610-scripts\") pod \"cinder-api-0\" (UID: \"59162297-8dd9-4ddd-a18b-8045d2f6c610\") " pod="openstack/cinder-api-0" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.957422 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4c7sz\" (UniqueName: \"kubernetes.io/projected/59162297-8dd9-4ddd-a18b-8045d2f6c610-kube-api-access-4c7sz\") pod \"cinder-api-0\" (UID: \"59162297-8dd9-4ddd-a18b-8045d2f6c610\") " pod="openstack/cinder-api-0" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.957442 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/59162297-8dd9-4ddd-a18b-8045d2f6c610-public-tls-certs\") pod \"cinder-api-0\" (UID: \"59162297-8dd9-4ddd-a18b-8045d2f6c610\") " pod="openstack/cinder-api-0" Feb 03 07:06:48 crc kubenswrapper[4998]: I0203 07:06:48.957471 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/59162297-8dd9-4ddd-a18b-8045d2f6c610-logs\") pod \"cinder-api-0\" (UID: \"59162297-8dd9-4ddd-a18b-8045d2f6c610\") " pod="openstack/cinder-api-0" Feb 03 07:06:49 crc kubenswrapper[4998]: I0203 07:06:49.058882 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/59162297-8dd9-4ddd-a18b-8045d2f6c610-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"59162297-8dd9-4ddd-a18b-8045d2f6c610\") " pod="openstack/cinder-api-0" Feb 03 07:06:49 crc kubenswrapper[4998]: I0203 07:06:49.058961 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/59162297-8dd9-4ddd-a18b-8045d2f6c610-scripts\") pod \"cinder-api-0\" (UID: \"59162297-8dd9-4ddd-a18b-8045d2f6c610\") " pod="openstack/cinder-api-0" Feb 03 07:06:49 crc kubenswrapper[4998]: I0203 07:06:49.058980 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4c7sz\" (UniqueName: \"kubernetes.io/projected/59162297-8dd9-4ddd-a18b-8045d2f6c610-kube-api-access-4c7sz\") pod \"cinder-api-0\" (UID: \"59162297-8dd9-4ddd-a18b-8045d2f6c610\") " pod="openstack/cinder-api-0" Feb 03 07:06:49 crc kubenswrapper[4998]: I0203 07:06:49.059004 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/59162297-8dd9-4ddd-a18b-8045d2f6c610-public-tls-certs\") pod \"cinder-api-0\" (UID: \"59162297-8dd9-4ddd-a18b-8045d2f6c610\") " pod="openstack/cinder-api-0" Feb 03 07:06:49 crc kubenswrapper[4998]: I0203 07:06:49.059048 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/59162297-8dd9-4ddd-a18b-8045d2f6c610-logs\") pod \"cinder-api-0\" (UID: \"59162297-8dd9-4ddd-a18b-8045d2f6c610\") " pod="openstack/cinder-api-0" Feb 03 07:06:49 crc kubenswrapper[4998]: I0203 07:06:49.059111 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/59162297-8dd9-4ddd-a18b-8045d2f6c610-etc-machine-id\") pod \"cinder-api-0\" (UID: \"59162297-8dd9-4ddd-a18b-8045d2f6c610\") " pod="openstack/cinder-api-0" Feb 03 07:06:49 
crc kubenswrapper[4998]: I0203 07:06:49.059170 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59162297-8dd9-4ddd-a18b-8045d2f6c610-config-data\") pod \"cinder-api-0\" (UID: \"59162297-8dd9-4ddd-a18b-8045d2f6c610\") " pod="openstack/cinder-api-0" Feb 03 07:06:49 crc kubenswrapper[4998]: I0203 07:06:49.059458 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59162297-8dd9-4ddd-a18b-8045d2f6c610-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"59162297-8dd9-4ddd-a18b-8045d2f6c610\") " pod="openstack/cinder-api-0" Feb 03 07:06:49 crc kubenswrapper[4998]: I0203 07:06:49.059475 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/59162297-8dd9-4ddd-a18b-8045d2f6c610-config-data-custom\") pod \"cinder-api-0\" (UID: \"59162297-8dd9-4ddd-a18b-8045d2f6c610\") " pod="openstack/cinder-api-0" Feb 03 07:06:49 crc kubenswrapper[4998]: I0203 07:06:49.059593 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/59162297-8dd9-4ddd-a18b-8045d2f6c610-etc-machine-id\") pod \"cinder-api-0\" (UID: \"59162297-8dd9-4ddd-a18b-8045d2f6c610\") " pod="openstack/cinder-api-0" Feb 03 07:06:49 crc kubenswrapper[4998]: I0203 07:06:49.060462 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/59162297-8dd9-4ddd-a18b-8045d2f6c610-logs\") pod \"cinder-api-0\" (UID: \"59162297-8dd9-4ddd-a18b-8045d2f6c610\") " pod="openstack/cinder-api-0" Feb 03 07:06:49 crc kubenswrapper[4998]: I0203 07:06:49.064891 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59162297-8dd9-4ddd-a18b-8045d2f6c610-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"59162297-8dd9-4ddd-a18b-8045d2f6c610\") " pod="openstack/cinder-api-0" Feb 03 07:06:49 crc kubenswrapper[4998]: I0203 07:06:49.064941 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59162297-8dd9-4ddd-a18b-8045d2f6c610-config-data\") pod \"cinder-api-0\" (UID: \"59162297-8dd9-4ddd-a18b-8045d2f6c610\") " pod="openstack/cinder-api-0" Feb 03 07:06:49 crc kubenswrapper[4998]: I0203 07:06:49.065550 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/59162297-8dd9-4ddd-a18b-8045d2f6c610-config-data-custom\") pod \"cinder-api-0\" (UID: \"59162297-8dd9-4ddd-a18b-8045d2f6c610\") " pod="openstack/cinder-api-0" Feb 03 07:06:49 crc kubenswrapper[4998]: I0203 07:06:49.067363 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/59162297-8dd9-4ddd-a18b-8045d2f6c610-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"59162297-8dd9-4ddd-a18b-8045d2f6c610\") " pod="openstack/cinder-api-0" Feb 03 07:06:49 crc kubenswrapper[4998]: I0203 07:06:49.067436 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/59162297-8dd9-4ddd-a18b-8045d2f6c610-scripts\") pod \"cinder-api-0\" (UID: \"59162297-8dd9-4ddd-a18b-8045d2f6c610\") " pod="openstack/cinder-api-0" Feb 03 07:06:49 crc kubenswrapper[4998]: I0203 07:06:49.068438 4998 
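The UniqueName prefixes above (kubernetes.io/secret, kubernetes.io/projected, kubernetes.io/host-path, kubernetes.io/empty-dir) name the volume plugin backing each mount, which corresponds to the volume source declared in the pod spec. A sketch of those shapes using k8s.io/api/core/v1 types; the secret name is assumed from the reflector entries above, and kube-api-access-* projected volumes are normally injected by the API server rather than declared by hand:

    package main

    import (
    	"fmt"

    	corev1 "k8s.io/api/core/v1"
    )

    func main() {
    	// Volume sources matching the plugin names seen in the mount entries above.
    	vols := []corev1.Volume{
    		{Name: "config-data", VolumeSource: corev1.VolumeSource{
    			Secret: &corev1.SecretVolumeSource{SecretName: "cinder-api-config-data"}}}, // kubernetes.io/secret (secret name assumed)
    		{Name: "logs", VolumeSource: corev1.VolumeSource{
    			EmptyDir: &corev1.EmptyDirVolumeSource{}}}, // kubernetes.io/empty-dir
    		{Name: "etc-machine-id", VolumeSource: corev1.VolumeSource{
    			HostPath: &corev1.HostPathVolumeSource{Path: "/etc/machine-id"}}}, // kubernetes.io/host-path
    	}
    	for _, v := range vols {
    		fmt.Println(v.Name)
    	}
    }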
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/59162297-8dd9-4ddd-a18b-8045d2f6c610-public-tls-certs\") pod \"cinder-api-0\" (UID: \"59162297-8dd9-4ddd-a18b-8045d2f6c610\") " pod="openstack/cinder-api-0" Feb 03 07:06:49 crc kubenswrapper[4998]: I0203 07:06:49.076696 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4c7sz\" (UniqueName: \"kubernetes.io/projected/59162297-8dd9-4ddd-a18b-8045d2f6c610-kube-api-access-4c7sz\") pod \"cinder-api-0\" (UID: \"59162297-8dd9-4ddd-a18b-8045d2f6c610\") " pod="openstack/cinder-api-0" Feb 03 07:06:49 crc kubenswrapper[4998]: I0203 07:06:49.196334 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Feb 03 07:06:49 crc kubenswrapper[4998]: I0203 07:06:49.448336 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5f716ae2-1189-4011-9cdd-2e0fcecd2001","Type":"ContainerStarted","Data":"c394b749fd034302a3b77b45ea2c12171ea8b605ec1002682b52eecb0b931ab6"} Feb 03 07:06:49 crc kubenswrapper[4998]: I0203 07:06:49.740149 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Feb 03 07:06:49 crc kubenswrapper[4998]: W0203 07:06:49.743138 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod59162297_8dd9_4ddd_a18b_8045d2f6c610.slice/crio-81e179c62f076b1235befec968a1c924c07c06497035bf25bb7be2df848c48d0 WatchSource:0}: Error finding container 81e179c62f076b1235befec968a1c924c07c06497035bf25bb7be2df848c48d0: Status 404 returned error can't find the container with id 81e179c62f076b1235befec968a1c924c07c06497035bf25bb7be2df848c48d0 Feb 03 07:06:50 crc kubenswrapper[4998]: I0203 07:06:50.448210 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7cdf6753-a819-499c-ab8d-1b48d75424a6" path="/var/lib/kubelet/pods/7cdf6753-a819-499c-ab8d-1b48d75424a6/volumes" Feb 03 07:06:50 crc kubenswrapper[4998]: I0203 07:06:50.489070 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5f716ae2-1189-4011-9cdd-2e0fcecd2001","Type":"ContainerStarted","Data":"c8ecd618c97f3a3e9529f6bba846a36804684550e69c654736d13c93bc53dfe3"} Feb 03 07:06:50 crc kubenswrapper[4998]: I0203 07:06:50.492938 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"59162297-8dd9-4ddd-a18b-8045d2f6c610","Type":"ContainerStarted","Data":"7d86f1f77c5869d58d533147a1dd56150d79e3f61aa23ff73cbef100c39012e2"} Feb 03 07:06:50 crc kubenswrapper[4998]: I0203 07:06:50.492992 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"59162297-8dd9-4ddd-a18b-8045d2f6c610","Type":"ContainerStarted","Data":"81e179c62f076b1235befec968a1c924c07c06497035bf25bb7be2df848c48d0"} Feb 03 07:06:51 crc kubenswrapper[4998]: I0203 07:06:51.474638 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-sqllz"] Feb 03 07:06:51 crc kubenswrapper[4998]: I0203 07:06:51.476273 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-sqllz" Feb 03 07:06:51 crc kubenswrapper[4998]: I0203 07:06:51.482452 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-vbhdd" Feb 03 07:06:51 crc kubenswrapper[4998]: I0203 07:06:51.482854 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Feb 03 07:06:51 crc kubenswrapper[4998]: I0203 07:06:51.496794 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Feb 03 07:06:51 crc kubenswrapper[4998]: I0203 07:06:51.534231 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7l497\" (UniqueName: \"kubernetes.io/projected/e485e612-9e0b-4d19-a326-523613449a06-kube-api-access-7l497\") pod \"nova-cell0-conductor-db-sync-sqllz\" (UID: \"e485e612-9e0b-4d19-a326-523613449a06\") " pod="openstack/nova-cell0-conductor-db-sync-sqllz" Feb 03 07:06:51 crc kubenswrapper[4998]: I0203 07:06:51.534638 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e485e612-9e0b-4d19-a326-523613449a06-scripts\") pod \"nova-cell0-conductor-db-sync-sqllz\" (UID: \"e485e612-9e0b-4d19-a326-523613449a06\") " pod="openstack/nova-cell0-conductor-db-sync-sqllz" Feb 03 07:06:51 crc kubenswrapper[4998]: I0203 07:06:51.534750 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e485e612-9e0b-4d19-a326-523613449a06-config-data\") pod \"nova-cell0-conductor-db-sync-sqllz\" (UID: \"e485e612-9e0b-4d19-a326-523613449a06\") " pod="openstack/nova-cell0-conductor-db-sync-sqllz" Feb 03 07:06:51 crc kubenswrapper[4998]: I0203 07:06:51.535852 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e485e612-9e0b-4d19-a326-523613449a06-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-sqllz\" (UID: \"e485e612-9e0b-4d19-a326-523613449a06\") " pod="openstack/nova-cell0-conductor-db-sync-sqllz" Feb 03 07:06:51 crc kubenswrapper[4998]: I0203 07:06:51.542928 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-sqllz"] Feb 03 07:06:51 crc kubenswrapper[4998]: I0203 07:06:51.544972 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"59162297-8dd9-4ddd-a18b-8045d2f6c610","Type":"ContainerStarted","Data":"32b39b9d482c5108ab853ad62904b980cd74eaab84e9164c4243b13c2f37c3c5"} Feb 03 07:06:51 crc kubenswrapper[4998]: I0203 07:06:51.546239 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Feb 03 07:06:51 crc kubenswrapper[4998]: I0203 07:06:51.583747 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.583725085 podStartE2EDuration="3.583725085s" podCreationTimestamp="2026-02-03 07:06:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:06:51.575922332 +0000 UTC m=+1249.862616138" watchObservedRunningTime="2026-02-03 07:06:51.583725085 +0000 UTC m=+1249.870418891" Feb 03 07:06:51 crc kubenswrapper[4998]: I0203 07:06:51.637657 4998 reconciler_common.go:218] 
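In the startup-latency entry just above, podStartE2EDuration is the gap between podCreationTimestamp and the watch-observed running time (3.583725085s for cinder-api-0, whose images needed no pulling), and the timestamps print in Go's default time.String() form, so they parse back with the matching layout. A sketch reproducing that figure from those two fields; the layout constant and variable names are illustrative:

    package main

    import (
    	"fmt"
    	"time"
    )

    // Timestamps in pod_startup_latency_tracker entries use Go's default
    // time.String() layout.
    const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

    func main() {
    	created, _ := time.Parse(layout, "2026-02-03 07:06:48 +0000 UTC")
    	observed, _ := time.Parse(layout, "2026-02-03 07:06:51.583725085 +0000 UTC")
    	fmt.Println("podStartE2EDuration =", observed.Sub(created)) // 3.583725085s
    }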
"operationExecutor.MountVolume started for volume \"kube-api-access-7l497\" (UniqueName: \"kubernetes.io/projected/e485e612-9e0b-4d19-a326-523613449a06-kube-api-access-7l497\") pod \"nova-cell0-conductor-db-sync-sqllz\" (UID: \"e485e612-9e0b-4d19-a326-523613449a06\") " pod="openstack/nova-cell0-conductor-db-sync-sqllz" Feb 03 07:06:51 crc kubenswrapper[4998]: I0203 07:06:51.637744 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e485e612-9e0b-4d19-a326-523613449a06-config-data\") pod \"nova-cell0-conductor-db-sync-sqllz\" (UID: \"e485e612-9e0b-4d19-a326-523613449a06\") " pod="openstack/nova-cell0-conductor-db-sync-sqllz" Feb 03 07:06:51 crc kubenswrapper[4998]: I0203 07:06:51.637764 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e485e612-9e0b-4d19-a326-523613449a06-scripts\") pod \"nova-cell0-conductor-db-sync-sqllz\" (UID: \"e485e612-9e0b-4d19-a326-523613449a06\") " pod="openstack/nova-cell0-conductor-db-sync-sqllz" Feb 03 07:06:51 crc kubenswrapper[4998]: I0203 07:06:51.637866 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e485e612-9e0b-4d19-a326-523613449a06-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-sqllz\" (UID: \"e485e612-9e0b-4d19-a326-523613449a06\") " pod="openstack/nova-cell0-conductor-db-sync-sqllz" Feb 03 07:06:51 crc kubenswrapper[4998]: I0203 07:06:51.646520 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e485e612-9e0b-4d19-a326-523613449a06-scripts\") pod \"nova-cell0-conductor-db-sync-sqllz\" (UID: \"e485e612-9e0b-4d19-a326-523613449a06\") " pod="openstack/nova-cell0-conductor-db-sync-sqllz" Feb 03 07:06:51 crc kubenswrapper[4998]: I0203 07:06:51.647190 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e485e612-9e0b-4d19-a326-523613449a06-config-data\") pod \"nova-cell0-conductor-db-sync-sqllz\" (UID: \"e485e612-9e0b-4d19-a326-523613449a06\") " pod="openstack/nova-cell0-conductor-db-sync-sqllz" Feb 03 07:06:51 crc kubenswrapper[4998]: I0203 07:06:51.660300 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7l497\" (UniqueName: \"kubernetes.io/projected/e485e612-9e0b-4d19-a326-523613449a06-kube-api-access-7l497\") pod \"nova-cell0-conductor-db-sync-sqllz\" (UID: \"e485e612-9e0b-4d19-a326-523613449a06\") " pod="openstack/nova-cell0-conductor-db-sync-sqllz" Feb 03 07:06:51 crc kubenswrapper[4998]: I0203 07:06:51.664349 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e485e612-9e0b-4d19-a326-523613449a06-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-sqllz\" (UID: \"e485e612-9e0b-4d19-a326-523613449a06\") " pod="openstack/nova-cell0-conductor-db-sync-sqllz" Feb 03 07:06:51 crc kubenswrapper[4998]: I0203 07:06:51.809219 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-sqllz" Feb 03 07:06:52 crc kubenswrapper[4998]: I0203 07:06:52.322528 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-sqllz"] Feb 03 07:06:52 crc kubenswrapper[4998]: W0203 07:06:52.330093 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode485e612_9e0b_4d19_a326_523613449a06.slice/crio-cc873f0ce0f6a02f932ee6fc37ecfaa0e23255fdce7cb0fec5413e26beedaf33 WatchSource:0}: Error finding container cc873f0ce0f6a02f932ee6fc37ecfaa0e23255fdce7cb0fec5413e26beedaf33: Status 404 returned error can't find the container with id cc873f0ce0f6a02f932ee6fc37ecfaa0e23255fdce7cb0fec5413e26beedaf33 Feb 03 07:06:52 crc kubenswrapper[4998]: I0203 07:06:52.555984 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5f716ae2-1189-4011-9cdd-2e0fcecd2001","Type":"ContainerStarted","Data":"59f41a9b1fc5000b09d5418190451a798299591f4cf6f6235e45b98bfba625e4"} Feb 03 07:06:52 crc kubenswrapper[4998]: I0203 07:06:52.556168 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5f716ae2-1189-4011-9cdd-2e0fcecd2001" containerName="ceilometer-central-agent" containerID="cri-o://b0e03c6903fb418fe5ef193a6318a6cfcd2bc6374a0d3dba67065d83f231d952" gracePeriod=30 Feb 03 07:06:52 crc kubenswrapper[4998]: I0203 07:06:52.556449 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 03 07:06:52 crc kubenswrapper[4998]: I0203 07:06:52.556801 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5f716ae2-1189-4011-9cdd-2e0fcecd2001" containerName="proxy-httpd" containerID="cri-o://59f41a9b1fc5000b09d5418190451a798299591f4cf6f6235e45b98bfba625e4" gracePeriod=30 Feb 03 07:06:52 crc kubenswrapper[4998]: I0203 07:06:52.556864 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5f716ae2-1189-4011-9cdd-2e0fcecd2001" containerName="sg-core" containerID="cri-o://c8ecd618c97f3a3e9529f6bba846a36804684550e69c654736d13c93bc53dfe3" gracePeriod=30 Feb 03 07:06:52 crc kubenswrapper[4998]: I0203 07:06:52.556903 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5f716ae2-1189-4011-9cdd-2e0fcecd2001" containerName="ceilometer-notification-agent" containerID="cri-o://c394b749fd034302a3b77b45ea2c12171ea8b605ec1002682b52eecb0b931ab6" gracePeriod=30 Feb 03 07:06:52 crc kubenswrapper[4998]: I0203 07:06:52.562433 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-sqllz" event={"ID":"e485e612-9e0b-4d19-a326-523613449a06","Type":"ContainerStarted","Data":"cc873f0ce0f6a02f932ee6fc37ecfaa0e23255fdce7cb0fec5413e26beedaf33"} Feb 03 07:06:52 crc kubenswrapper[4998]: I0203 07:06:52.581320 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.004514706 podStartE2EDuration="6.58129856s" podCreationTimestamp="2026-02-03 07:06:46 +0000 UTC" firstStartedPulling="2026-02-03 07:06:47.328806648 +0000 UTC m=+1245.615500454" lastFinishedPulling="2026-02-03 07:06:51.905590502 +0000 UTC m=+1250.192284308" observedRunningTime="2026-02-03 07:06:52.578411437 +0000 UTC m=+1250.865105263" watchObservedRunningTime="2026-02-03 07:06:52.58129856 +0000 
UTC m=+1250.867992366" Feb 03 07:06:53 crc kubenswrapper[4998]: I0203 07:06:53.575966 4998 generic.go:334] "Generic (PLEG): container finished" podID="5f716ae2-1189-4011-9cdd-2e0fcecd2001" containerID="59f41a9b1fc5000b09d5418190451a798299591f4cf6f6235e45b98bfba625e4" exitCode=0 Feb 03 07:06:53 crc kubenswrapper[4998]: I0203 07:06:53.576248 4998 generic.go:334] "Generic (PLEG): container finished" podID="5f716ae2-1189-4011-9cdd-2e0fcecd2001" containerID="c8ecd618c97f3a3e9529f6bba846a36804684550e69c654736d13c93bc53dfe3" exitCode=2 Feb 03 07:06:53 crc kubenswrapper[4998]: I0203 07:06:53.576261 4998 generic.go:334] "Generic (PLEG): container finished" podID="5f716ae2-1189-4011-9cdd-2e0fcecd2001" containerID="c394b749fd034302a3b77b45ea2c12171ea8b605ec1002682b52eecb0b931ab6" exitCode=0 Feb 03 07:06:53 crc kubenswrapper[4998]: I0203 07:06:53.576272 4998 generic.go:334] "Generic (PLEG): container finished" podID="5f716ae2-1189-4011-9cdd-2e0fcecd2001" containerID="b0e03c6903fb418fe5ef193a6318a6cfcd2bc6374a0d3dba67065d83f231d952" exitCode=0 Feb 03 07:06:53 crc kubenswrapper[4998]: I0203 07:06:53.577151 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5f716ae2-1189-4011-9cdd-2e0fcecd2001","Type":"ContainerDied","Data":"59f41a9b1fc5000b09d5418190451a798299591f4cf6f6235e45b98bfba625e4"} Feb 03 07:06:53 crc kubenswrapper[4998]: I0203 07:06:53.577183 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5f716ae2-1189-4011-9cdd-2e0fcecd2001","Type":"ContainerDied","Data":"c8ecd618c97f3a3e9529f6bba846a36804684550e69c654736d13c93bc53dfe3"} Feb 03 07:06:53 crc kubenswrapper[4998]: I0203 07:06:53.577193 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5f716ae2-1189-4011-9cdd-2e0fcecd2001","Type":"ContainerDied","Data":"c394b749fd034302a3b77b45ea2c12171ea8b605ec1002682b52eecb0b931ab6"} Feb 03 07:06:53 crc kubenswrapper[4998]: I0203 07:06:53.577202 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5f716ae2-1189-4011-9cdd-2e0fcecd2001","Type":"ContainerDied","Data":"b0e03c6903fb418fe5ef193a6318a6cfcd2bc6374a0d3dba67065d83f231d952"} Feb 03 07:06:53 crc kubenswrapper[4998]: I0203 07:06:53.799278 4998 util.go:48] "No ready sandbox for pod can be found. 
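"Killing container with a grace period ... gracePeriod=30" above is the stop-then-force pattern: signal the container to stop, wait up to the grace period, and force-kill only on timeout; here each container exited on its own within it (the exit codes in the "container finished" entries just above). A sketch of that pattern with stand-in term/kill hooks, not CRI calls:

    package main

    import (
    	"fmt"
    	"time"
    )

    // killWithGrace waits up to the grace period for the container to exit
    // after being asked to stop, and force-kills only if it does not.
    func killWithGrace(id string, grace time.Duration, exited <-chan struct{}, kill func()) {
    	select {
    	case <-exited: // container stopped on its own, as in the log above
    		fmt.Println(id, "exited within grace period")
    	case <-time.After(grace):
    		kill()
    		fmt.Println(id, "force killed after", grace)
    	}
    }

    func main() {
    	exited := make(chan struct{})
    	go func() { time.Sleep(10 * time.Millisecond); close(exited) }()
    	killWithGrace("59f41a9b1fc5", 30*time.Second, exited, func() {})
    }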
Feb 03 07:06:53 crc kubenswrapper[4998]: I0203 07:06:53.799278 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Feb 03 07:06:53 crc kubenswrapper[4998]: I0203 07:06:53.890987 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5f716ae2-1189-4011-9cdd-2e0fcecd2001-sg-core-conf-yaml\") pod \"5f716ae2-1189-4011-9cdd-2e0fcecd2001\" (UID: \"5f716ae2-1189-4011-9cdd-2e0fcecd2001\") "
Feb 03 07:06:53 crc kubenswrapper[4998]: I0203 07:06:53.891104 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5f716ae2-1189-4011-9cdd-2e0fcecd2001-log-httpd\") pod \"5f716ae2-1189-4011-9cdd-2e0fcecd2001\" (UID: \"5f716ae2-1189-4011-9cdd-2e0fcecd2001\") "
Feb 03 07:06:53 crc kubenswrapper[4998]: I0203 07:06:53.891149 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f716ae2-1189-4011-9cdd-2e0fcecd2001-combined-ca-bundle\") pod \"5f716ae2-1189-4011-9cdd-2e0fcecd2001\" (UID: \"5f716ae2-1189-4011-9cdd-2e0fcecd2001\") "
Feb 03 07:06:53 crc kubenswrapper[4998]: I0203 07:06:53.891183 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vnwsp\" (UniqueName: \"kubernetes.io/projected/5f716ae2-1189-4011-9cdd-2e0fcecd2001-kube-api-access-vnwsp\") pod \"5f716ae2-1189-4011-9cdd-2e0fcecd2001\" (UID: \"5f716ae2-1189-4011-9cdd-2e0fcecd2001\") "
Feb 03 07:06:53 crc kubenswrapper[4998]: I0203 07:06:53.891217 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f716ae2-1189-4011-9cdd-2e0fcecd2001-config-data\") pod \"5f716ae2-1189-4011-9cdd-2e0fcecd2001\" (UID: \"5f716ae2-1189-4011-9cdd-2e0fcecd2001\") "
Feb 03 07:06:53 crc kubenswrapper[4998]: I0203 07:06:53.891345 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f716ae2-1189-4011-9cdd-2e0fcecd2001-scripts\") pod \"5f716ae2-1189-4011-9cdd-2e0fcecd2001\" (UID: \"5f716ae2-1189-4011-9cdd-2e0fcecd2001\") "
Feb 03 07:06:53 crc kubenswrapper[4998]: I0203 07:06:53.891404 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5f716ae2-1189-4011-9cdd-2e0fcecd2001-run-httpd\") pod \"5f716ae2-1189-4011-9cdd-2e0fcecd2001\" (UID: \"5f716ae2-1189-4011-9cdd-2e0fcecd2001\") "
Feb 03 07:06:53 crc kubenswrapper[4998]: I0203 07:06:53.891696 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f716ae2-1189-4011-9cdd-2e0fcecd2001-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "5f716ae2-1189-4011-9cdd-2e0fcecd2001" (UID: "5f716ae2-1189-4011-9cdd-2e0fcecd2001"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 07:06:53 crc kubenswrapper[4998]: I0203 07:06:53.892037 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f716ae2-1189-4011-9cdd-2e0fcecd2001-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "5f716ae2-1189-4011-9cdd-2e0fcecd2001" (UID: "5f716ae2-1189-4011-9cdd-2e0fcecd2001"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 07:06:53 crc kubenswrapper[4998]: I0203 07:06:53.892260 4998 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5f716ae2-1189-4011-9cdd-2e0fcecd2001-run-httpd\") on node \"crc\" DevicePath \"\""
Feb 03 07:06:53 crc kubenswrapper[4998]: I0203 07:06:53.892287 4998 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5f716ae2-1189-4011-9cdd-2e0fcecd2001-log-httpd\") on node \"crc\" DevicePath \"\""
Feb 03 07:06:53 crc kubenswrapper[4998]: I0203 07:06:53.897004 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f716ae2-1189-4011-9cdd-2e0fcecd2001-scripts" (OuterVolumeSpecName: "scripts") pod "5f716ae2-1189-4011-9cdd-2e0fcecd2001" (UID: "5f716ae2-1189-4011-9cdd-2e0fcecd2001"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:06:53 crc kubenswrapper[4998]: I0203 07:06:53.897567 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f716ae2-1189-4011-9cdd-2e0fcecd2001-kube-api-access-vnwsp" (OuterVolumeSpecName: "kube-api-access-vnwsp") pod "5f716ae2-1189-4011-9cdd-2e0fcecd2001" (UID: "5f716ae2-1189-4011-9cdd-2e0fcecd2001"). InnerVolumeSpecName "kube-api-access-vnwsp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 07:06:53 crc kubenswrapper[4998]: I0203 07:06:53.931317 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f716ae2-1189-4011-9cdd-2e0fcecd2001-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "5f716ae2-1189-4011-9cdd-2e0fcecd2001" (UID: "5f716ae2-1189-4011-9cdd-2e0fcecd2001"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:06:53 crc kubenswrapper[4998]: I0203 07:06:53.985095 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f716ae2-1189-4011-9cdd-2e0fcecd2001-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5f716ae2-1189-4011-9cdd-2e0fcecd2001" (UID: "5f716ae2-1189-4011-9cdd-2e0fcecd2001"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:06:53 crc kubenswrapper[4998]: I0203 07:06:53.993998 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f716ae2-1189-4011-9cdd-2e0fcecd2001-scripts\") on node \"crc\" DevicePath \"\""
Feb 03 07:06:53 crc kubenswrapper[4998]: I0203 07:06:53.994028 4998 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5f716ae2-1189-4011-9cdd-2e0fcecd2001-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Feb 03 07:06:53 crc kubenswrapper[4998]: I0203 07:06:53.994039 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f716ae2-1189-4011-9cdd-2e0fcecd2001-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 03 07:06:53 crc kubenswrapper[4998]: I0203 07:06:53.994047 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vnwsp\" (UniqueName: \"kubernetes.io/projected/5f716ae2-1189-4011-9cdd-2e0fcecd2001-kube-api-access-vnwsp\") on node \"crc\" DevicePath \"\""
Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.008423 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f716ae2-1189-4011-9cdd-2e0fcecd2001-config-data" (OuterVolumeSpecName: "config-data") pod "5f716ae2-1189-4011-9cdd-2e0fcecd2001" (UID: "5f716ae2-1189-4011-9cdd-2e0fcecd2001"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.095532 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f716ae2-1189-4011-9cdd-2e0fcecd2001-config-data\") on node \"crc\" DevicePath \"\""
Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.477420 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-594c6c97c7-9bqhd"
Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.569432 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7787cd6574-rthd9"]
Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.569726 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7787cd6574-rthd9" podUID="dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8" containerName="neutron-api" containerID="cri-o://038ddedd8269a4ec6b7e004267d315bb7d8a78e0674e67c6566730cef59cf318" gracePeriod=30
Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.570132 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7787cd6574-rthd9" podUID="dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8" containerName="neutron-httpd" containerID="cri-o://f311a13a918df92982ce4a22f5dec1f02faec87241db57192d352931f4fee838" gracePeriod=30
Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.618448 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5f716ae2-1189-4011-9cdd-2e0fcecd2001","Type":"ContainerDied","Data":"13bfb1a46cf2d14e17033b47bc86b616f830e4c97fdb2e0f510a618e4a15707a"}
Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.618498 4998 scope.go:117] "RemoveContainer" containerID="59f41a9b1fc5000b09d5418190451a798299591f4cf6f6235e45b98bfba625e4"
Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.618628 4998 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.651586 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.670946 4998 scope.go:117] "RemoveContainer" containerID="c8ecd618c97f3a3e9529f6bba846a36804684550e69c654736d13c93bc53dfe3" Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.679826 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.696361 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:06:54 crc kubenswrapper[4998]: E0203 07:06:54.696772 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f716ae2-1189-4011-9cdd-2e0fcecd2001" containerName="ceilometer-central-agent" Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.696804 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f716ae2-1189-4011-9cdd-2e0fcecd2001" containerName="ceilometer-central-agent" Feb 03 07:06:54 crc kubenswrapper[4998]: E0203 07:06:54.696822 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f716ae2-1189-4011-9cdd-2e0fcecd2001" containerName="ceilometer-notification-agent" Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.696829 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f716ae2-1189-4011-9cdd-2e0fcecd2001" containerName="ceilometer-notification-agent" Feb 03 07:06:54 crc kubenswrapper[4998]: E0203 07:06:54.696843 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f716ae2-1189-4011-9cdd-2e0fcecd2001" containerName="proxy-httpd" Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.696849 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f716ae2-1189-4011-9cdd-2e0fcecd2001" containerName="proxy-httpd" Feb 03 07:06:54 crc kubenswrapper[4998]: E0203 07:06:54.696861 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f716ae2-1189-4011-9cdd-2e0fcecd2001" containerName="sg-core" Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.696867 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f716ae2-1189-4011-9cdd-2e0fcecd2001" containerName="sg-core" Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.697029 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f716ae2-1189-4011-9cdd-2e0fcecd2001" containerName="ceilometer-notification-agent" Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.697045 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f716ae2-1189-4011-9cdd-2e0fcecd2001" containerName="proxy-httpd" Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.697054 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f716ae2-1189-4011-9cdd-2e0fcecd2001" containerName="sg-core" Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.697062 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f716ae2-1189-4011-9cdd-2e0fcecd2001" containerName="ceilometer-central-agent" Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.698564 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.703433 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.703666 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.719915 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.727422 4998 scope.go:117] "RemoveContainer" containerID="c394b749fd034302a3b77b45ea2c12171ea8b605ec1002682b52eecb0b931ab6" Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.781321 4998 scope.go:117] "RemoveContainer" containerID="b0e03c6903fb418fe5ef193a6318a6cfcd2bc6374a0d3dba67065d83f231d952" Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.817964 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccb119a6-e9ac-49e5-9980-a150bf800d2a-log-httpd\") pod \"ceilometer-0\" (UID: \"ccb119a6-e9ac-49e5-9980-a150bf800d2a\") " pod="openstack/ceilometer-0" Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.818074 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gz6xc\" (UniqueName: \"kubernetes.io/projected/ccb119a6-e9ac-49e5-9980-a150bf800d2a-kube-api-access-gz6xc\") pod \"ceilometer-0\" (UID: \"ccb119a6-e9ac-49e5-9980-a150bf800d2a\") " pod="openstack/ceilometer-0" Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.818109 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccb119a6-e9ac-49e5-9980-a150bf800d2a-run-httpd\") pod \"ceilometer-0\" (UID: \"ccb119a6-e9ac-49e5-9980-a150bf800d2a\") " pod="openstack/ceilometer-0" Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.818143 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ccb119a6-e9ac-49e5-9980-a150bf800d2a-scripts\") pod \"ceilometer-0\" (UID: \"ccb119a6-e9ac-49e5-9980-a150bf800d2a\") " pod="openstack/ceilometer-0" Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.818173 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ccb119a6-e9ac-49e5-9980-a150bf800d2a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ccb119a6-e9ac-49e5-9980-a150bf800d2a\") " pod="openstack/ceilometer-0" Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.818203 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccb119a6-e9ac-49e5-9980-a150bf800d2a-config-data\") pod \"ceilometer-0\" (UID: \"ccb119a6-e9ac-49e5-9980-a150bf800d2a\") " pod="openstack/ceilometer-0" Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.818242 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccb119a6-e9ac-49e5-9980-a150bf800d2a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ccb119a6-e9ac-49e5-9980-a150bf800d2a\") " pod="openstack/ceilometer-0" Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 
Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.920343 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccb119a6-e9ac-49e5-9980-a150bf800d2a-run-httpd\") pod \"ceilometer-0\" (UID: \"ccb119a6-e9ac-49e5-9980-a150bf800d2a\") " pod="openstack/ceilometer-0"
Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.920390 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ccb119a6-e9ac-49e5-9980-a150bf800d2a-scripts\") pod \"ceilometer-0\" (UID: \"ccb119a6-e9ac-49e5-9980-a150bf800d2a\") " pod="openstack/ceilometer-0"
Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.920422 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ccb119a6-e9ac-49e5-9980-a150bf800d2a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ccb119a6-e9ac-49e5-9980-a150bf800d2a\") " pod="openstack/ceilometer-0"
Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.920450 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccb119a6-e9ac-49e5-9980-a150bf800d2a-config-data\") pod \"ceilometer-0\" (UID: \"ccb119a6-e9ac-49e5-9980-a150bf800d2a\") " pod="openstack/ceilometer-0"
Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.920487 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccb119a6-e9ac-49e5-9980-a150bf800d2a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ccb119a6-e9ac-49e5-9980-a150bf800d2a\") " pod="openstack/ceilometer-0"
Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.920618 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccb119a6-e9ac-49e5-9980-a150bf800d2a-log-httpd\") pod \"ceilometer-0\" (UID: \"ccb119a6-e9ac-49e5-9980-a150bf800d2a\") " pod="openstack/ceilometer-0"
Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.920996 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccb119a6-e9ac-49e5-9980-a150bf800d2a-run-httpd\") pod \"ceilometer-0\" (UID: \"ccb119a6-e9ac-49e5-9980-a150bf800d2a\") " pod="openstack/ceilometer-0"
Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.921043 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccb119a6-e9ac-49e5-9980-a150bf800d2a-log-httpd\") pod \"ceilometer-0\" (UID: \"ccb119a6-e9ac-49e5-9980-a150bf800d2a\") " pod="openstack/ceilometer-0"
Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.926065 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccb119a6-e9ac-49e5-9980-a150bf800d2a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ccb119a6-e9ac-49e5-9980-a150bf800d2a\") " pod="openstack/ceilometer-0"
Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.927009 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ccb119a6-e9ac-49e5-9980-a150bf800d2a-scripts\") pod \"ceilometer-0\" (UID: \"ccb119a6-e9ac-49e5-9980-a150bf800d2a\") " pod="openstack/ceilometer-0"
Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.935553 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ccb119a6-e9ac-49e5-9980-a150bf800d2a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ccb119a6-e9ac-49e5-9980-a150bf800d2a\") " pod="openstack/ceilometer-0"
Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.941709 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gz6xc\" (UniqueName: \"kubernetes.io/projected/ccb119a6-e9ac-49e5-9980-a150bf800d2a-kube-api-access-gz6xc\") pod \"ceilometer-0\" (UID: \"ccb119a6-e9ac-49e5-9980-a150bf800d2a\") " pod="openstack/ceilometer-0"
Feb 03 07:06:54 crc kubenswrapper[4998]: I0203 07:06:54.942851 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccb119a6-e9ac-49e5-9980-a150bf800d2a-config-data\") pod \"ceilometer-0\" (UID: \"ccb119a6-e9ac-49e5-9980-a150bf800d2a\") " pod="openstack/ceilometer-0"
Feb 03 07:06:55 crc kubenswrapper[4998]: I0203 07:06:55.057428 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Feb 03 07:06:55 crc kubenswrapper[4998]: I0203 07:06:55.575843 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Feb 03 07:06:55 crc kubenswrapper[4998]: W0203 07:06:55.584093 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podccb119a6_e9ac_49e5_9980_a150bf800d2a.slice/crio-1dfe367ddda70785fa2bb8c4aa100638a046decd5ecf719a3184ee0a60f8c412 WatchSource:0}: Error finding container 1dfe367ddda70785fa2bb8c4aa100638a046decd5ecf719a3184ee0a60f8c412: Status 404 returned error can't find the container with id 1dfe367ddda70785fa2bb8c4aa100638a046decd5ecf719a3184ee0a60f8c412
Feb 03 07:06:55 crc kubenswrapper[4998]: I0203 07:06:55.632643 4998 generic.go:334] "Generic (PLEG): container finished" podID="dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8" containerID="f311a13a918df92982ce4a22f5dec1f02faec87241db57192d352931f4fee838" exitCode=0
Feb 03 07:06:55 crc kubenswrapper[4998]: I0203 07:06:55.632731 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7787cd6574-rthd9" event={"ID":"dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8","Type":"ContainerDied","Data":"f311a13a918df92982ce4a22f5dec1f02faec87241db57192d352931f4fee838"}
Feb 03 07:06:55 crc kubenswrapper[4998]: I0203 07:06:55.633894 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ccb119a6-e9ac-49e5-9980-a150bf800d2a","Type":"ContainerStarted","Data":"1dfe367ddda70785fa2bb8c4aa100638a046decd5ecf719a3184ee0a60f8c412"}
Feb 03 07:06:56 crc kubenswrapper[4998]: I0203 07:06:56.438250 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f716ae2-1189-4011-9cdd-2e0fcecd2001" path="/var/lib/kubelet/pods/5f716ae2-1189-4011-9cdd-2e0fcecd2001/volumes"
Feb 03 07:06:56 crc kubenswrapper[4998]: I0203 07:06:56.807486 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Feb 03 07:06:59 crc kubenswrapper[4998]: I0203 07:06:59.677161 4998 generic.go:334] "Generic (PLEG): container finished" podID="dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8" containerID="038ddedd8269a4ec6b7e004267d315bb7d8a78e0674e67c6566730cef59cf318" exitCode=0
finished" podID="dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8" containerID="038ddedd8269a4ec6b7e004267d315bb7d8a78e0674e67c6566730cef59cf318" exitCode=0 Feb 03 07:06:59 crc kubenswrapper[4998]: I0203 07:06:59.677233 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7787cd6574-rthd9" event={"ID":"dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8","Type":"ContainerDied","Data":"038ddedd8269a4ec6b7e004267d315bb7d8a78e0674e67c6566730cef59cf318"} Feb 03 07:07:00 crc kubenswrapper[4998]: I0203 07:07:00.668760 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-55ccbc8794-9m7vc" Feb 03 07:07:00 crc kubenswrapper[4998]: I0203 07:07:00.672751 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-55ccbc8794-9m7vc" Feb 03 07:07:00 crc kubenswrapper[4998]: I0203 07:07:00.753053 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-56ff8c5b4b-c52fv"] Feb 03 07:07:00 crc kubenswrapper[4998]: I0203 07:07:00.753305 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-56ff8c5b4b-c52fv" podUID="4b05b57d-c23c-4f54-b6a2-f00fd972a645" containerName="placement-log" containerID="cri-o://db9ecc90aaa6dd8decbfaa5d261b6869a3cc296d7722872bfc4021effd336a6b" gracePeriod=30 Feb 03 07:07:00 crc kubenswrapper[4998]: I0203 07:07:00.753723 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-56ff8c5b4b-c52fv" podUID="4b05b57d-c23c-4f54-b6a2-f00fd972a645" containerName="placement-api" containerID="cri-o://1fccb81ac7e9f7c431a9e2be9488418a1b7289be69c45384369859d0ac6a6e32" gracePeriod=30 Feb 03 07:07:01 crc kubenswrapper[4998]: I0203 07:07:01.516456 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Feb 03 07:07:01 crc kubenswrapper[4998]: I0203 07:07:01.704081 4998 generic.go:334] "Generic (PLEG): container finished" podID="4b05b57d-c23c-4f54-b6a2-f00fd972a645" containerID="db9ecc90aaa6dd8decbfaa5d261b6869a3cc296d7722872bfc4021effd336a6b" exitCode=143 Feb 03 07:07:01 crc kubenswrapper[4998]: I0203 07:07:01.704162 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-56ff8c5b4b-c52fv" event={"ID":"4b05b57d-c23c-4f54-b6a2-f00fd972a645","Type":"ContainerDied","Data":"db9ecc90aaa6dd8decbfaa5d261b6869a3cc296d7722872bfc4021effd336a6b"} Feb 03 07:07:02 crc kubenswrapper[4998]: I0203 07:07:02.245304 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 07:07:02 crc kubenswrapper[4998]: I0203 07:07:02.245638 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="c0cbd21f-2ac7-4c48-a160-18e816ed8b2e" containerName="glance-log" containerID="cri-o://35b45a5b0f8581c84fc3e062610dd5acaf810616fda9be2adfa678c3edf552b3" gracePeriod=30 Feb 03 07:07:02 crc kubenswrapper[4998]: I0203 07:07:02.246335 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="c0cbd21f-2ac7-4c48-a160-18e816ed8b2e" containerName="glance-httpd" containerID="cri-o://3c1e4ffe1c3edfbfc9d77cde2bc9b71dfa60f0a7da07ebbb4913417195ff50c3" gracePeriod=30 Feb 03 07:07:02 crc kubenswrapper[4998]: I0203 07:07:02.568996 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7787cd6574-rthd9" Feb 03 07:07:02 crc kubenswrapper[4998]: I0203 07:07:02.690117 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8-ovndb-tls-certs\") pod \"dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8\" (UID: \"dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8\") " Feb 03 07:07:02 crc kubenswrapper[4998]: I0203 07:07:02.690227 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5xcf7\" (UniqueName: \"kubernetes.io/projected/dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8-kube-api-access-5xcf7\") pod \"dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8\" (UID: \"dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8\") " Feb 03 07:07:02 crc kubenswrapper[4998]: I0203 07:07:02.690274 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8-config\") pod \"dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8\" (UID: \"dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8\") " Feb 03 07:07:02 crc kubenswrapper[4998]: I0203 07:07:02.690297 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8-httpd-config\") pod \"dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8\" (UID: \"dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8\") " Feb 03 07:07:02 crc kubenswrapper[4998]: I0203 07:07:02.690395 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8-combined-ca-bundle\") pod \"dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8\" (UID: \"dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8\") " Feb 03 07:07:02 crc kubenswrapper[4998]: I0203 07:07:02.696990 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8-kube-api-access-5xcf7" (OuterVolumeSpecName: "kube-api-access-5xcf7") pod "dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8" (UID: "dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8"). InnerVolumeSpecName "kube-api-access-5xcf7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:07:02 crc kubenswrapper[4998]: I0203 07:07:02.697848 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8" (UID: "dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8"). InnerVolumeSpecName "httpd-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:07:02 crc kubenswrapper[4998]: I0203 07:07:02.719509 4998 generic.go:334] "Generic (PLEG): container finished" podID="c0cbd21f-2ac7-4c48-a160-18e816ed8b2e" containerID="35b45a5b0f8581c84fc3e062610dd5acaf810616fda9be2adfa678c3edf552b3" exitCode=143 Feb 03 07:07:02 crc kubenswrapper[4998]: I0203 07:07:02.719604 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e","Type":"ContainerDied","Data":"35b45a5b0f8581c84fc3e062610dd5acaf810616fda9be2adfa678c3edf552b3"} Feb 03 07:07:02 crc kubenswrapper[4998]: I0203 07:07:02.724703 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7787cd6574-rthd9" event={"ID":"dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8","Type":"ContainerDied","Data":"015a5235414b7f07773d5a5c216add7dd2cadfcd9e9548caa9add60f57a0d0c7"} Feb 03 07:07:02 crc kubenswrapper[4998]: I0203 07:07:02.724760 4998 scope.go:117] "RemoveContainer" containerID="f311a13a918df92982ce4a22f5dec1f02faec87241db57192d352931f4fee838" Feb 03 07:07:02 crc kubenswrapper[4998]: I0203 07:07:02.724767 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7787cd6574-rthd9" Feb 03 07:07:02 crc kubenswrapper[4998]: I0203 07:07:02.731698 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-sqllz" event={"ID":"e485e612-9e0b-4d19-a326-523613449a06","Type":"ContainerStarted","Data":"8a11e53e794f1fad621650795f023987d0a74334697f2a88640796077ad63bc4"} Feb 03 07:07:02 crc kubenswrapper[4998]: I0203 07:07:02.736323 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ccb119a6-e9ac-49e5-9980-a150bf800d2a","Type":"ContainerStarted","Data":"ebdf5e0b8a0859716f4b568e67cdaa631811329bf8b049bc9dac2b4ed056b776"} Feb 03 07:07:02 crc kubenswrapper[4998]: I0203 07:07:02.753398 4998 scope.go:117] "RemoveContainer" containerID="038ddedd8269a4ec6b7e004267d315bb7d8a78e0674e67c6566730cef59cf318" Feb 03 07:07:02 crc kubenswrapper[4998]: I0203 07:07:02.760575 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8-config" (OuterVolumeSpecName: "config") pod "dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8" (UID: "dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:07:02 crc kubenswrapper[4998]: I0203 07:07:02.766746 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-sqllz" podStartSLOduration=1.916207719 podStartE2EDuration="11.766730735s" podCreationTimestamp="2026-02-03 07:06:51 +0000 UTC" firstStartedPulling="2026-02-03 07:06:52.332722397 +0000 UTC m=+1250.619416203" lastFinishedPulling="2026-02-03 07:07:02.183245413 +0000 UTC m=+1260.469939219" observedRunningTime="2026-02-03 07:07:02.753841537 +0000 UTC m=+1261.040535353" watchObservedRunningTime="2026-02-03 07:07:02.766730735 +0000 UTC m=+1261.053424541" Feb 03 07:07:02 crc kubenswrapper[4998]: I0203 07:07:02.771986 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8" (UID: "dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:07:02 crc kubenswrapper[4998]: I0203 07:07:02.792590 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5xcf7\" (UniqueName: \"kubernetes.io/projected/dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8-kube-api-access-5xcf7\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:02 crc kubenswrapper[4998]: I0203 07:07:02.792635 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:02 crc kubenswrapper[4998]: I0203 07:07:02.792650 4998 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8-httpd-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:02 crc kubenswrapper[4998]: I0203 07:07:02.792662 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:02 crc kubenswrapper[4998]: I0203 07:07:02.801092 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8" (UID: "dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:07:02 crc kubenswrapper[4998]: I0203 07:07:02.894451 4998 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:03 crc kubenswrapper[4998]: I0203 07:07:03.165493 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7787cd6574-rthd9"] Feb 03 07:07:03 crc kubenswrapper[4998]: I0203 07:07:03.173415 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-7787cd6574-rthd9"] Feb 03 07:07:03 crc kubenswrapper[4998]: I0203 07:07:03.748154 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ccb119a6-e9ac-49e5-9980-a150bf800d2a","Type":"ContainerStarted","Data":"d0c47d808dab088e1956c0ff99b9c4ce68a2b8d7aa2fa5641fa9b2af12094e7b"} Feb 03 07:07:04 crc kubenswrapper[4998]: I0203 07:07:04.389127 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-56ff8c5b4b-c52fv" Feb 03 07:07:04 crc kubenswrapper[4998]: I0203 07:07:04.425458 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b05b57d-c23c-4f54-b6a2-f00fd972a645-combined-ca-bundle\") pod \"4b05b57d-c23c-4f54-b6a2-f00fd972a645\" (UID: \"4b05b57d-c23c-4f54-b6a2-f00fd972a645\") " Feb 03 07:07:04 crc kubenswrapper[4998]: I0203 07:07:04.425510 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4kj8\" (UniqueName: \"kubernetes.io/projected/4b05b57d-c23c-4f54-b6a2-f00fd972a645-kube-api-access-x4kj8\") pod \"4b05b57d-c23c-4f54-b6a2-f00fd972a645\" (UID: \"4b05b57d-c23c-4f54-b6a2-f00fd972a645\") " Feb 03 07:07:04 crc kubenswrapper[4998]: I0203 07:07:04.425584 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b05b57d-c23c-4f54-b6a2-f00fd972a645-scripts\") pod \"4b05b57d-c23c-4f54-b6a2-f00fd972a645\" (UID: \"4b05b57d-c23c-4f54-b6a2-f00fd972a645\") " Feb 03 07:07:04 crc kubenswrapper[4998]: I0203 07:07:04.425673 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b05b57d-c23c-4f54-b6a2-f00fd972a645-internal-tls-certs\") pod \"4b05b57d-c23c-4f54-b6a2-f00fd972a645\" (UID: \"4b05b57d-c23c-4f54-b6a2-f00fd972a645\") " Feb 03 07:07:04 crc kubenswrapper[4998]: I0203 07:07:04.425713 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b05b57d-c23c-4f54-b6a2-f00fd972a645-config-data\") pod \"4b05b57d-c23c-4f54-b6a2-f00fd972a645\" (UID: \"4b05b57d-c23c-4f54-b6a2-f00fd972a645\") " Feb 03 07:07:04 crc kubenswrapper[4998]: I0203 07:07:04.425752 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b05b57d-c23c-4f54-b6a2-f00fd972a645-public-tls-certs\") pod \"4b05b57d-c23c-4f54-b6a2-f00fd972a645\" (UID: \"4b05b57d-c23c-4f54-b6a2-f00fd972a645\") " Feb 03 07:07:04 crc kubenswrapper[4998]: I0203 07:07:04.425803 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4b05b57d-c23c-4f54-b6a2-f00fd972a645-logs\") pod \"4b05b57d-c23c-4f54-b6a2-f00fd972a645\" (UID: \"4b05b57d-c23c-4f54-b6a2-f00fd972a645\") " Feb 03 07:07:04 crc kubenswrapper[4998]: I0203 07:07:04.426647 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b05b57d-c23c-4f54-b6a2-f00fd972a645-logs" (OuterVolumeSpecName: "logs") pod "4b05b57d-c23c-4f54-b6a2-f00fd972a645" (UID: "4b05b57d-c23c-4f54-b6a2-f00fd972a645"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:07:04 crc kubenswrapper[4998]: I0203 07:07:04.437258 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b05b57d-c23c-4f54-b6a2-f00fd972a645-scripts" (OuterVolumeSpecName: "scripts") pod "4b05b57d-c23c-4f54-b6a2-f00fd972a645" (UID: "4b05b57d-c23c-4f54-b6a2-f00fd972a645"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:07:04 crc kubenswrapper[4998]: I0203 07:07:04.437424 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b05b57d-c23c-4f54-b6a2-f00fd972a645-kube-api-access-x4kj8" (OuterVolumeSpecName: "kube-api-access-x4kj8") pod "4b05b57d-c23c-4f54-b6a2-f00fd972a645" (UID: "4b05b57d-c23c-4f54-b6a2-f00fd972a645"). InnerVolumeSpecName "kube-api-access-x4kj8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:07:04 crc kubenswrapper[4998]: I0203 07:07:04.448096 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8" path="/var/lib/kubelet/pods/dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8/volumes" Feb 03 07:07:04 crc kubenswrapper[4998]: I0203 07:07:04.511973 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b05b57d-c23c-4f54-b6a2-f00fd972a645-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4b05b57d-c23c-4f54-b6a2-f00fd972a645" (UID: "4b05b57d-c23c-4f54-b6a2-f00fd972a645"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:07:04 crc kubenswrapper[4998]: I0203 07:07:04.533119 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b05b57d-c23c-4f54-b6a2-f00fd972a645-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:04 crc kubenswrapper[4998]: I0203 07:07:04.533160 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4kj8\" (UniqueName: \"kubernetes.io/projected/4b05b57d-c23c-4f54-b6a2-f00fd972a645-kube-api-access-x4kj8\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:04 crc kubenswrapper[4998]: I0203 07:07:04.533176 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b05b57d-c23c-4f54-b6a2-f00fd972a645-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:04 crc kubenswrapper[4998]: I0203 07:07:04.533186 4998 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4b05b57d-c23c-4f54-b6a2-f00fd972a645-logs\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:04 crc kubenswrapper[4998]: I0203 07:07:04.559961 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b05b57d-c23c-4f54-b6a2-f00fd972a645-config-data" (OuterVolumeSpecName: "config-data") pod "4b05b57d-c23c-4f54-b6a2-f00fd972a645" (UID: "4b05b57d-c23c-4f54-b6a2-f00fd972a645"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:07:04 crc kubenswrapper[4998]: I0203 07:07:04.566966 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b05b57d-c23c-4f54-b6a2-f00fd972a645-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "4b05b57d-c23c-4f54-b6a2-f00fd972a645" (UID: "4b05b57d-c23c-4f54-b6a2-f00fd972a645"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:07:04 crc kubenswrapper[4998]: I0203 07:07:04.594229 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b05b57d-c23c-4f54-b6a2-f00fd972a645-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "4b05b57d-c23c-4f54-b6a2-f00fd972a645" (UID: "4b05b57d-c23c-4f54-b6a2-f00fd972a645"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:07:04 crc kubenswrapper[4998]: I0203 07:07:04.634608 4998 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b05b57d-c23c-4f54-b6a2-f00fd972a645-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:04 crc kubenswrapper[4998]: I0203 07:07:04.634642 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b05b57d-c23c-4f54-b6a2-f00fd972a645-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:04 crc kubenswrapper[4998]: I0203 07:07:04.634652 4998 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b05b57d-c23c-4f54-b6a2-f00fd972a645-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:04 crc kubenswrapper[4998]: I0203 07:07:04.758853 4998 generic.go:334] "Generic (PLEG): container finished" podID="4b05b57d-c23c-4f54-b6a2-f00fd972a645" containerID="1fccb81ac7e9f7c431a9e2be9488418a1b7289be69c45384369859d0ac6a6e32" exitCode=0 Feb 03 07:07:04 crc kubenswrapper[4998]: I0203 07:07:04.758924 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-56ff8c5b4b-c52fv" event={"ID":"4b05b57d-c23c-4f54-b6a2-f00fd972a645","Type":"ContainerDied","Data":"1fccb81ac7e9f7c431a9e2be9488418a1b7289be69c45384369859d0ac6a6e32"} Feb 03 07:07:04 crc kubenswrapper[4998]: I0203 07:07:04.758957 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-56ff8c5b4b-c52fv" event={"ID":"4b05b57d-c23c-4f54-b6a2-f00fd972a645","Type":"ContainerDied","Data":"14252a855cdaa683d962e21a2ec14dba71a07c75a9f969bdf7e7faddc4d626b2"} Feb 03 07:07:04 crc kubenswrapper[4998]: I0203 07:07:04.758978 4998 scope.go:117] "RemoveContainer" containerID="1fccb81ac7e9f7c431a9e2be9488418a1b7289be69c45384369859d0ac6a6e32" Feb 03 07:07:04 crc kubenswrapper[4998]: I0203 07:07:04.759110 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-56ff8c5b4b-c52fv" Feb 03 07:07:04 crc kubenswrapper[4998]: I0203 07:07:04.764125 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ccb119a6-e9ac-49e5-9980-a150bf800d2a","Type":"ContainerStarted","Data":"4ec46750e035655296412615ff5022daa1dad6f66340e848b861944a0806a7a8"} Feb 03 07:07:04 crc kubenswrapper[4998]: I0203 07:07:04.786823 4998 scope.go:117] "RemoveContainer" containerID="db9ecc90aaa6dd8decbfaa5d261b6869a3cc296d7722872bfc4021effd336a6b" Feb 03 07:07:04 crc kubenswrapper[4998]: I0203 07:07:04.803842 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-56ff8c5b4b-c52fv"] Feb 03 07:07:04 crc kubenswrapper[4998]: I0203 07:07:04.809860 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-56ff8c5b4b-c52fv"] Feb 03 07:07:04 crc kubenswrapper[4998]: I0203 07:07:04.841163 4998 scope.go:117] "RemoveContainer" containerID="1fccb81ac7e9f7c431a9e2be9488418a1b7289be69c45384369859d0ac6a6e32" Feb 03 07:07:04 crc kubenswrapper[4998]: E0203 07:07:04.848950 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1fccb81ac7e9f7c431a9e2be9488418a1b7289be69c45384369859d0ac6a6e32\": container with ID starting with 1fccb81ac7e9f7c431a9e2be9488418a1b7289be69c45384369859d0ac6a6e32 not found: ID does not exist" containerID="1fccb81ac7e9f7c431a9e2be9488418a1b7289be69c45384369859d0ac6a6e32" Feb 03 07:07:04 crc kubenswrapper[4998]: I0203 07:07:04.849010 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1fccb81ac7e9f7c431a9e2be9488418a1b7289be69c45384369859d0ac6a6e32"} err="failed to get container status \"1fccb81ac7e9f7c431a9e2be9488418a1b7289be69c45384369859d0ac6a6e32\": rpc error: code = NotFound desc = could not find container \"1fccb81ac7e9f7c431a9e2be9488418a1b7289be69c45384369859d0ac6a6e32\": container with ID starting with 1fccb81ac7e9f7c431a9e2be9488418a1b7289be69c45384369859d0ac6a6e32 not found: ID does not exist" Feb 03 07:07:04 crc kubenswrapper[4998]: I0203 07:07:04.849041 4998 scope.go:117] "RemoveContainer" containerID="db9ecc90aaa6dd8decbfaa5d261b6869a3cc296d7722872bfc4021effd336a6b" Feb 03 07:07:04 crc kubenswrapper[4998]: E0203 07:07:04.856117 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db9ecc90aaa6dd8decbfaa5d261b6869a3cc296d7722872bfc4021effd336a6b\": container with ID starting with db9ecc90aaa6dd8decbfaa5d261b6869a3cc296d7722872bfc4021effd336a6b not found: ID does not exist" containerID="db9ecc90aaa6dd8decbfaa5d261b6869a3cc296d7722872bfc4021effd336a6b" Feb 03 07:07:04 crc kubenswrapper[4998]: I0203 07:07:04.856182 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db9ecc90aaa6dd8decbfaa5d261b6869a3cc296d7722872bfc4021effd336a6b"} err="failed to get container status \"db9ecc90aaa6dd8decbfaa5d261b6869a3cc296d7722872bfc4021effd336a6b\": rpc error: code = NotFound desc = could not find container \"db9ecc90aaa6dd8decbfaa5d261b6869a3cc296d7722872bfc4021effd336a6b\": container with ID starting with db9ecc90aaa6dd8decbfaa5d261b6869a3cc296d7722872bfc4021effd336a6b not found: ID does not exist" Feb 03 07:07:05 crc kubenswrapper[4998]: I0203 07:07:05.784863 4998 generic.go:334] "Generic (PLEG): container finished" podID="c0cbd21f-2ac7-4c48-a160-18e816ed8b2e" 
containerID="3c1e4ffe1c3edfbfc9d77cde2bc9b71dfa60f0a7da07ebbb4913417195ff50c3" exitCode=0 Feb 03 07:07:05 crc kubenswrapper[4998]: I0203 07:07:05.785228 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e","Type":"ContainerDied","Data":"3c1e4ffe1c3edfbfc9d77cde2bc9b71dfa60f0a7da07ebbb4913417195ff50c3"} Feb 03 07:07:05 crc kubenswrapper[4998]: I0203 07:07:05.901796 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 03 07:07:05 crc kubenswrapper[4998]: I0203 07:07:05.963052 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-logs\") pod \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\" (UID: \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\") " Feb 03 07:07:05 crc kubenswrapper[4998]: I0203 07:07:05.963144 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-config-data\") pod \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\" (UID: \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\") " Feb 03 07:07:05 crc kubenswrapper[4998]: I0203 07:07:05.963190 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wcml2\" (UniqueName: \"kubernetes.io/projected/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-kube-api-access-wcml2\") pod \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\" (UID: \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\") " Feb 03 07:07:05 crc kubenswrapper[4998]: I0203 07:07:05.963248 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\" (UID: \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\") " Feb 03 07:07:05 crc kubenswrapper[4998]: I0203 07:07:05.963297 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-public-tls-certs\") pod \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\" (UID: \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\") " Feb 03 07:07:05 crc kubenswrapper[4998]: I0203 07:07:05.963323 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-scripts\") pod \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\" (UID: \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\") " Feb 03 07:07:05 crc kubenswrapper[4998]: I0203 07:07:05.963358 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-combined-ca-bundle\") pod \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\" (UID: \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\") " Feb 03 07:07:05 crc kubenswrapper[4998]: I0203 07:07:05.963395 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-httpd-run\") pod \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\" (UID: \"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e\") " Feb 03 07:07:05 crc kubenswrapper[4998]: I0203 07:07:05.966730 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-logs" (OuterVolumeSpecName: "logs") pod "c0cbd21f-2ac7-4c48-a160-18e816ed8b2e" (UID: "c0cbd21f-2ac7-4c48-a160-18e816ed8b2e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:07:05 crc kubenswrapper[4998]: I0203 07:07:05.966760 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "c0cbd21f-2ac7-4c48-a160-18e816ed8b2e" (UID: "c0cbd21f-2ac7-4c48-a160-18e816ed8b2e"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:07:05 crc kubenswrapper[4998]: I0203 07:07:05.967874 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "glance") pod "c0cbd21f-2ac7-4c48-a160-18e816ed8b2e" (UID: "c0cbd21f-2ac7-4c48-a160-18e816ed8b2e"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 03 07:07:05 crc kubenswrapper[4998]: I0203 07:07:05.970881 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-scripts" (OuterVolumeSpecName: "scripts") pod "c0cbd21f-2ac7-4c48-a160-18e816ed8b2e" (UID: "c0cbd21f-2ac7-4c48-a160-18e816ed8b2e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:07:05 crc kubenswrapper[4998]: I0203 07:07:05.983997 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-kube-api-access-wcml2" (OuterVolumeSpecName: "kube-api-access-wcml2") pod "c0cbd21f-2ac7-4c48-a160-18e816ed8b2e" (UID: "c0cbd21f-2ac7-4c48-a160-18e816ed8b2e"). InnerVolumeSpecName "kube-api-access-wcml2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.014182 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c0cbd21f-2ac7-4c48-a160-18e816ed8b2e" (UID: "c0cbd21f-2ac7-4c48-a160-18e816ed8b2e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.038169 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-config-data" (OuterVolumeSpecName: "config-data") pod "c0cbd21f-2ac7-4c48-a160-18e816ed8b2e" (UID: "c0cbd21f-2ac7-4c48-a160-18e816ed8b2e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.049472 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "c0cbd21f-2ac7-4c48-a160-18e816ed8b2e" (UID: "c0cbd21f-2ac7-4c48-a160-18e816ed8b2e"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.067465 4998 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-logs\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.067491 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.067503 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wcml2\" (UniqueName: \"kubernetes.io/projected/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-kube-api-access-wcml2\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.067533 4998 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.067542 4998 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.067551 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.067559 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.067567 4998 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.091398 4998 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.169358 4998 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.176887 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.177288 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="c38473eb-790f-454e-b5fb-f382292895c1" containerName="glance-log" containerID="cri-o://b2e6b960df78a7b235174eed618b1694fe06dbf454005c530f409ecf38f4fe69" gracePeriod=30 Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.177815 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="c38473eb-790f-454e-b5fb-f382292895c1" containerName="glance-httpd" containerID="cri-o://815107d7365bb8588bc5fb81ef98f19fde8733cb829325ab5989bc5fcfe8dc4a" gracePeriod=30 Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 
Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.795170 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c0cbd21f-2ac7-4c48-a160-18e816ed8b2e","Type":"ContainerDied","Data":"a499ee8bd5bd4bf2f56bb3e9a601d3d7a4abbc84fc340c83cc59ed733fc213e9"}
Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.795531 4998 scope.go:117] "RemoveContainer" containerID="3c1e4ffe1c3edfbfc9d77cde2bc9b71dfa60f0a7da07ebbb4913417195ff50c3"
Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.795353 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.804831 4998 generic.go:334] "Generic (PLEG): container finished" podID="c38473eb-790f-454e-b5fb-f382292895c1" containerID="b2e6b960df78a7b235174eed618b1694fe06dbf454005c530f409ecf38f4fe69" exitCode=143
Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.804921 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c38473eb-790f-454e-b5fb-f382292895c1","Type":"ContainerDied","Data":"b2e6b960df78a7b235174eed618b1694fe06dbf454005c530f409ecf38f4fe69"}
Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.808001 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ccb119a6-e9ac-49e5-9980-a150bf800d2a" containerName="ceilometer-central-agent" containerID="cri-o://ebdf5e0b8a0859716f4b568e67cdaa631811329bf8b049bc9dac2b4ed056b776" gracePeriod=30
Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.808075 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ccb119a6-e9ac-49e5-9980-a150bf800d2a" containerName="proxy-httpd" containerID="cri-o://f80dae4aa5f84f296f99d3a7ab7f21eecc5d1d379d85bed0125adc14744998ef" gracePeriod=30
Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.808107 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ccb119a6-e9ac-49e5-9980-a150bf800d2a" containerName="sg-core" containerID="cri-o://4ec46750e035655296412615ff5022daa1dad6f66340e848b861944a0806a7a8" gracePeriod=30
Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.808166 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ccb119a6-e9ac-49e5-9980-a150bf800d2a" containerName="ceilometer-notification-agent" containerID="cri-o://d0c47d808dab088e1956c0ff99b9c4ce68a2b8d7aa2fa5641fa9b2af12094e7b" gracePeriod=30
Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.807776 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ccb119a6-e9ac-49e5-9980-a150bf800d2a","Type":"ContainerStarted","Data":"f80dae4aa5f84f296f99d3a7ab7f21eecc5d1d379d85bed0125adc14744998ef"}
Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.808331 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.824366 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.853944 4998 scope.go:117] "RemoveContainer" containerID="35b45a5b0f8581c84fc3e062610dd5acaf810616fda9be2adfa678c3edf552b3"
scope.go:117] "RemoveContainer" containerID="35b45a5b0f8581c84fc3e062610dd5acaf810616fda9be2adfa678c3edf552b3" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.854080 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.868920 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 07:07:06 crc kubenswrapper[4998]: E0203 07:07:06.869422 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8" containerName="neutron-httpd" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.869444 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8" containerName="neutron-httpd" Feb 03 07:07:06 crc kubenswrapper[4998]: E0203 07:07:06.869460 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0cbd21f-2ac7-4c48-a160-18e816ed8b2e" containerName="glance-log" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.869468 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0cbd21f-2ac7-4c48-a160-18e816ed8b2e" containerName="glance-log" Feb 03 07:07:06 crc kubenswrapper[4998]: E0203 07:07:06.869509 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b05b57d-c23c-4f54-b6a2-f00fd972a645" containerName="placement-api" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.869518 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b05b57d-c23c-4f54-b6a2-f00fd972a645" containerName="placement-api" Feb 03 07:07:06 crc kubenswrapper[4998]: E0203 07:07:06.869539 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8" containerName="neutron-api" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.869546 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8" containerName="neutron-api" Feb 03 07:07:06 crc kubenswrapper[4998]: E0203 07:07:06.869555 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b05b57d-c23c-4f54-b6a2-f00fd972a645" containerName="placement-log" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.869564 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b05b57d-c23c-4f54-b6a2-f00fd972a645" containerName="placement-log" Feb 03 07:07:06 crc kubenswrapper[4998]: E0203 07:07:06.869583 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0cbd21f-2ac7-4c48-a160-18e816ed8b2e" containerName="glance-httpd" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.869590 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0cbd21f-2ac7-4c48-a160-18e816ed8b2e" containerName="glance-httpd" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.869823 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0cbd21f-2ac7-4c48-a160-18e816ed8b2e" containerName="glance-log" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.869848 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b05b57d-c23c-4f54-b6a2-f00fd972a645" containerName="placement-log" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.869861 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0cbd21f-2ac7-4c48-a160-18e816ed8b2e" containerName="glance-httpd" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.869873 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8" 
containerName="neutron-api" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.869888 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd780b55-9e0a-4dbd-878d-69ca2aa4f2a8" containerName="neutron-httpd" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.869902 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b05b57d-c23c-4f54-b6a2-f00fd972a645" containerName="placement-api" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.871015 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.876307 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.876357 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.877536 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.506903259 podStartE2EDuration="12.877516936s" podCreationTimestamp="2026-02-03 07:06:54 +0000 UTC" firstStartedPulling="2026-02-03 07:06:55.586312084 +0000 UTC m=+1253.873005890" lastFinishedPulling="2026-02-03 07:07:05.956925761 +0000 UTC m=+1264.243619567" observedRunningTime="2026-02-03 07:07:06.861773736 +0000 UTC m=+1265.148467552" watchObservedRunningTime="2026-02-03 07:07:06.877516936 +0000 UTC m=+1265.164210742" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.901243 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.982266 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/18701d06-8e80-4822-9128-dd9ba0e5bf1c-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\") " pod="openstack/glance-default-external-api-0" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.982333 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18701d06-8e80-4822-9128-dd9ba0e5bf1c-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\") " pod="openstack/glance-default-external-api-0" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.982454 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/18701d06-8e80-4822-9128-dd9ba0e5bf1c-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\") " pod="openstack/glance-default-external-api-0" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.982559 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\") " pod="openstack/glance-default-external-api-0" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.982606 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jx9q\" 
(UniqueName: \"kubernetes.io/projected/18701d06-8e80-4822-9128-dd9ba0e5bf1c-kube-api-access-2jx9q\") pod \"glance-default-external-api-0\" (UID: \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\") " pod="openstack/glance-default-external-api-0" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.982628 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18701d06-8e80-4822-9128-dd9ba0e5bf1c-config-data\") pod \"glance-default-external-api-0\" (UID: \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\") " pod="openstack/glance-default-external-api-0" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.982836 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/18701d06-8e80-4822-9128-dd9ba0e5bf1c-scripts\") pod \"glance-default-external-api-0\" (UID: \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\") " pod="openstack/glance-default-external-api-0" Feb 03 07:07:06 crc kubenswrapper[4998]: I0203 07:07:06.983050 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/18701d06-8e80-4822-9128-dd9ba0e5bf1c-logs\") pod \"glance-default-external-api-0\" (UID: \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\") " pod="openstack/glance-default-external-api-0" Feb 03 07:07:07 crc kubenswrapper[4998]: I0203 07:07:07.084554 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\") " pod="openstack/glance-default-external-api-0" Feb 03 07:07:07 crc kubenswrapper[4998]: I0203 07:07:07.085205 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jx9q\" (UniqueName: \"kubernetes.io/projected/18701d06-8e80-4822-9128-dd9ba0e5bf1c-kube-api-access-2jx9q\") pod \"glance-default-external-api-0\" (UID: \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\") " pod="openstack/glance-default-external-api-0" Feb 03 07:07:07 crc kubenswrapper[4998]: I0203 07:07:07.085332 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18701d06-8e80-4822-9128-dd9ba0e5bf1c-config-data\") pod \"glance-default-external-api-0\" (UID: \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\") " pod="openstack/glance-default-external-api-0" Feb 03 07:07:07 crc kubenswrapper[4998]: I0203 07:07:07.084911 4998 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/glance-default-external-api-0" Feb 03 07:07:07 crc kubenswrapper[4998]: I0203 07:07:07.085651 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/18701d06-8e80-4822-9128-dd9ba0e5bf1c-scripts\") pod \"glance-default-external-api-0\" (UID: \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\") " pod="openstack/glance-default-external-api-0" Feb 03 07:07:07 crc kubenswrapper[4998]: I0203 07:07:07.085915 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/18701d06-8e80-4822-9128-dd9ba0e5bf1c-logs\") pod \"glance-default-external-api-0\" (UID: \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\") " pod="openstack/glance-default-external-api-0" Feb 03 07:07:07 crc kubenswrapper[4998]: I0203 07:07:07.086195 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/18701d06-8e80-4822-9128-dd9ba0e5bf1c-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\") " pod="openstack/glance-default-external-api-0" Feb 03 07:07:07 crc kubenswrapper[4998]: I0203 07:07:07.086258 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18701d06-8e80-4822-9128-dd9ba0e5bf1c-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\") " pod="openstack/glance-default-external-api-0" Feb 03 07:07:07 crc kubenswrapper[4998]: I0203 07:07:07.086289 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/18701d06-8e80-4822-9128-dd9ba0e5bf1c-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\") " pod="openstack/glance-default-external-api-0" Feb 03 07:07:07 crc kubenswrapper[4998]: I0203 07:07:07.086860 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/18701d06-8e80-4822-9128-dd9ba0e5bf1c-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\") " pod="openstack/glance-default-external-api-0" Feb 03 07:07:07 crc kubenswrapper[4998]: I0203 07:07:07.087232 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/18701d06-8e80-4822-9128-dd9ba0e5bf1c-logs\") pod \"glance-default-external-api-0\" (UID: \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\") " pod="openstack/glance-default-external-api-0" Feb 03 07:07:07 crc kubenswrapper[4998]: I0203 07:07:07.092996 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18701d06-8e80-4822-9128-dd9ba0e5bf1c-config-data\") pod \"glance-default-external-api-0\" (UID: \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\") " pod="openstack/glance-default-external-api-0" Feb 03 07:07:07 crc kubenswrapper[4998]: I0203 07:07:07.093473 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/18701d06-8e80-4822-9128-dd9ba0e5bf1c-scripts\") pod \"glance-default-external-api-0\" (UID: \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\") " pod="openstack/glance-default-external-api-0" Feb 03 07:07:07 crc kubenswrapper[4998]: I0203 07:07:07.097582 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/18701d06-8e80-4822-9128-dd9ba0e5bf1c-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\") " pod="openstack/glance-default-external-api-0" Feb 03 07:07:07 crc kubenswrapper[4998]: I0203 07:07:07.097725 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18701d06-8e80-4822-9128-dd9ba0e5bf1c-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: 
\"18701d06-8e80-4822-9128-dd9ba0e5bf1c\") " pod="openstack/glance-default-external-api-0" Feb 03 07:07:07 crc kubenswrapper[4998]: I0203 07:07:07.104332 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jx9q\" (UniqueName: \"kubernetes.io/projected/18701d06-8e80-4822-9128-dd9ba0e5bf1c-kube-api-access-2jx9q\") pod \"glance-default-external-api-0\" (UID: \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\") " pod="openstack/glance-default-external-api-0" Feb 03 07:07:07 crc kubenswrapper[4998]: I0203 07:07:07.118314 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\") " pod="openstack/glance-default-external-api-0" Feb 03 07:07:07 crc kubenswrapper[4998]: I0203 07:07:07.201460 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 03 07:07:07 crc kubenswrapper[4998]: I0203 07:07:07.818488 4998 generic.go:334] "Generic (PLEG): container finished" podID="ccb119a6-e9ac-49e5-9980-a150bf800d2a" containerID="f80dae4aa5f84f296f99d3a7ab7f21eecc5d1d379d85bed0125adc14744998ef" exitCode=0 Feb 03 07:07:07 crc kubenswrapper[4998]: I0203 07:07:07.818964 4998 generic.go:334] "Generic (PLEG): container finished" podID="ccb119a6-e9ac-49e5-9980-a150bf800d2a" containerID="4ec46750e035655296412615ff5022daa1dad6f66340e848b861944a0806a7a8" exitCode=2 Feb 03 07:07:07 crc kubenswrapper[4998]: I0203 07:07:07.818975 4998 generic.go:334] "Generic (PLEG): container finished" podID="ccb119a6-e9ac-49e5-9980-a150bf800d2a" containerID="d0c47d808dab088e1956c0ff99b9c4ce68a2b8d7aa2fa5641fa9b2af12094e7b" exitCode=0 Feb 03 07:07:07 crc kubenswrapper[4998]: I0203 07:07:07.818558 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ccb119a6-e9ac-49e5-9980-a150bf800d2a","Type":"ContainerDied","Data":"f80dae4aa5f84f296f99d3a7ab7f21eecc5d1d379d85bed0125adc14744998ef"} Feb 03 07:07:07 crc kubenswrapper[4998]: I0203 07:07:07.819040 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ccb119a6-e9ac-49e5-9980-a150bf800d2a","Type":"ContainerDied","Data":"4ec46750e035655296412615ff5022daa1dad6f66340e848b861944a0806a7a8"} Feb 03 07:07:07 crc kubenswrapper[4998]: I0203 07:07:07.819055 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ccb119a6-e9ac-49e5-9980-a150bf800d2a","Type":"ContainerDied","Data":"d0c47d808dab088e1956c0ff99b9c4ce68a2b8d7aa2fa5641fa9b2af12094e7b"} Feb 03 07:07:07 crc kubenswrapper[4998]: I0203 07:07:07.859814 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 07:07:08 crc kubenswrapper[4998]: I0203 07:07:08.442505 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0cbd21f-2ac7-4c48-a160-18e816ed8b2e" path="/var/lib/kubelet/pods/c0cbd21f-2ac7-4c48-a160-18e816ed8b2e/volumes" Feb 03 07:07:08 crc kubenswrapper[4998]: I0203 07:07:08.835324 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"18701d06-8e80-4822-9128-dd9ba0e5bf1c","Type":"ContainerStarted","Data":"f45874e7a3396bb25e17a358e507a1b8a9db053a662b6cd39150143abd3c21f5"} Feb 03 07:07:08 crc kubenswrapper[4998]: I0203 07:07:08.835376 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/glance-default-external-api-0" event={"ID":"18701d06-8e80-4822-9128-dd9ba0e5bf1c","Type":"ContainerStarted","Data":"8120ba3ef48a2454a4d7882b34f5816ff1db40ae331e6ccb89f27488c8e2810c"} Feb 03 07:07:09 crc kubenswrapper[4998]: I0203 07:07:09.846416 4998 generic.go:334] "Generic (PLEG): container finished" podID="c38473eb-790f-454e-b5fb-f382292895c1" containerID="815107d7365bb8588bc5fb81ef98f19fde8733cb829325ab5989bc5fcfe8dc4a" exitCode=0 Feb 03 07:07:09 crc kubenswrapper[4998]: I0203 07:07:09.846450 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c38473eb-790f-454e-b5fb-f382292895c1","Type":"ContainerDied","Data":"815107d7365bb8588bc5fb81ef98f19fde8733cb829325ab5989bc5fcfe8dc4a"} Feb 03 07:07:11 crc kubenswrapper[4998]: I0203 07:07:11.805122 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 03 07:07:11 crc kubenswrapper[4998]: I0203 07:07:11.865183 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"18701d06-8e80-4822-9128-dd9ba0e5bf1c","Type":"ContainerStarted","Data":"9da387018f31a72ba944e277abd1e671ca26b317532692abef949c8149cd34b5"} Feb 03 07:07:11 crc kubenswrapper[4998]: I0203 07:07:11.867426 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c38473eb-790f-454e-b5fb-f382292895c1","Type":"ContainerDied","Data":"4d18402fc804ad1803a6c38008bb96a38925ddb4bd17e296638e85595ddaf9e5"} Feb 03 07:07:11 crc kubenswrapper[4998]: I0203 07:07:11.867457 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 03 07:07:11 crc kubenswrapper[4998]: I0203 07:07:11.867473 4998 scope.go:117] "RemoveContainer" containerID="815107d7365bb8588bc5fb81ef98f19fde8733cb829325ab5989bc5fcfe8dc4a" Feb 03 07:07:11 crc kubenswrapper[4998]: I0203 07:07:11.895257 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c38473eb-790f-454e-b5fb-f382292895c1-combined-ca-bundle\") pod \"c38473eb-790f-454e-b5fb-f382292895c1\" (UID: \"c38473eb-790f-454e-b5fb-f382292895c1\") " Feb 03 07:07:11 crc kubenswrapper[4998]: I0203 07:07:11.895459 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c38473eb-790f-454e-b5fb-f382292895c1-logs\") pod \"c38473eb-790f-454e-b5fb-f382292895c1\" (UID: \"c38473eb-790f-454e-b5fb-f382292895c1\") " Feb 03 07:07:11 crc kubenswrapper[4998]: I0203 07:07:11.895513 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"c38473eb-790f-454e-b5fb-f382292895c1\" (UID: \"c38473eb-790f-454e-b5fb-f382292895c1\") " Feb 03 07:07:11 crc kubenswrapper[4998]: I0203 07:07:11.895548 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c38473eb-790f-454e-b5fb-f382292895c1-internal-tls-certs\") pod \"c38473eb-790f-454e-b5fb-f382292895c1\" (UID: \"c38473eb-790f-454e-b5fb-f382292895c1\") " Feb 03 07:07:11 crc kubenswrapper[4998]: I0203 07:07:11.895566 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/c38473eb-790f-454e-b5fb-f382292895c1-config-data\") pod \"c38473eb-790f-454e-b5fb-f382292895c1\" (UID: \"c38473eb-790f-454e-b5fb-f382292895c1\") " Feb 03 07:07:11 crc kubenswrapper[4998]: I0203 07:07:11.895603 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-86kjg\" (UniqueName: \"kubernetes.io/projected/c38473eb-790f-454e-b5fb-f382292895c1-kube-api-access-86kjg\") pod \"c38473eb-790f-454e-b5fb-f382292895c1\" (UID: \"c38473eb-790f-454e-b5fb-f382292895c1\") " Feb 03 07:07:11 crc kubenswrapper[4998]: I0203 07:07:11.895621 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c38473eb-790f-454e-b5fb-f382292895c1-httpd-run\") pod \"c38473eb-790f-454e-b5fb-f382292895c1\" (UID: \"c38473eb-790f-454e-b5fb-f382292895c1\") " Feb 03 07:07:11 crc kubenswrapper[4998]: I0203 07:07:11.895640 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c38473eb-790f-454e-b5fb-f382292895c1-scripts\") pod \"c38473eb-790f-454e-b5fb-f382292895c1\" (UID: \"c38473eb-790f-454e-b5fb-f382292895c1\") " Feb 03 07:07:11 crc kubenswrapper[4998]: I0203 07:07:11.896360 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c38473eb-790f-454e-b5fb-f382292895c1-logs" (OuterVolumeSpecName: "logs") pod "c38473eb-790f-454e-b5fb-f382292895c1" (UID: "c38473eb-790f-454e-b5fb-f382292895c1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:07:11 crc kubenswrapper[4998]: I0203 07:07:11.896870 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c38473eb-790f-454e-b5fb-f382292895c1-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "c38473eb-790f-454e-b5fb-f382292895c1" (UID: "c38473eb-790f-454e-b5fb-f382292895c1"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:07:11 crc kubenswrapper[4998]: I0203 07:07:11.897080 4998 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c38473eb-790f-454e-b5fb-f382292895c1-logs\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:11 crc kubenswrapper[4998]: I0203 07:07:11.897095 4998 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c38473eb-790f-454e-b5fb-f382292895c1-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:11 crc kubenswrapper[4998]: I0203 07:07:11.900300 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=5.900286355 podStartE2EDuration="5.900286355s" podCreationTimestamp="2026-02-03 07:07:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:07:11.883842756 +0000 UTC m=+1270.170536582" watchObservedRunningTime="2026-02-03 07:07:11.900286355 +0000 UTC m=+1270.186980161" Feb 03 07:07:11 crc kubenswrapper[4998]: I0203 07:07:11.911956 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c38473eb-790f-454e-b5fb-f382292895c1-kube-api-access-86kjg" (OuterVolumeSpecName: "kube-api-access-86kjg") pod "c38473eb-790f-454e-b5fb-f382292895c1" (UID: "c38473eb-790f-454e-b5fb-f382292895c1"). InnerVolumeSpecName "kube-api-access-86kjg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:07:11 crc kubenswrapper[4998]: I0203 07:07:11.926041 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "c38473eb-790f-454e-b5fb-f382292895c1" (UID: "c38473eb-790f-454e-b5fb-f382292895c1"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 03 07:07:11 crc kubenswrapper[4998]: I0203 07:07:11.927527 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c38473eb-790f-454e-b5fb-f382292895c1-scripts" (OuterVolumeSpecName: "scripts") pod "c38473eb-790f-454e-b5fb-f382292895c1" (UID: "c38473eb-790f-454e-b5fb-f382292895c1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:07:11 crc kubenswrapper[4998]: I0203 07:07:11.948641 4998 scope.go:117] "RemoveContainer" containerID="b2e6b960df78a7b235174eed618b1694fe06dbf454005c530f409ecf38f4fe69" Feb 03 07:07:11 crc kubenswrapper[4998]: I0203 07:07:11.974370 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c38473eb-790f-454e-b5fb-f382292895c1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c38473eb-790f-454e-b5fb-f382292895c1" (UID: "c38473eb-790f-454e-b5fb-f382292895c1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:07:11 crc kubenswrapper[4998]: I0203 07:07:11.974943 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c38473eb-790f-454e-b5fb-f382292895c1-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "c38473eb-790f-454e-b5fb-f382292895c1" (UID: "c38473eb-790f-454e-b5fb-f382292895c1"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:07:11 crc kubenswrapper[4998]: I0203 07:07:11.985514 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c38473eb-790f-454e-b5fb-f382292895c1-config-data" (OuterVolumeSpecName: "config-data") pod "c38473eb-790f-454e-b5fb-f382292895c1" (UID: "c38473eb-790f-454e-b5fb-f382292895c1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:07:11 crc kubenswrapper[4998]: I0203 07:07:11.999763 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c38473eb-790f-454e-b5fb-f382292895c1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:11 crc kubenswrapper[4998]: I0203 07:07:11.999817 4998 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Feb 03 07:07:11 crc kubenswrapper[4998]: I0203 07:07:11.999828 4998 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c38473eb-790f-454e-b5fb-f382292895c1-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:11 crc kubenswrapper[4998]: I0203 07:07:11.999838 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c38473eb-790f-454e-b5fb-f382292895c1-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:11 crc kubenswrapper[4998]: I0203 07:07:11.999947 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-86kjg\" (UniqueName: \"kubernetes.io/projected/c38473eb-790f-454e-b5fb-f382292895c1-kube-api-access-86kjg\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.000550 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c38473eb-790f-454e-b5fb-f382292895c1-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.018901 4998 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.102144 4998 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.198036 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.210650 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.229697 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 07:07:12 crc kubenswrapper[4998]: E0203 07:07:12.230148 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c38473eb-790f-454e-b5fb-f382292895c1" containerName="glance-httpd" Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.230171 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="c38473eb-790f-454e-b5fb-f382292895c1" containerName="glance-httpd" Feb 03 07:07:12 crc kubenswrapper[4998]: E0203 07:07:12.230184 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c38473eb-790f-454e-b5fb-f382292895c1" containerName="glance-log" Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.230192 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="c38473eb-790f-454e-b5fb-f382292895c1" containerName="glance-log" Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.230410 4998 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="c38473eb-790f-454e-b5fb-f382292895c1" containerName="glance-httpd" Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.230431 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="c38473eb-790f-454e-b5fb-f382292895c1" containerName="glance-log" Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.231570 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.233827 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.233962 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.252656 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.305388 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.305443 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-logs\") pod \"glance-default-internal-api-0\" (UID: \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.305495 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-config-data\") pod \"glance-default-internal-api-0\" (UID: \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.305591 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfp79\" (UniqueName: \"kubernetes.io/projected/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-kube-api-access-nfp79\") pod \"glance-default-internal-api-0\" (UID: \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.305647 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.305671 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.305714 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.305812 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-scripts\") pod \"glance-default-internal-api-0\" (UID: \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.407685 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-scripts\") pod \"glance-default-internal-api-0\" (UID: \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.408083 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.408119 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-logs\") pod \"glance-default-internal-api-0\" (UID: \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.408142 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-config-data\") pod \"glance-default-internal-api-0\" (UID: \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.408210 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfp79\" (UniqueName: \"kubernetes.io/projected/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-kube-api-access-nfp79\") pod \"glance-default-internal-api-0\" (UID: \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.408261 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.408286 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.408328 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.408598 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.408814 4998 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-internal-api-0" Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.408931 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-logs\") pod \"glance-default-internal-api-0\" (UID: \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.442556 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-scripts\") pod \"glance-default-internal-api-0\" (UID: \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.442616 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.442619 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.443223 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-config-data\") pod \"glance-default-internal-api-0\" (UID: \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.448437 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c38473eb-790f-454e-b5fb-f382292895c1" path="/var/lib/kubelet/pods/c38473eb-790f-454e-b5fb-f382292895c1/volumes" Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.452343 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nfp79\" (UniqueName: \"kubernetes.io/projected/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-kube-api-access-nfp79\") pod \"glance-default-internal-api-0\" (UID: \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:07:12 crc 
kubenswrapper[4998]: I0203 07:07:12.458115 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:07:12 crc kubenswrapper[4998]: I0203 07:07:12.558911 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 03 07:07:13 crc kubenswrapper[4998]: W0203 07:07:13.133310 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4767a1f8_263b_4c49_8cb7_7a7f9b8271dc.slice/crio-918defb49b218088e5c8bb4389970c6bc72985ee7638f7092bc429c731e59c13 WatchSource:0}: Error finding container 918defb49b218088e5c8bb4389970c6bc72985ee7638f7092bc429c731e59c13: Status 404 returned error can't find the container with id 918defb49b218088e5c8bb4389970c6bc72985ee7638f7092bc429c731e59c13 Feb 03 07:07:13 crc kubenswrapper[4998]: I0203 07:07:13.144088 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 07:07:13 crc kubenswrapper[4998]: I0203 07:07:13.889219 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc","Type":"ContainerStarted","Data":"8cde06b960a90ef44310da758f839bc01b1bda0dd3d7ba611dc66ebe32a3eaba"} Feb 03 07:07:13 crc kubenswrapper[4998]: I0203 07:07:13.889583 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc","Type":"ContainerStarted","Data":"918defb49b218088e5c8bb4389970c6bc72985ee7638f7092bc429c731e59c13"} Feb 03 07:07:14 crc kubenswrapper[4998]: I0203 07:07:14.899154 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc","Type":"ContainerStarted","Data":"0f64922061bc19d3ba8a787c55577aa51eb3d22b40d2c5ba7c8080728dee564e"} Feb 03 07:07:14 crc kubenswrapper[4998]: I0203 07:07:14.931557 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=2.931529239 podStartE2EDuration="2.931529239s" podCreationTimestamp="2026-02-03 07:07:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:07:14.926446274 +0000 UTC m=+1273.213140100" watchObservedRunningTime="2026-02-03 07:07:14.931529239 +0000 UTC m=+1273.218223045" Feb 03 07:07:15 crc kubenswrapper[4998]: I0203 07:07:15.930654 4998 generic.go:334] "Generic (PLEG): container finished" podID="ccb119a6-e9ac-49e5-9980-a150bf800d2a" containerID="ebdf5e0b8a0859716f4b568e67cdaa631811329bf8b049bc9dac2b4ed056b776" exitCode=0 Feb 03 07:07:15 crc kubenswrapper[4998]: I0203 07:07:15.931679 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ccb119a6-e9ac-49e5-9980-a150bf800d2a","Type":"ContainerDied","Data":"ebdf5e0b8a0859716f4b568e67cdaa631811329bf8b049bc9dac2b4ed056b776"} Feb 03 07:07:16 crc kubenswrapper[4998]: I0203 07:07:16.109859 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:07:16 crc kubenswrapper[4998]: I0203 07:07:16.274351 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccb119a6-e9ac-49e5-9980-a150bf800d2a-config-data\") pod \"ccb119a6-e9ac-49e5-9980-a150bf800d2a\" (UID: \"ccb119a6-e9ac-49e5-9980-a150bf800d2a\") " Feb 03 07:07:16 crc kubenswrapper[4998]: I0203 07:07:16.274398 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ccb119a6-e9ac-49e5-9980-a150bf800d2a-scripts\") pod \"ccb119a6-e9ac-49e5-9980-a150bf800d2a\" (UID: \"ccb119a6-e9ac-49e5-9980-a150bf800d2a\") " Feb 03 07:07:16 crc kubenswrapper[4998]: I0203 07:07:16.274476 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccb119a6-e9ac-49e5-9980-a150bf800d2a-log-httpd\") pod \"ccb119a6-e9ac-49e5-9980-a150bf800d2a\" (UID: \"ccb119a6-e9ac-49e5-9980-a150bf800d2a\") " Feb 03 07:07:16 crc kubenswrapper[4998]: I0203 07:07:16.274515 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccb119a6-e9ac-49e5-9980-a150bf800d2a-run-httpd\") pod \"ccb119a6-e9ac-49e5-9980-a150bf800d2a\" (UID: \"ccb119a6-e9ac-49e5-9980-a150bf800d2a\") " Feb 03 07:07:16 crc kubenswrapper[4998]: I0203 07:07:16.274596 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gz6xc\" (UniqueName: \"kubernetes.io/projected/ccb119a6-e9ac-49e5-9980-a150bf800d2a-kube-api-access-gz6xc\") pod \"ccb119a6-e9ac-49e5-9980-a150bf800d2a\" (UID: \"ccb119a6-e9ac-49e5-9980-a150bf800d2a\") " Feb 03 07:07:16 crc kubenswrapper[4998]: I0203 07:07:16.274619 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ccb119a6-e9ac-49e5-9980-a150bf800d2a-sg-core-conf-yaml\") pod \"ccb119a6-e9ac-49e5-9980-a150bf800d2a\" (UID: \"ccb119a6-e9ac-49e5-9980-a150bf800d2a\") " Feb 03 07:07:16 crc kubenswrapper[4998]: I0203 07:07:16.274659 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccb119a6-e9ac-49e5-9980-a150bf800d2a-combined-ca-bundle\") pod \"ccb119a6-e9ac-49e5-9980-a150bf800d2a\" (UID: \"ccb119a6-e9ac-49e5-9980-a150bf800d2a\") " Feb 03 07:07:16 crc kubenswrapper[4998]: I0203 07:07:16.274877 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ccb119a6-e9ac-49e5-9980-a150bf800d2a-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ccb119a6-e9ac-49e5-9980-a150bf800d2a" (UID: "ccb119a6-e9ac-49e5-9980-a150bf800d2a"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:07:16 crc kubenswrapper[4998]: I0203 07:07:16.274953 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ccb119a6-e9ac-49e5-9980-a150bf800d2a-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ccb119a6-e9ac-49e5-9980-a150bf800d2a" (UID: "ccb119a6-e9ac-49e5-9980-a150bf800d2a"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:07:16 crc kubenswrapper[4998]: I0203 07:07:16.275334 4998 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccb119a6-e9ac-49e5-9980-a150bf800d2a-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:16 crc kubenswrapper[4998]: I0203 07:07:16.275357 4998 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccb119a6-e9ac-49e5-9980-a150bf800d2a-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:16 crc kubenswrapper[4998]: I0203 07:07:16.279578 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ccb119a6-e9ac-49e5-9980-a150bf800d2a-scripts" (OuterVolumeSpecName: "scripts") pod "ccb119a6-e9ac-49e5-9980-a150bf800d2a" (UID: "ccb119a6-e9ac-49e5-9980-a150bf800d2a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:07:16 crc kubenswrapper[4998]: I0203 07:07:16.289063 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ccb119a6-e9ac-49e5-9980-a150bf800d2a-kube-api-access-gz6xc" (OuterVolumeSpecName: "kube-api-access-gz6xc") pod "ccb119a6-e9ac-49e5-9980-a150bf800d2a" (UID: "ccb119a6-e9ac-49e5-9980-a150bf800d2a"). InnerVolumeSpecName "kube-api-access-gz6xc". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:07:16 crc kubenswrapper[4998]: I0203 07:07:16.307639 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ccb119a6-e9ac-49e5-9980-a150bf800d2a-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ccb119a6-e9ac-49e5-9980-a150bf800d2a" (UID: "ccb119a6-e9ac-49e5-9980-a150bf800d2a"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:07:16 crc kubenswrapper[4998]: I0203 07:07:16.357993 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ccb119a6-e9ac-49e5-9980-a150bf800d2a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ccb119a6-e9ac-49e5-9980-a150bf800d2a" (UID: "ccb119a6-e9ac-49e5-9980-a150bf800d2a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:07:16 crc kubenswrapper[4998]: I0203 07:07:16.377115 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gz6xc\" (UniqueName: \"kubernetes.io/projected/ccb119a6-e9ac-49e5-9980-a150bf800d2a-kube-api-access-gz6xc\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:16 crc kubenswrapper[4998]: I0203 07:07:16.377150 4998 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ccb119a6-e9ac-49e5-9980-a150bf800d2a-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:16 crc kubenswrapper[4998]: I0203 07:07:16.377161 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccb119a6-e9ac-49e5-9980-a150bf800d2a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:16 crc kubenswrapper[4998]: I0203 07:07:16.377174 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ccb119a6-e9ac-49e5-9980-a150bf800d2a-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:16 crc kubenswrapper[4998]: I0203 07:07:16.385416 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ccb119a6-e9ac-49e5-9980-a150bf800d2a-config-data" (OuterVolumeSpecName: "config-data") pod "ccb119a6-e9ac-49e5-9980-a150bf800d2a" (UID: "ccb119a6-e9ac-49e5-9980-a150bf800d2a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:07:16 crc kubenswrapper[4998]: I0203 07:07:16.478297 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccb119a6-e9ac-49e5-9980-a150bf800d2a-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:16 crc kubenswrapper[4998]: I0203 07:07:16.942085 4998 generic.go:334] "Generic (PLEG): container finished" podID="e485e612-9e0b-4d19-a326-523613449a06" containerID="8a11e53e794f1fad621650795f023987d0a74334697f2a88640796077ad63bc4" exitCode=0 Feb 03 07:07:16 crc kubenswrapper[4998]: I0203 07:07:16.942168 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-sqllz" event={"ID":"e485e612-9e0b-4d19-a326-523613449a06","Type":"ContainerDied","Data":"8a11e53e794f1fad621650795f023987d0a74334697f2a88640796077ad63bc4"} Feb 03 07:07:16 crc kubenswrapper[4998]: I0203 07:07:16.946714 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ccb119a6-e9ac-49e5-9980-a150bf800d2a","Type":"ContainerDied","Data":"1dfe367ddda70785fa2bb8c4aa100638a046decd5ecf719a3184ee0a60f8c412"} Feb 03 07:07:16 crc kubenswrapper[4998]: I0203 07:07:16.946763 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:07:16 crc kubenswrapper[4998]: I0203 07:07:16.946773 4998 scope.go:117] "RemoveContainer" containerID="f80dae4aa5f84f296f99d3a7ab7f21eecc5d1d379d85bed0125adc14744998ef" Feb 03 07:07:16 crc kubenswrapper[4998]: I0203 07:07:16.968268 4998 scope.go:117] "RemoveContainer" containerID="4ec46750e035655296412615ff5022daa1dad6f66340e848b861944a0806a7a8" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.004037 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.016325 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.034954 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:07:17 crc kubenswrapper[4998]: E0203 07:07:17.035316 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccb119a6-e9ac-49e5-9980-a150bf800d2a" containerName="sg-core" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.035328 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccb119a6-e9ac-49e5-9980-a150bf800d2a" containerName="sg-core" Feb 03 07:07:17 crc kubenswrapper[4998]: E0203 07:07:17.035342 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccb119a6-e9ac-49e5-9980-a150bf800d2a" containerName="proxy-httpd" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.035348 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccb119a6-e9ac-49e5-9980-a150bf800d2a" containerName="proxy-httpd" Feb 03 07:07:17 crc kubenswrapper[4998]: E0203 07:07:17.035355 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccb119a6-e9ac-49e5-9980-a150bf800d2a" containerName="ceilometer-central-agent" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.035361 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccb119a6-e9ac-49e5-9980-a150bf800d2a" containerName="ceilometer-central-agent" Feb 03 07:07:17 crc kubenswrapper[4998]: E0203 07:07:17.035390 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccb119a6-e9ac-49e5-9980-a150bf800d2a" containerName="ceilometer-notification-agent" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.035396 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccb119a6-e9ac-49e5-9980-a150bf800d2a" containerName="ceilometer-notification-agent" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.035544 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccb119a6-e9ac-49e5-9980-a150bf800d2a" containerName="sg-core" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.035556 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccb119a6-e9ac-49e5-9980-a150bf800d2a" containerName="proxy-httpd" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.035568 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccb119a6-e9ac-49e5-9980-a150bf800d2a" containerName="ceilometer-notification-agent" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.035582 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccb119a6-e9ac-49e5-9980-a150bf800d2a" containerName="ceilometer-central-agent" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.037626 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.038750 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.043558 4998 scope.go:117] "RemoveContainer" containerID="d0c47d808dab088e1956c0ff99b9c4ce68a2b8d7aa2fa5641fa9b2af12094e7b" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.044109 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.044549 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.067435 4998 scope.go:117] "RemoveContainer" containerID="ebdf5e0b8a0859716f4b568e67cdaa631811329bf8b049bc9dac2b4ed056b776" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.202152 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.202212 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.203136 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3614c866-f0e0-433c-9bc1-9601e83c479f-config-data\") pod \"ceilometer-0\" (UID: \"3614c866-f0e0-433c-9bc1-9601e83c479f\") " pod="openstack/ceilometer-0" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.203417 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3614c866-f0e0-433c-9bc1-9601e83c479f-scripts\") pod \"ceilometer-0\" (UID: \"3614c866-f0e0-433c-9bc1-9601e83c479f\") " pod="openstack/ceilometer-0" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.203447 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvk27\" (UniqueName: \"kubernetes.io/projected/3614c866-f0e0-433c-9bc1-9601e83c479f-kube-api-access-fvk27\") pod \"ceilometer-0\" (UID: \"3614c866-f0e0-433c-9bc1-9601e83c479f\") " pod="openstack/ceilometer-0" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.203491 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3614c866-f0e0-433c-9bc1-9601e83c479f-run-httpd\") pod \"ceilometer-0\" (UID: \"3614c866-f0e0-433c-9bc1-9601e83c479f\") " pod="openstack/ceilometer-0" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.203514 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3614c866-f0e0-433c-9bc1-9601e83c479f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3614c866-f0e0-433c-9bc1-9601e83c479f\") " pod="openstack/ceilometer-0" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.203543 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3614c866-f0e0-433c-9bc1-9601e83c479f-log-httpd\") pod \"ceilometer-0\" (UID: \"3614c866-f0e0-433c-9bc1-9601e83c479f\") " pod="openstack/ceilometer-0" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.203580 
4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3614c866-f0e0-433c-9bc1-9601e83c479f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3614c866-f0e0-433c-9bc1-9601e83c479f\") " pod="openstack/ceilometer-0" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.262529 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.305331 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3614c866-f0e0-433c-9bc1-9601e83c479f-config-data\") pod \"ceilometer-0\" (UID: \"3614c866-f0e0-433c-9bc1-9601e83c479f\") " pod="openstack/ceilometer-0" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.305430 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3614c866-f0e0-433c-9bc1-9601e83c479f-scripts\") pod \"ceilometer-0\" (UID: \"3614c866-f0e0-433c-9bc1-9601e83c479f\") " pod="openstack/ceilometer-0" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.305463 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvk27\" (UniqueName: \"kubernetes.io/projected/3614c866-f0e0-433c-9bc1-9601e83c479f-kube-api-access-fvk27\") pod \"ceilometer-0\" (UID: \"3614c866-f0e0-433c-9bc1-9601e83c479f\") " pod="openstack/ceilometer-0" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.305518 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3614c866-f0e0-433c-9bc1-9601e83c479f-run-httpd\") pod \"ceilometer-0\" (UID: \"3614c866-f0e0-433c-9bc1-9601e83c479f\") " pod="openstack/ceilometer-0" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.305560 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3614c866-f0e0-433c-9bc1-9601e83c479f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3614c866-f0e0-433c-9bc1-9601e83c479f\") " pod="openstack/ceilometer-0" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.305596 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3614c866-f0e0-433c-9bc1-9601e83c479f-log-httpd\") pod \"ceilometer-0\" (UID: \"3614c866-f0e0-433c-9bc1-9601e83c479f\") " pod="openstack/ceilometer-0" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.305638 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3614c866-f0e0-433c-9bc1-9601e83c479f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3614c866-f0e0-433c-9bc1-9601e83c479f\") " pod="openstack/ceilometer-0" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.306270 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3614c866-f0e0-433c-9bc1-9601e83c479f-run-httpd\") pod \"ceilometer-0\" (UID: \"3614c866-f0e0-433c-9bc1-9601e83c479f\") " pod="openstack/ceilometer-0" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.306644 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3614c866-f0e0-433c-9bc1-9601e83c479f-log-httpd\") 
pod \"ceilometer-0\" (UID: \"3614c866-f0e0-433c-9bc1-9601e83c479f\") " pod="openstack/ceilometer-0" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.313666 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3614c866-f0e0-433c-9bc1-9601e83c479f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3614c866-f0e0-433c-9bc1-9601e83c479f\") " pod="openstack/ceilometer-0" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.327623 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3614c866-f0e0-433c-9bc1-9601e83c479f-scripts\") pod \"ceilometer-0\" (UID: \"3614c866-f0e0-433c-9bc1-9601e83c479f\") " pod="openstack/ceilometer-0" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.328018 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3614c866-f0e0-433c-9bc1-9601e83c479f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3614c866-f0e0-433c-9bc1-9601e83c479f\") " pod="openstack/ceilometer-0" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.329503 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3614c866-f0e0-433c-9bc1-9601e83c479f-config-data\") pod \"ceilometer-0\" (UID: \"3614c866-f0e0-433c-9bc1-9601e83c479f\") " pod="openstack/ceilometer-0" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.352035 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.353030 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvk27\" (UniqueName: \"kubernetes.io/projected/3614c866-f0e0-433c-9bc1-9601e83c479f-kube-api-access-fvk27\") pod \"ceilometer-0\" (UID: \"3614c866-f0e0-433c-9bc1-9601e83c479f\") " pod="openstack/ceilometer-0" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.370110 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.800088 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.955898 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3614c866-f0e0-433c-9bc1-9601e83c479f","Type":"ContainerStarted","Data":"fdc89ff3daa4d811529c0edcb49de816aefb6cf6cb3a862a790c68a09afb5557"} Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.957484 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Feb 03 07:07:17 crc kubenswrapper[4998]: I0203 07:07:17.957630 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Feb 03 07:07:18 crc kubenswrapper[4998]: I0203 07:07:18.363093 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-sqllz" Feb 03 07:07:18 crc kubenswrapper[4998]: I0203 07:07:18.439408 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ccb119a6-e9ac-49e5-9980-a150bf800d2a" path="/var/lib/kubelet/pods/ccb119a6-e9ac-49e5-9980-a150bf800d2a/volumes" Feb 03 07:07:18 crc kubenswrapper[4998]: I0203 07:07:18.524533 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7l497\" (UniqueName: \"kubernetes.io/projected/e485e612-9e0b-4d19-a326-523613449a06-kube-api-access-7l497\") pod \"e485e612-9e0b-4d19-a326-523613449a06\" (UID: \"e485e612-9e0b-4d19-a326-523613449a06\") " Feb 03 07:07:18 crc kubenswrapper[4998]: I0203 07:07:18.524607 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e485e612-9e0b-4d19-a326-523613449a06-combined-ca-bundle\") pod \"e485e612-9e0b-4d19-a326-523613449a06\" (UID: \"e485e612-9e0b-4d19-a326-523613449a06\") " Feb 03 07:07:18 crc kubenswrapper[4998]: I0203 07:07:18.524717 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e485e612-9e0b-4d19-a326-523613449a06-scripts\") pod \"e485e612-9e0b-4d19-a326-523613449a06\" (UID: \"e485e612-9e0b-4d19-a326-523613449a06\") " Feb 03 07:07:18 crc kubenswrapper[4998]: I0203 07:07:18.524799 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e485e612-9e0b-4d19-a326-523613449a06-config-data\") pod \"e485e612-9e0b-4d19-a326-523613449a06\" (UID: \"e485e612-9e0b-4d19-a326-523613449a06\") " Feb 03 07:07:18 crc kubenswrapper[4998]: I0203 07:07:18.529349 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e485e612-9e0b-4d19-a326-523613449a06-kube-api-access-7l497" (OuterVolumeSpecName: "kube-api-access-7l497") pod "e485e612-9e0b-4d19-a326-523613449a06" (UID: "e485e612-9e0b-4d19-a326-523613449a06"). InnerVolumeSpecName "kube-api-access-7l497". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:07:18 crc kubenswrapper[4998]: I0203 07:07:18.530898 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e485e612-9e0b-4d19-a326-523613449a06-scripts" (OuterVolumeSpecName: "scripts") pod "e485e612-9e0b-4d19-a326-523613449a06" (UID: "e485e612-9e0b-4d19-a326-523613449a06"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:07:18 crc kubenswrapper[4998]: I0203 07:07:18.554091 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e485e612-9e0b-4d19-a326-523613449a06-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e485e612-9e0b-4d19-a326-523613449a06" (UID: "e485e612-9e0b-4d19-a326-523613449a06"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:07:18 crc kubenswrapper[4998]: I0203 07:07:18.558106 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e485e612-9e0b-4d19-a326-523613449a06-config-data" (OuterVolumeSpecName: "config-data") pod "e485e612-9e0b-4d19-a326-523613449a06" (UID: "e485e612-9e0b-4d19-a326-523613449a06"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:07:18 crc kubenswrapper[4998]: I0203 07:07:18.627437 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7l497\" (UniqueName: \"kubernetes.io/projected/e485e612-9e0b-4d19-a326-523613449a06-kube-api-access-7l497\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:18 crc kubenswrapper[4998]: I0203 07:07:18.627471 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e485e612-9e0b-4d19-a326-523613449a06-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:18 crc kubenswrapper[4998]: I0203 07:07:18.627481 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e485e612-9e0b-4d19-a326-523613449a06-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:18 crc kubenswrapper[4998]: I0203 07:07:18.627490 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e485e612-9e0b-4d19-a326-523613449a06-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:18 crc kubenswrapper[4998]: I0203 07:07:18.965733 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-sqllz" event={"ID":"e485e612-9e0b-4d19-a326-523613449a06","Type":"ContainerDied","Data":"cc873f0ce0f6a02f932ee6fc37ecfaa0e23255fdce7cb0fec5413e26beedaf33"} Feb 03 07:07:18 crc kubenswrapper[4998]: I0203 07:07:18.966064 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cc873f0ce0f6a02f932ee6fc37ecfaa0e23255fdce7cb0fec5413e26beedaf33" Feb 03 07:07:18 crc kubenswrapper[4998]: I0203 07:07:18.966015 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-sqllz" Feb 03 07:07:18 crc kubenswrapper[4998]: I0203 07:07:18.976991 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3614c866-f0e0-433c-9bc1-9601e83c479f","Type":"ContainerStarted","Data":"399d45913cb9eab8eb1e4267fcd7e6d1cfdce55f450959649fbc69c1df47bced"} Feb 03 07:07:19 crc kubenswrapper[4998]: I0203 07:07:19.051255 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 03 07:07:19 crc kubenswrapper[4998]: E0203 07:07:19.051658 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e485e612-9e0b-4d19-a326-523613449a06" containerName="nova-cell0-conductor-db-sync" Feb 03 07:07:19 crc kubenswrapper[4998]: I0203 07:07:19.051670 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="e485e612-9e0b-4d19-a326-523613449a06" containerName="nova-cell0-conductor-db-sync" Feb 03 07:07:19 crc kubenswrapper[4998]: I0203 07:07:19.051938 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="e485e612-9e0b-4d19-a326-523613449a06" containerName="nova-cell0-conductor-db-sync" Feb 03 07:07:19 crc kubenswrapper[4998]: I0203 07:07:19.052541 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Feb 03 07:07:19 crc kubenswrapper[4998]: I0203 07:07:19.055337 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Feb 03 07:07:19 crc kubenswrapper[4998]: I0203 07:07:19.058924 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-vbhdd" Feb 03 07:07:19 crc kubenswrapper[4998]: I0203 07:07:19.071737 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 03 07:07:19 crc kubenswrapper[4998]: I0203 07:07:19.138811 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec1012bb-b11f-4248-aa77-f9076a2a1fc9-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"ec1012bb-b11f-4248-aa77-f9076a2a1fc9\") " pod="openstack/nova-cell0-conductor-0" Feb 03 07:07:19 crc kubenswrapper[4998]: I0203 07:07:19.138889 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p46cc\" (UniqueName: \"kubernetes.io/projected/ec1012bb-b11f-4248-aa77-f9076a2a1fc9-kube-api-access-p46cc\") pod \"nova-cell0-conductor-0\" (UID: \"ec1012bb-b11f-4248-aa77-f9076a2a1fc9\") " pod="openstack/nova-cell0-conductor-0" Feb 03 07:07:19 crc kubenswrapper[4998]: I0203 07:07:19.138961 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec1012bb-b11f-4248-aa77-f9076a2a1fc9-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"ec1012bb-b11f-4248-aa77-f9076a2a1fc9\") " pod="openstack/nova-cell0-conductor-0" Feb 03 07:07:19 crc kubenswrapper[4998]: I0203 07:07:19.241266 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec1012bb-b11f-4248-aa77-f9076a2a1fc9-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"ec1012bb-b11f-4248-aa77-f9076a2a1fc9\") " pod="openstack/nova-cell0-conductor-0" Feb 03 07:07:19 crc kubenswrapper[4998]: I0203 07:07:19.241334 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p46cc\" (UniqueName: \"kubernetes.io/projected/ec1012bb-b11f-4248-aa77-f9076a2a1fc9-kube-api-access-p46cc\") pod \"nova-cell0-conductor-0\" (UID: \"ec1012bb-b11f-4248-aa77-f9076a2a1fc9\") " pod="openstack/nova-cell0-conductor-0" Feb 03 07:07:19 crc kubenswrapper[4998]: I0203 07:07:19.241390 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec1012bb-b11f-4248-aa77-f9076a2a1fc9-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"ec1012bb-b11f-4248-aa77-f9076a2a1fc9\") " pod="openstack/nova-cell0-conductor-0" Feb 03 07:07:19 crc kubenswrapper[4998]: I0203 07:07:19.247432 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec1012bb-b11f-4248-aa77-f9076a2a1fc9-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"ec1012bb-b11f-4248-aa77-f9076a2a1fc9\") " pod="openstack/nova-cell0-conductor-0" Feb 03 07:07:19 crc kubenswrapper[4998]: I0203 07:07:19.247577 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec1012bb-b11f-4248-aa77-f9076a2a1fc9-config-data\") pod \"nova-cell0-conductor-0\" 
(UID: \"ec1012bb-b11f-4248-aa77-f9076a2a1fc9\") " pod="openstack/nova-cell0-conductor-0" Feb 03 07:07:19 crc kubenswrapper[4998]: I0203 07:07:19.284503 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p46cc\" (UniqueName: \"kubernetes.io/projected/ec1012bb-b11f-4248-aa77-f9076a2a1fc9-kube-api-access-p46cc\") pod \"nova-cell0-conductor-0\" (UID: \"ec1012bb-b11f-4248-aa77-f9076a2a1fc9\") " pod="openstack/nova-cell0-conductor-0" Feb 03 07:07:19 crc kubenswrapper[4998]: I0203 07:07:19.454208 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Feb 03 07:07:19 crc kubenswrapper[4998]: I0203 07:07:19.953291 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 03 07:07:19 crc kubenswrapper[4998]: W0203 07:07:19.959453 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podec1012bb_b11f_4248_aa77_f9076a2a1fc9.slice/crio-08e0a0b900f5ed5356f147f8035f9a669e6c6cbdc6a6f19074d45726d8082e6d WatchSource:0}: Error finding container 08e0a0b900f5ed5356f147f8035f9a669e6c6cbdc6a6f19074d45726d8082e6d: Status 404 returned error can't find the container with id 08e0a0b900f5ed5356f147f8035f9a669e6c6cbdc6a6f19074d45726d8082e6d Feb 03 07:07:20 crc kubenswrapper[4998]: I0203 07:07:20.002233 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"ec1012bb-b11f-4248-aa77-f9076a2a1fc9","Type":"ContainerStarted","Data":"08e0a0b900f5ed5356f147f8035f9a669e6c6cbdc6a6f19074d45726d8082e6d"} Feb 03 07:07:20 crc kubenswrapper[4998]: I0203 07:07:20.005500 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3614c866-f0e0-433c-9bc1-9601e83c479f","Type":"ContainerStarted","Data":"33df0fedaa8a6ba9ced2dc1af5357329eb9b9365b1144e9121248a8b0882f833"} Feb 03 07:07:20 crc kubenswrapper[4998]: I0203 07:07:20.005538 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3614c866-f0e0-433c-9bc1-9601e83c479f","Type":"ContainerStarted","Data":"505e445e1d61f5314d4840da4a3de1b3067fcf492e8165992044b9893ba78241"} Feb 03 07:07:20 crc kubenswrapper[4998]: I0203 07:07:20.036635 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Feb 03 07:07:20 crc kubenswrapper[4998]: I0203 07:07:20.038278 4998 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 03 07:07:20 crc kubenswrapper[4998]: I0203 07:07:20.159658 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Feb 03 07:07:21 crc kubenswrapper[4998]: I0203 07:07:21.016378 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"ec1012bb-b11f-4248-aa77-f9076a2a1fc9","Type":"ContainerStarted","Data":"659621b3871feec5ab21a36f8efb54aeb0a5418dddb477388b03fcd569919a1f"} Feb 03 07:07:21 crc kubenswrapper[4998]: I0203 07:07:21.016712 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Feb 03 07:07:21 crc kubenswrapper[4998]: I0203 07:07:21.041297 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.041277707 podStartE2EDuration="2.041277707s" podCreationTimestamp="2026-02-03 07:07:19 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:07:21.031321143 +0000 UTC m=+1279.318014949" watchObservedRunningTime="2026-02-03 07:07:21.041277707 +0000 UTC m=+1279.327971513" Feb 03 07:07:22 crc kubenswrapper[4998]: I0203 07:07:22.024961 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3614c866-f0e0-433c-9bc1-9601e83c479f","Type":"ContainerStarted","Data":"e1ca138319732aaeed7b3eca0957a664db0ff4d7e9e21ac834109166de0a53de"} Feb 03 07:07:22 crc kubenswrapper[4998]: I0203 07:07:22.025266 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 03 07:07:22 crc kubenswrapper[4998]: I0203 07:07:22.046599 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.068741231 podStartE2EDuration="6.046581333s" podCreationTimestamp="2026-02-03 07:07:16 +0000 UTC" firstStartedPulling="2026-02-03 07:07:17.804211692 +0000 UTC m=+1276.090905498" lastFinishedPulling="2026-02-03 07:07:21.782051784 +0000 UTC m=+1280.068745600" observedRunningTime="2026-02-03 07:07:22.04506991 +0000 UTC m=+1280.331763726" watchObservedRunningTime="2026-02-03 07:07:22.046581333 +0000 UTC m=+1280.333275139" Feb 03 07:07:22 crc kubenswrapper[4998]: I0203 07:07:22.559173 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 03 07:07:22 crc kubenswrapper[4998]: I0203 07:07:22.560620 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 03 07:07:22 crc kubenswrapper[4998]: I0203 07:07:22.589639 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 03 07:07:22 crc kubenswrapper[4998]: I0203 07:07:22.599068 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 03 07:07:23 crc kubenswrapper[4998]: I0203 07:07:23.035930 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 03 07:07:23 crc kubenswrapper[4998]: I0203 07:07:23.035984 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 03 07:07:25 crc kubenswrapper[4998]: I0203 07:07:25.022294 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Feb 03 07:07:25 crc kubenswrapper[4998]: I0203 07:07:25.027582 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Feb 03 07:07:29 crc kubenswrapper[4998]: I0203 07:07:29.480463 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.006083 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-ml2n9"] Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.007131 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-ml2n9" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.009870 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.012218 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.026038 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-ml2n9"] Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.144101 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/437ab265-6a33-4c00-8afc-707827fcf7d3-scripts\") pod \"nova-cell0-cell-mapping-ml2n9\" (UID: \"437ab265-6a33-4c00-8afc-707827fcf7d3\") " pod="openstack/nova-cell0-cell-mapping-ml2n9" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.144494 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rdn5c\" (UniqueName: \"kubernetes.io/projected/437ab265-6a33-4c00-8afc-707827fcf7d3-kube-api-access-rdn5c\") pod \"nova-cell0-cell-mapping-ml2n9\" (UID: \"437ab265-6a33-4c00-8afc-707827fcf7d3\") " pod="openstack/nova-cell0-cell-mapping-ml2n9" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.144552 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/437ab265-6a33-4c00-8afc-707827fcf7d3-config-data\") pod \"nova-cell0-cell-mapping-ml2n9\" (UID: \"437ab265-6a33-4c00-8afc-707827fcf7d3\") " pod="openstack/nova-cell0-cell-mapping-ml2n9" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.144579 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/437ab265-6a33-4c00-8afc-707827fcf7d3-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-ml2n9\" (UID: \"437ab265-6a33-4c00-8afc-707827fcf7d3\") " pod="openstack/nova-cell0-cell-mapping-ml2n9" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.245735 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/437ab265-6a33-4c00-8afc-707827fcf7d3-scripts\") pod \"nova-cell0-cell-mapping-ml2n9\" (UID: \"437ab265-6a33-4c00-8afc-707827fcf7d3\") " pod="openstack/nova-cell0-cell-mapping-ml2n9" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.245864 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdn5c\" (UniqueName: \"kubernetes.io/projected/437ab265-6a33-4c00-8afc-707827fcf7d3-kube-api-access-rdn5c\") pod \"nova-cell0-cell-mapping-ml2n9\" (UID: \"437ab265-6a33-4c00-8afc-707827fcf7d3\") " pod="openstack/nova-cell0-cell-mapping-ml2n9" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.245907 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/437ab265-6a33-4c00-8afc-707827fcf7d3-config-data\") pod \"nova-cell0-cell-mapping-ml2n9\" (UID: \"437ab265-6a33-4c00-8afc-707827fcf7d3\") " pod="openstack/nova-cell0-cell-mapping-ml2n9" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.245926 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/437ab265-6a33-4c00-8afc-707827fcf7d3-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-ml2n9\" (UID: \"437ab265-6a33-4c00-8afc-707827fcf7d3\") " pod="openstack/nova-cell0-cell-mapping-ml2n9" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.250159 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.252128 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/437ab265-6a33-4c00-8afc-707827fcf7d3-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-ml2n9\" (UID: \"437ab265-6a33-4c00-8afc-707827fcf7d3\") " pod="openstack/nova-cell0-cell-mapping-ml2n9" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.253527 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.254507 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/437ab265-6a33-4c00-8afc-707827fcf7d3-config-data\") pod \"nova-cell0-cell-mapping-ml2n9\" (UID: \"437ab265-6a33-4c00-8afc-707827fcf7d3\") " pod="openstack/nova-cell0-cell-mapping-ml2n9" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.269111 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.276363 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/437ab265-6a33-4c00-8afc-707827fcf7d3-scripts\") pod \"nova-cell0-cell-mapping-ml2n9\" (UID: \"437ab265-6a33-4c00-8afc-707827fcf7d3\") " pod="openstack/nova-cell0-cell-mapping-ml2n9" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.292424 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.313860 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdn5c\" (UniqueName: \"kubernetes.io/projected/437ab265-6a33-4c00-8afc-707827fcf7d3-kube-api-access-rdn5c\") pod \"nova-cell0-cell-mapping-ml2n9\" (UID: \"437ab265-6a33-4c00-8afc-707827fcf7d3\") " pod="openstack/nova-cell0-cell-mapping-ml2n9" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.324320 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-ml2n9" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.345769 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.347219 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acfcdb86-84f3-4f51-b3d2-57be7f9694f6-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"acfcdb86-84f3-4f51-b3d2-57be7f9694f6\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.347267 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rl56m\" (UniqueName: \"kubernetes.io/projected/acfcdb86-84f3-4f51-b3d2-57be7f9694f6-kube-api-access-rl56m\") pod \"nova-cell1-novncproxy-0\" (UID: \"acfcdb86-84f3-4f51-b3d2-57be7f9694f6\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.347295 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.347382 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acfcdb86-84f3-4f51-b3d2-57be7f9694f6-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"acfcdb86-84f3-4f51-b3d2-57be7f9694f6\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.359477 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.385850 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.413076 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.414207 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.415726 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.454129 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acfcdb86-84f3-4f51-b3d2-57be7f9694f6-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"acfcdb86-84f3-4f51-b3d2-57be7f9694f6\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.454164 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rl56m\" (UniqueName: \"kubernetes.io/projected/acfcdb86-84f3-4f51-b3d2-57be7f9694f6-kube-api-access-rl56m\") pod \"nova-cell1-novncproxy-0\" (UID: \"acfcdb86-84f3-4f51-b3d2-57be7f9694f6\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.454192 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6928192e-b9ad-4cf6-8e06-2ec228eed126-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6928192e-b9ad-4cf6-8e06-2ec228eed126\") " pod="openstack/nova-api-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.454208 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6928192e-b9ad-4cf6-8e06-2ec228eed126-config-data\") pod \"nova-api-0\" (UID: \"6928192e-b9ad-4cf6-8e06-2ec228eed126\") " pod="openstack/nova-api-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.454233 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6928192e-b9ad-4cf6-8e06-2ec228eed126-logs\") pod \"nova-api-0\" (UID: \"6928192e-b9ad-4cf6-8e06-2ec228eed126\") " pod="openstack/nova-api-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.454282 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acfcdb86-84f3-4f51-b3d2-57be7f9694f6-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"acfcdb86-84f3-4f51-b3d2-57be7f9694f6\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.454308 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swrjb\" (UniqueName: \"kubernetes.io/projected/6928192e-b9ad-4cf6-8e06-2ec228eed126-kube-api-access-swrjb\") pod \"nova-api-0\" (UID: \"6928192e-b9ad-4cf6-8e06-2ec228eed126\") " pod="openstack/nova-api-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.466346 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acfcdb86-84f3-4f51-b3d2-57be7f9694f6-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"acfcdb86-84f3-4f51-b3d2-57be7f9694f6\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.466773 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acfcdb86-84f3-4f51-b3d2-57be7f9694f6-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"acfcdb86-84f3-4f51-b3d2-57be7f9694f6\") " 
pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.470765 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.540171 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rl56m\" (UniqueName: \"kubernetes.io/projected/acfcdb86-84f3-4f51-b3d2-57be7f9694f6-kube-api-access-rl56m\") pod \"nova-cell1-novncproxy-0\" (UID: \"acfcdb86-84f3-4f51-b3d2-57be7f9694f6\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.557032 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06818903-6457-43bf-880e-8b29514a8a08-config-data\") pod \"nova-scheduler-0\" (UID: \"06818903-6457-43bf-880e-8b29514a8a08\") " pod="openstack/nova-scheduler-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.557169 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6928192e-b9ad-4cf6-8e06-2ec228eed126-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6928192e-b9ad-4cf6-8e06-2ec228eed126\") " pod="openstack/nova-api-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.557197 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6928192e-b9ad-4cf6-8e06-2ec228eed126-config-data\") pod \"nova-api-0\" (UID: \"6928192e-b9ad-4cf6-8e06-2ec228eed126\") " pod="openstack/nova-api-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.557254 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6928192e-b9ad-4cf6-8e06-2ec228eed126-logs\") pod \"nova-api-0\" (UID: \"6928192e-b9ad-4cf6-8e06-2ec228eed126\") " pod="openstack/nova-api-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.557278 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06818903-6457-43bf-880e-8b29514a8a08-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"06818903-6457-43bf-880e-8b29514a8a08\") " pod="openstack/nova-scheduler-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.557347 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7rcnl\" (UniqueName: \"kubernetes.io/projected/06818903-6457-43bf-880e-8b29514a8a08-kube-api-access-7rcnl\") pod \"nova-scheduler-0\" (UID: \"06818903-6457-43bf-880e-8b29514a8a08\") " pod="openstack/nova-scheduler-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.557445 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swrjb\" (UniqueName: \"kubernetes.io/projected/6928192e-b9ad-4cf6-8e06-2ec228eed126-kube-api-access-swrjb\") pod \"nova-api-0\" (UID: \"6928192e-b9ad-4cf6-8e06-2ec228eed126\") " pod="openstack/nova-api-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.559433 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6928192e-b9ad-4cf6-8e06-2ec228eed126-logs\") pod \"nova-api-0\" (UID: \"6928192e-b9ad-4cf6-8e06-2ec228eed126\") " pod="openstack/nova-api-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.564241 4998 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.566390 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6928192e-b9ad-4cf6-8e06-2ec228eed126-config-data\") pod \"nova-api-0\" (UID: \"6928192e-b9ad-4cf6-8e06-2ec228eed126\") " pod="openstack/nova-api-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.567882 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.574316 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.581810 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6928192e-b9ad-4cf6-8e06-2ec228eed126-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6928192e-b9ad-4cf6-8e06-2ec228eed126\") " pod="openstack/nova-api-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.583640 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.610520 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swrjb\" (UniqueName: \"kubernetes.io/projected/6928192e-b9ad-4cf6-8e06-2ec228eed126-kube-api-access-swrjb\") pod \"nova-api-0\" (UID: \"6928192e-b9ad-4cf6-8e06-2ec228eed126\") " pod="openstack/nova-api-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.632704 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-868bc9dc59-wlq6q"] Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.652533 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-868bc9dc59-wlq6q"] Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.652947 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-868bc9dc59-wlq6q" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.658901 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06818903-6457-43bf-880e-8b29514a8a08-config-data\") pod \"nova-scheduler-0\" (UID: \"06818903-6457-43bf-880e-8b29514a8a08\") " pod="openstack/nova-scheduler-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.658990 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06818903-6457-43bf-880e-8b29514a8a08-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"06818903-6457-43bf-880e-8b29514a8a08\") " pod="openstack/nova-scheduler-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.659031 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7rcnl\" (UniqueName: \"kubernetes.io/projected/06818903-6457-43bf-880e-8b29514a8a08-kube-api-access-7rcnl\") pod \"nova-scheduler-0\" (UID: \"06818903-6457-43bf-880e-8b29514a8a08\") " pod="openstack/nova-scheduler-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.659051 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab00de0e-0ad9-4085-abff-d9383cfeb712-config-data\") pod \"nova-metadata-0\" (UID: \"ab00de0e-0ad9-4085-abff-d9383cfeb712\") " pod="openstack/nova-metadata-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.659091 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kmvpv\" (UniqueName: \"kubernetes.io/projected/ab00de0e-0ad9-4085-abff-d9383cfeb712-kube-api-access-kmvpv\") pod \"nova-metadata-0\" (UID: \"ab00de0e-0ad9-4085-abff-d9383cfeb712\") " pod="openstack/nova-metadata-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.659119 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ab00de0e-0ad9-4085-abff-d9383cfeb712-logs\") pod \"nova-metadata-0\" (UID: \"ab00de0e-0ad9-4085-abff-d9383cfeb712\") " pod="openstack/nova-metadata-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.659156 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab00de0e-0ad9-4085-abff-d9383cfeb712-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ab00de0e-0ad9-4085-abff-d9383cfeb712\") " pod="openstack/nova-metadata-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.667808 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06818903-6457-43bf-880e-8b29514a8a08-config-data\") pod \"nova-scheduler-0\" (UID: \"06818903-6457-43bf-880e-8b29514a8a08\") " pod="openstack/nova-scheduler-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.668520 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06818903-6457-43bf-880e-8b29514a8a08-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"06818903-6457-43bf-880e-8b29514a8a08\") " pod="openstack/nova-scheduler-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.686442 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-7rcnl\" (UniqueName: \"kubernetes.io/projected/06818903-6457-43bf-880e-8b29514a8a08-kube-api-access-7rcnl\") pod \"nova-scheduler-0\" (UID: \"06818903-6457-43bf-880e-8b29514a8a08\") " pod="openstack/nova-scheduler-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.738620 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.769245 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9182f422-2564-4a1c-b790-d215f7984cf1-config\") pod \"dnsmasq-dns-868bc9dc59-wlq6q\" (UID: \"9182f422-2564-4a1c-b790-d215f7984cf1\") " pod="openstack/dnsmasq-dns-868bc9dc59-wlq6q" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.769303 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9182f422-2564-4a1c-b790-d215f7984cf1-dns-swift-storage-0\") pod \"dnsmasq-dns-868bc9dc59-wlq6q\" (UID: \"9182f422-2564-4a1c-b790-d215f7984cf1\") " pod="openstack/dnsmasq-dns-868bc9dc59-wlq6q" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.769338 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab00de0e-0ad9-4085-abff-d9383cfeb712-config-data\") pod \"nova-metadata-0\" (UID: \"ab00de0e-0ad9-4085-abff-d9383cfeb712\") " pod="openstack/nova-metadata-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.769362 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9182f422-2564-4a1c-b790-d215f7984cf1-dns-svc\") pod \"dnsmasq-dns-868bc9dc59-wlq6q\" (UID: \"9182f422-2564-4a1c-b790-d215f7984cf1\") " pod="openstack/dnsmasq-dns-868bc9dc59-wlq6q" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.769382 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-clzlj\" (UniqueName: \"kubernetes.io/projected/9182f422-2564-4a1c-b790-d215f7984cf1-kube-api-access-clzlj\") pod \"dnsmasq-dns-868bc9dc59-wlq6q\" (UID: \"9182f422-2564-4a1c-b790-d215f7984cf1\") " pod="openstack/dnsmasq-dns-868bc9dc59-wlq6q" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.769419 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kmvpv\" (UniqueName: \"kubernetes.io/projected/ab00de0e-0ad9-4085-abff-d9383cfeb712-kube-api-access-kmvpv\") pod \"nova-metadata-0\" (UID: \"ab00de0e-0ad9-4085-abff-d9383cfeb712\") " pod="openstack/nova-metadata-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.769457 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ab00de0e-0ad9-4085-abff-d9383cfeb712-logs\") pod \"nova-metadata-0\" (UID: \"ab00de0e-0ad9-4085-abff-d9383cfeb712\") " pod="openstack/nova-metadata-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.769509 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab00de0e-0ad9-4085-abff-d9383cfeb712-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ab00de0e-0ad9-4085-abff-d9383cfeb712\") " pod="openstack/nova-metadata-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.769538 4998 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9182f422-2564-4a1c-b790-d215f7984cf1-ovsdbserver-sb\") pod \"dnsmasq-dns-868bc9dc59-wlq6q\" (UID: \"9182f422-2564-4a1c-b790-d215f7984cf1\") " pod="openstack/dnsmasq-dns-868bc9dc59-wlq6q" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.769567 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9182f422-2564-4a1c-b790-d215f7984cf1-ovsdbserver-nb\") pod \"dnsmasq-dns-868bc9dc59-wlq6q\" (UID: \"9182f422-2564-4a1c-b790-d215f7984cf1\") " pod="openstack/dnsmasq-dns-868bc9dc59-wlq6q" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.772228 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ab00de0e-0ad9-4085-abff-d9383cfeb712-logs\") pod \"nova-metadata-0\" (UID: \"ab00de0e-0ad9-4085-abff-d9383cfeb712\") " pod="openstack/nova-metadata-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.775290 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab00de0e-0ad9-4085-abff-d9383cfeb712-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ab00de0e-0ad9-4085-abff-d9383cfeb712\") " pod="openstack/nova-metadata-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.783615 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab00de0e-0ad9-4085-abff-d9383cfeb712-config-data\") pod \"nova-metadata-0\" (UID: \"ab00de0e-0ad9-4085-abff-d9383cfeb712\") " pod="openstack/nova-metadata-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.794964 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kmvpv\" (UniqueName: \"kubernetes.io/projected/ab00de0e-0ad9-4085-abff-d9383cfeb712-kube-api-access-kmvpv\") pod \"nova-metadata-0\" (UID: \"ab00de0e-0ad9-4085-abff-d9383cfeb712\") " pod="openstack/nova-metadata-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.795239 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.805929 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.872279 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9182f422-2564-4a1c-b790-d215f7984cf1-dns-swift-storage-0\") pod \"dnsmasq-dns-868bc9dc59-wlq6q\" (UID: \"9182f422-2564-4a1c-b790-d215f7984cf1\") " pod="openstack/dnsmasq-dns-868bc9dc59-wlq6q" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.873697 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9182f422-2564-4a1c-b790-d215f7984cf1-dns-svc\") pod \"dnsmasq-dns-868bc9dc59-wlq6q\" (UID: \"9182f422-2564-4a1c-b790-d215f7984cf1\") " pod="openstack/dnsmasq-dns-868bc9dc59-wlq6q" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.873745 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-clzlj\" (UniqueName: \"kubernetes.io/projected/9182f422-2564-4a1c-b790-d215f7984cf1-kube-api-access-clzlj\") pod \"dnsmasq-dns-868bc9dc59-wlq6q\" (UID: \"9182f422-2564-4a1c-b790-d215f7984cf1\") " pod="openstack/dnsmasq-dns-868bc9dc59-wlq6q" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.873868 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9182f422-2564-4a1c-b790-d215f7984cf1-ovsdbserver-sb\") pod \"dnsmasq-dns-868bc9dc59-wlq6q\" (UID: \"9182f422-2564-4a1c-b790-d215f7984cf1\") " pod="openstack/dnsmasq-dns-868bc9dc59-wlq6q" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.873894 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9182f422-2564-4a1c-b790-d215f7984cf1-ovsdbserver-nb\") pod \"dnsmasq-dns-868bc9dc59-wlq6q\" (UID: \"9182f422-2564-4a1c-b790-d215f7984cf1\") " pod="openstack/dnsmasq-dns-868bc9dc59-wlq6q" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.873973 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9182f422-2564-4a1c-b790-d215f7984cf1-config\") pod \"dnsmasq-dns-868bc9dc59-wlq6q\" (UID: \"9182f422-2564-4a1c-b790-d215f7984cf1\") " pod="openstack/dnsmasq-dns-868bc9dc59-wlq6q" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.873243 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9182f422-2564-4a1c-b790-d215f7984cf1-dns-swift-storage-0\") pod \"dnsmasq-dns-868bc9dc59-wlq6q\" (UID: \"9182f422-2564-4a1c-b790-d215f7984cf1\") " pod="openstack/dnsmasq-dns-868bc9dc59-wlq6q" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.874530 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9182f422-2564-4a1c-b790-d215f7984cf1-config\") pod \"dnsmasq-dns-868bc9dc59-wlq6q\" (UID: \"9182f422-2564-4a1c-b790-d215f7984cf1\") " pod="openstack/dnsmasq-dns-868bc9dc59-wlq6q" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.875188 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9182f422-2564-4a1c-b790-d215f7984cf1-ovsdbserver-nb\") pod \"dnsmasq-dns-868bc9dc59-wlq6q\" (UID: \"9182f422-2564-4a1c-b790-d215f7984cf1\") " pod="openstack/dnsmasq-dns-868bc9dc59-wlq6q" Feb 03 07:07:30 crc 
kubenswrapper[4998]: I0203 07:07:30.875186 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9182f422-2564-4a1c-b790-d215f7984cf1-dns-svc\") pod \"dnsmasq-dns-868bc9dc59-wlq6q\" (UID: \"9182f422-2564-4a1c-b790-d215f7984cf1\") " pod="openstack/dnsmasq-dns-868bc9dc59-wlq6q" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.875680 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9182f422-2564-4a1c-b790-d215f7984cf1-ovsdbserver-sb\") pod \"dnsmasq-dns-868bc9dc59-wlq6q\" (UID: \"9182f422-2564-4a1c-b790-d215f7984cf1\") " pod="openstack/dnsmasq-dns-868bc9dc59-wlq6q" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.909748 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-clzlj\" (UniqueName: \"kubernetes.io/projected/9182f422-2564-4a1c-b790-d215f7984cf1-kube-api-access-clzlj\") pod \"dnsmasq-dns-868bc9dc59-wlq6q\" (UID: \"9182f422-2564-4a1c-b790-d215f7984cf1\") " pod="openstack/dnsmasq-dns-868bc9dc59-wlq6q" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.915546 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 03 07:07:30 crc kubenswrapper[4998]: I0203 07:07:30.983790 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-868bc9dc59-wlq6q" Feb 03 07:07:31 crc kubenswrapper[4998]: I0203 07:07:31.100662 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-ml2n9"] Feb 03 07:07:31 crc kubenswrapper[4998]: I0203 07:07:31.422652 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 03 07:07:31 crc kubenswrapper[4998]: I0203 07:07:31.437620 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 03 07:07:31 crc kubenswrapper[4998]: W0203 07:07:31.452568 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podacfcdb86_84f3_4f51_b3d2_57be7f9694f6.slice/crio-e9da2bbd019da4a3a22738a0b89d2da1236d5d1e85d854d92ac71542e916a2d9 WatchSource:0}: Error finding container e9da2bbd019da4a3a22738a0b89d2da1236d5d1e85d854d92ac71542e916a2d9: Status 404 returned error can't find the container with id e9da2bbd019da4a3a22738a0b89d2da1236d5d1e85d854d92ac71542e916a2d9 Feb 03 07:07:31 crc kubenswrapper[4998]: I0203 07:07:31.603506 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 07:07:31 crc kubenswrapper[4998]: I0203 07:07:31.620825 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-kmnkm"] Feb 03 07:07:31 crc kubenswrapper[4998]: I0203 07:07:31.621987 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-kmnkm" Feb 03 07:07:31 crc kubenswrapper[4998]: I0203 07:07:31.624335 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Feb 03 07:07:31 crc kubenswrapper[4998]: I0203 07:07:31.624608 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Feb 03 07:07:31 crc kubenswrapper[4998]: I0203 07:07:31.630300 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-kmnkm"] Feb 03 07:07:31 crc kubenswrapper[4998]: I0203 07:07:31.682142 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 03 07:07:31 crc kubenswrapper[4998]: W0203 07:07:31.698601 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podab00de0e_0ad9_4085_abff_d9383cfeb712.slice/crio-098a9e764b727f8b132ed5460b5255dfe6aebbbd821d63161f674bccadb32daf WatchSource:0}: Error finding container 098a9e764b727f8b132ed5460b5255dfe6aebbbd821d63161f674bccadb32daf: Status 404 returned error can't find the container with id 098a9e764b727f8b132ed5460b5255dfe6aebbbd821d63161f674bccadb32daf Feb 03 07:07:31 crc kubenswrapper[4998]: I0203 07:07:31.699672 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8tdvp\" (UniqueName: \"kubernetes.io/projected/0d737e69-6547-48c5-8bbf-7ca34468e8f6-kube-api-access-8tdvp\") pod \"nova-cell1-conductor-db-sync-kmnkm\" (UID: \"0d737e69-6547-48c5-8bbf-7ca34468e8f6\") " pod="openstack/nova-cell1-conductor-db-sync-kmnkm" Feb 03 07:07:31 crc kubenswrapper[4998]: I0203 07:07:31.699759 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d737e69-6547-48c5-8bbf-7ca34468e8f6-scripts\") pod \"nova-cell1-conductor-db-sync-kmnkm\" (UID: \"0d737e69-6547-48c5-8bbf-7ca34468e8f6\") " pod="openstack/nova-cell1-conductor-db-sync-kmnkm" Feb 03 07:07:31 crc kubenswrapper[4998]: I0203 07:07:31.699819 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d737e69-6547-48c5-8bbf-7ca34468e8f6-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-kmnkm\" (UID: \"0d737e69-6547-48c5-8bbf-7ca34468e8f6\") " pod="openstack/nova-cell1-conductor-db-sync-kmnkm" Feb 03 07:07:31 crc kubenswrapper[4998]: I0203 07:07:31.699866 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d737e69-6547-48c5-8bbf-7ca34468e8f6-config-data\") pod \"nova-cell1-conductor-db-sync-kmnkm\" (UID: \"0d737e69-6547-48c5-8bbf-7ca34468e8f6\") " pod="openstack/nova-cell1-conductor-db-sync-kmnkm" Feb 03 07:07:31 crc kubenswrapper[4998]: I0203 07:07:31.702659 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-868bc9dc59-wlq6q"] Feb 03 07:07:31 crc kubenswrapper[4998]: W0203 07:07:31.708773 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9182f422_2564_4a1c_b790_d215f7984cf1.slice/crio-82a8a075bd0860d14f144aba7cff749644ce94d68064e6bcbf411a029cd35ea6 WatchSource:0}: Error finding container 82a8a075bd0860d14f144aba7cff749644ce94d68064e6bcbf411a029cd35ea6: Status 404 
returned error can't find the container with id 82a8a075bd0860d14f144aba7cff749644ce94d68064e6bcbf411a029cd35ea6 Feb 03 07:07:31 crc kubenswrapper[4998]: I0203 07:07:31.801673 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8tdvp\" (UniqueName: \"kubernetes.io/projected/0d737e69-6547-48c5-8bbf-7ca34468e8f6-kube-api-access-8tdvp\") pod \"nova-cell1-conductor-db-sync-kmnkm\" (UID: \"0d737e69-6547-48c5-8bbf-7ca34468e8f6\") " pod="openstack/nova-cell1-conductor-db-sync-kmnkm" Feb 03 07:07:31 crc kubenswrapper[4998]: I0203 07:07:31.801984 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d737e69-6547-48c5-8bbf-7ca34468e8f6-scripts\") pod \"nova-cell1-conductor-db-sync-kmnkm\" (UID: \"0d737e69-6547-48c5-8bbf-7ca34468e8f6\") " pod="openstack/nova-cell1-conductor-db-sync-kmnkm" Feb 03 07:07:31 crc kubenswrapper[4998]: I0203 07:07:31.802038 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d737e69-6547-48c5-8bbf-7ca34468e8f6-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-kmnkm\" (UID: \"0d737e69-6547-48c5-8bbf-7ca34468e8f6\") " pod="openstack/nova-cell1-conductor-db-sync-kmnkm" Feb 03 07:07:31 crc kubenswrapper[4998]: I0203 07:07:31.802088 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d737e69-6547-48c5-8bbf-7ca34468e8f6-config-data\") pod \"nova-cell1-conductor-db-sync-kmnkm\" (UID: \"0d737e69-6547-48c5-8bbf-7ca34468e8f6\") " pod="openstack/nova-cell1-conductor-db-sync-kmnkm" Feb 03 07:07:31 crc kubenswrapper[4998]: I0203 07:07:31.806316 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d737e69-6547-48c5-8bbf-7ca34468e8f6-config-data\") pod \"nova-cell1-conductor-db-sync-kmnkm\" (UID: \"0d737e69-6547-48c5-8bbf-7ca34468e8f6\") " pod="openstack/nova-cell1-conductor-db-sync-kmnkm" Feb 03 07:07:31 crc kubenswrapper[4998]: I0203 07:07:31.806482 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d737e69-6547-48c5-8bbf-7ca34468e8f6-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-kmnkm\" (UID: \"0d737e69-6547-48c5-8bbf-7ca34468e8f6\") " pod="openstack/nova-cell1-conductor-db-sync-kmnkm" Feb 03 07:07:31 crc kubenswrapper[4998]: I0203 07:07:31.809239 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d737e69-6547-48c5-8bbf-7ca34468e8f6-scripts\") pod \"nova-cell1-conductor-db-sync-kmnkm\" (UID: \"0d737e69-6547-48c5-8bbf-7ca34468e8f6\") " pod="openstack/nova-cell1-conductor-db-sync-kmnkm" Feb 03 07:07:31 crc kubenswrapper[4998]: I0203 07:07:31.820111 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8tdvp\" (UniqueName: \"kubernetes.io/projected/0d737e69-6547-48c5-8bbf-7ca34468e8f6-kube-api-access-8tdvp\") pod \"nova-cell1-conductor-db-sync-kmnkm\" (UID: \"0d737e69-6547-48c5-8bbf-7ca34468e8f6\") " pod="openstack/nova-cell1-conductor-db-sync-kmnkm" Feb 03 07:07:31 crc kubenswrapper[4998]: I0203 07:07:31.967471 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-kmnkm" Feb 03 07:07:32 crc kubenswrapper[4998]: I0203 07:07:32.176376 4998 generic.go:334] "Generic (PLEG): container finished" podID="9182f422-2564-4a1c-b790-d215f7984cf1" containerID="91644449567dc1e63d076d2b84eed880d36fc910238511d9c04119e36ed16794" exitCode=0 Feb 03 07:07:32 crc kubenswrapper[4998]: I0203 07:07:32.176716 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-868bc9dc59-wlq6q" event={"ID":"9182f422-2564-4a1c-b790-d215f7984cf1","Type":"ContainerDied","Data":"91644449567dc1e63d076d2b84eed880d36fc910238511d9c04119e36ed16794"} Feb 03 07:07:32 crc kubenswrapper[4998]: I0203 07:07:32.176748 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-868bc9dc59-wlq6q" event={"ID":"9182f422-2564-4a1c-b790-d215f7984cf1","Type":"ContainerStarted","Data":"82a8a075bd0860d14f144aba7cff749644ce94d68064e6bcbf411a029cd35ea6"} Feb 03 07:07:32 crc kubenswrapper[4998]: I0203 07:07:32.180826 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"acfcdb86-84f3-4f51-b3d2-57be7f9694f6","Type":"ContainerStarted","Data":"e9da2bbd019da4a3a22738a0b89d2da1236d5d1e85d854d92ac71542e916a2d9"} Feb 03 07:07:32 crc kubenswrapper[4998]: I0203 07:07:32.190373 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-ml2n9" event={"ID":"437ab265-6a33-4c00-8afc-707827fcf7d3","Type":"ContainerStarted","Data":"29722612d0982233b88843f2faf98b88c223434bc9d5d4ecba495fb7bf4b1ab7"} Feb 03 07:07:32 crc kubenswrapper[4998]: I0203 07:07:32.190435 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-ml2n9" event={"ID":"437ab265-6a33-4c00-8afc-707827fcf7d3","Type":"ContainerStarted","Data":"a2b48a5a2f2edfff137f271fb584c7757ddb5daa28e6683759e7626347deccd3"} Feb 03 07:07:32 crc kubenswrapper[4998]: I0203 07:07:32.201439 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"06818903-6457-43bf-880e-8b29514a8a08","Type":"ContainerStarted","Data":"44efb5e262378e3ea5987c8e71172a0619d98b141c8ecd1c39a3aa0b308a48f1"} Feb 03 07:07:32 crc kubenswrapper[4998]: I0203 07:07:32.207415 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ab00de0e-0ad9-4085-abff-d9383cfeb712","Type":"ContainerStarted","Data":"098a9e764b727f8b132ed5460b5255dfe6aebbbd821d63161f674bccadb32daf"} Feb 03 07:07:32 crc kubenswrapper[4998]: I0203 07:07:32.210728 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6928192e-b9ad-4cf6-8e06-2ec228eed126","Type":"ContainerStarted","Data":"82effbcb9b24f9a96d2f1e8d36b7d54690767179859d9dc717649b69f9db9906"} Feb 03 07:07:32 crc kubenswrapper[4998]: I0203 07:07:32.232160 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-ml2n9" podStartSLOduration=3.232139819 podStartE2EDuration="3.232139819s" podCreationTimestamp="2026-02-03 07:07:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:07:32.214350101 +0000 UTC m=+1290.501043917" watchObservedRunningTime="2026-02-03 07:07:32.232139819 +0000 UTC m=+1290.518833625" Feb 03 07:07:32 crc kubenswrapper[4998]: I0203 07:07:32.443416 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-kmnkm"] Feb 03 
07:07:33 crc kubenswrapper[4998]: I0203 07:07:33.229894 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-kmnkm" event={"ID":"0d737e69-6547-48c5-8bbf-7ca34468e8f6","Type":"ContainerStarted","Data":"52f356947435219b0263bbbd5ce037049ed715085f6b511a973666bee8f1da89"} Feb 03 07:07:33 crc kubenswrapper[4998]: I0203 07:07:33.230229 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-kmnkm" event={"ID":"0d737e69-6547-48c5-8bbf-7ca34468e8f6","Type":"ContainerStarted","Data":"7107b53a3eb587ff78ab5fb53dbdb3fa3118b30d20c307ea5c859b7fde4b8a97"} Feb 03 07:07:33 crc kubenswrapper[4998]: I0203 07:07:33.238763 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-868bc9dc59-wlq6q" event={"ID":"9182f422-2564-4a1c-b790-d215f7984cf1","Type":"ContainerStarted","Data":"0b36fd3df7994846c03a354a1f5a5dda6a31761a0bcc03334232234861a84ef8"} Feb 03 07:07:33 crc kubenswrapper[4998]: I0203 07:07:33.238817 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-868bc9dc59-wlq6q" Feb 03 07:07:33 crc kubenswrapper[4998]: I0203 07:07:33.254042 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-kmnkm" podStartSLOduration=2.254023819 podStartE2EDuration="2.254023819s" podCreationTimestamp="2026-02-03 07:07:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:07:33.24496806 +0000 UTC m=+1291.531661866" watchObservedRunningTime="2026-02-03 07:07:33.254023819 +0000 UTC m=+1291.540717625" Feb 03 07:07:33 crc kubenswrapper[4998]: I0203 07:07:33.280647 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-868bc9dc59-wlq6q" podStartSLOduration=3.280623419 podStartE2EDuration="3.280623419s" podCreationTimestamp="2026-02-03 07:07:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:07:33.269194122 +0000 UTC m=+1291.555887928" watchObservedRunningTime="2026-02-03 07:07:33.280623419 +0000 UTC m=+1291.567317235" Feb 03 07:07:34 crc kubenswrapper[4998]: I0203 07:07:34.017671 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 03 07:07:34 crc kubenswrapper[4998]: I0203 07:07:34.032314 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 03 07:07:35 crc kubenswrapper[4998]: I0203 07:07:35.258040 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6928192e-b9ad-4cf6-8e06-2ec228eed126","Type":"ContainerStarted","Data":"54dbd2cecba320b2fc9213ed3e654126b77a791838b0470017988ad2d984d176"} Feb 03 07:07:35 crc kubenswrapper[4998]: I0203 07:07:35.260201 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6928192e-b9ad-4cf6-8e06-2ec228eed126","Type":"ContainerStarted","Data":"4a88c17630989ed8239da39c28bf5da3949e9fc5efa6d80626f8a6b62e6fe5df"} Feb 03 07:07:35 crc kubenswrapper[4998]: I0203 07:07:35.272093 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ab00de0e-0ad9-4085-abff-d9383cfeb712","Type":"ContainerStarted","Data":"67f41d70a452f5e2318d430f8ca6dbbc9d678bac6ed6dbeb680ad9f6fab077e5"} Feb 03 07:07:35 crc kubenswrapper[4998]: I0203 07:07:35.272147 4998 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ab00de0e-0ad9-4085-abff-d9383cfeb712","Type":"ContainerStarted","Data":"5b97509f0940b2ff84d32ba41eb37019e5a72aa74c7cd8d04ce4177519975f8f"} Feb 03 07:07:35 crc kubenswrapper[4998]: I0203 07:07:35.272282 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="ab00de0e-0ad9-4085-abff-d9383cfeb712" containerName="nova-metadata-log" containerID="cri-o://5b97509f0940b2ff84d32ba41eb37019e5a72aa74c7cd8d04ce4177519975f8f" gracePeriod=30 Feb 03 07:07:35 crc kubenswrapper[4998]: I0203 07:07:35.272621 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="ab00de0e-0ad9-4085-abff-d9383cfeb712" containerName="nova-metadata-metadata" containerID="cri-o://67f41d70a452f5e2318d430f8ca6dbbc9d678bac6ed6dbeb680ad9f6fab077e5" gracePeriod=30 Feb 03 07:07:35 crc kubenswrapper[4998]: I0203 07:07:35.288010 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.053369691 podStartE2EDuration="5.287994886s" podCreationTimestamp="2026-02-03 07:07:30 +0000 UTC" firstStartedPulling="2026-02-03 07:07:31.440124778 +0000 UTC m=+1289.726818584" lastFinishedPulling="2026-02-03 07:07:34.674749943 +0000 UTC m=+1292.961443779" observedRunningTime="2026-02-03 07:07:35.277319621 +0000 UTC m=+1293.564013457" watchObservedRunningTime="2026-02-03 07:07:35.287994886 +0000 UTC m=+1293.574688692" Feb 03 07:07:35 crc kubenswrapper[4998]: I0203 07:07:35.288658 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"acfcdb86-84f3-4f51-b3d2-57be7f9694f6","Type":"ContainerStarted","Data":"d4267783ff77c4a1082e7b0ede7b9c76fef91525658a5053c8142da8e9ca8397"} Feb 03 07:07:35 crc kubenswrapper[4998]: I0203 07:07:35.288699 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="acfcdb86-84f3-4f51-b3d2-57be7f9694f6" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://d4267783ff77c4a1082e7b0ede7b9c76fef91525658a5053c8142da8e9ca8397" gracePeriod=30 Feb 03 07:07:35 crc kubenswrapper[4998]: I0203 07:07:35.292396 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"06818903-6457-43bf-880e-8b29514a8a08","Type":"ContainerStarted","Data":"489aa80d4620b45b72b6036eee3b1a56613964548de8bfb4462feabb0a1495fa"} Feb 03 07:07:35 crc kubenswrapper[4998]: I0203 07:07:35.305993 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.34906057 podStartE2EDuration="5.30597178s" podCreationTimestamp="2026-02-03 07:07:30 +0000 UTC" firstStartedPulling="2026-02-03 07:07:31.70265862 +0000 UTC m=+1289.989352426" lastFinishedPulling="2026-02-03 07:07:34.65956981 +0000 UTC m=+1292.946263636" observedRunningTime="2026-02-03 07:07:35.299718121 +0000 UTC m=+1293.586411927" watchObservedRunningTime="2026-02-03 07:07:35.30597178 +0000 UTC m=+1293.592665606" Feb 03 07:07:35 crc kubenswrapper[4998]: I0203 07:07:35.328535 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.123875836 podStartE2EDuration="5.328519554s" podCreationTimestamp="2026-02-03 07:07:30 +0000 UTC" firstStartedPulling="2026-02-03 07:07:31.454705565 +0000 UTC m=+1289.741399371" lastFinishedPulling="2026-02-03 
07:07:34.659349283 +0000 UTC m=+1292.946043089" observedRunningTime="2026-02-03 07:07:35.319733363 +0000 UTC m=+1293.606427169" watchObservedRunningTime="2026-02-03 07:07:35.328519554 +0000 UTC m=+1293.615213360" Feb 03 07:07:35 crc kubenswrapper[4998]: I0203 07:07:35.346445 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.288687715 podStartE2EDuration="5.346430806s" podCreationTimestamp="2026-02-03 07:07:30 +0000 UTC" firstStartedPulling="2026-02-03 07:07:31.601229262 +0000 UTC m=+1289.887923058" lastFinishedPulling="2026-02-03 07:07:34.658972343 +0000 UTC m=+1292.945666149" observedRunningTime="2026-02-03 07:07:35.345564851 +0000 UTC m=+1293.632258657" watchObservedRunningTime="2026-02-03 07:07:35.346430806 +0000 UTC m=+1293.633124612" Feb 03 07:07:35 crc kubenswrapper[4998]: I0203 07:07:35.740136 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:07:35 crc kubenswrapper[4998]: I0203 07:07:35.806837 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Feb 03 07:07:35 crc kubenswrapper[4998]: I0203 07:07:35.916456 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 03 07:07:35 crc kubenswrapper[4998]: I0203 07:07:35.916772 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 03 07:07:36 crc kubenswrapper[4998]: I0203 07:07:36.304268 4998 generic.go:334] "Generic (PLEG): container finished" podID="ab00de0e-0ad9-4085-abff-d9383cfeb712" containerID="5b97509f0940b2ff84d32ba41eb37019e5a72aa74c7cd8d04ce4177519975f8f" exitCode=143 Feb 03 07:07:36 crc kubenswrapper[4998]: I0203 07:07:36.304550 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ab00de0e-0ad9-4085-abff-d9383cfeb712","Type":"ContainerDied","Data":"5b97509f0940b2ff84d32ba41eb37019e5a72aa74c7cd8d04ce4177519975f8f"} Feb 03 07:07:40 crc kubenswrapper[4998]: I0203 07:07:40.346132 4998 generic.go:334] "Generic (PLEG): container finished" podID="0d737e69-6547-48c5-8bbf-7ca34468e8f6" containerID="52f356947435219b0263bbbd5ce037049ed715085f6b511a973666bee8f1da89" exitCode=0 Feb 03 07:07:40 crc kubenswrapper[4998]: I0203 07:07:40.346238 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-kmnkm" event={"ID":"0d737e69-6547-48c5-8bbf-7ca34468e8f6","Type":"ContainerDied","Data":"52f356947435219b0263bbbd5ce037049ed715085f6b511a973666bee8f1da89"} Feb 03 07:07:40 crc kubenswrapper[4998]: I0203 07:07:40.348795 4998 generic.go:334] "Generic (PLEG): container finished" podID="437ab265-6a33-4c00-8afc-707827fcf7d3" containerID="29722612d0982233b88843f2faf98b88c223434bc9d5d4ecba495fb7bf4b1ab7" exitCode=0 Feb 03 07:07:40 crc kubenswrapper[4998]: I0203 07:07:40.348848 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-ml2n9" event={"ID":"437ab265-6a33-4c00-8afc-707827fcf7d3","Type":"ContainerDied","Data":"29722612d0982233b88843f2faf98b88c223434bc9d5d4ecba495fb7bf4b1ab7"} Feb 03 07:07:40 crc kubenswrapper[4998]: I0203 07:07:40.796627 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 03 07:07:40 crc kubenswrapper[4998]: I0203 07:07:40.796947 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 03 07:07:40 crc 
kubenswrapper[4998]: I0203 07:07:40.806610 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Feb 03 07:07:40 crc kubenswrapper[4998]: I0203 07:07:40.845581 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Feb 03 07:07:40 crc kubenswrapper[4998]: I0203 07:07:40.985935 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-868bc9dc59-wlq6q" Feb 03 07:07:41 crc kubenswrapper[4998]: I0203 07:07:41.059485 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-849fd69845-bz7pb"] Feb 03 07:07:41 crc kubenswrapper[4998]: I0203 07:07:41.059770 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-849fd69845-bz7pb" podUID="be1884cc-0fc6-4769-a362-5d66be382f11" containerName="dnsmasq-dns" containerID="cri-o://6fe430dd90d58b91605ceada00d3f048616c4e1503817e5ff5e42627acce92af" gracePeriod=10 Feb 03 07:07:41 crc kubenswrapper[4998]: I0203 07:07:41.373184 4998 generic.go:334] "Generic (PLEG): container finished" podID="be1884cc-0fc6-4769-a362-5d66be382f11" containerID="6fe430dd90d58b91605ceada00d3f048616c4e1503817e5ff5e42627acce92af" exitCode=0 Feb 03 07:07:41 crc kubenswrapper[4998]: I0203 07:07:41.373297 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-849fd69845-bz7pb" event={"ID":"be1884cc-0fc6-4769-a362-5d66be382f11","Type":"ContainerDied","Data":"6fe430dd90d58b91605ceada00d3f048616c4e1503817e5ff5e42627acce92af"} Feb 03 07:07:41 crc kubenswrapper[4998]: I0203 07:07:41.422147 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Feb 03 07:07:41 crc kubenswrapper[4998]: I0203 07:07:41.854055 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-849fd69845-bz7pb" Feb 03 07:07:41 crc kubenswrapper[4998]: I0203 07:07:41.878971 4998 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6928192e-b9ad-4cf6-8e06-2ec228eed126" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.187:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 03 07:07:41 crc kubenswrapper[4998]: I0203 07:07:41.878994 4998 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6928192e-b9ad-4cf6-8e06-2ec228eed126" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.187:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 03 07:07:41 crc kubenswrapper[4998]: I0203 07:07:41.915517 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/be1884cc-0fc6-4769-a362-5d66be382f11-dns-svc\") pod \"be1884cc-0fc6-4769-a362-5d66be382f11\" (UID: \"be1884cc-0fc6-4769-a362-5d66be382f11\") " Feb 03 07:07:41 crc kubenswrapper[4998]: I0203 07:07:41.915656 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/be1884cc-0fc6-4769-a362-5d66be382f11-ovsdbserver-nb\") pod \"be1884cc-0fc6-4769-a362-5d66be382f11\" (UID: \"be1884cc-0fc6-4769-a362-5d66be382f11\") " Feb 03 07:07:41 crc kubenswrapper[4998]: I0203 07:07:41.915686 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6qtqm\" (UniqueName: \"kubernetes.io/projected/be1884cc-0fc6-4769-a362-5d66be382f11-kube-api-access-6qtqm\") pod \"be1884cc-0fc6-4769-a362-5d66be382f11\" (UID: \"be1884cc-0fc6-4769-a362-5d66be382f11\") " Feb 03 07:07:41 crc kubenswrapper[4998]: I0203 07:07:41.915745 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/be1884cc-0fc6-4769-a362-5d66be382f11-dns-swift-storage-0\") pod \"be1884cc-0fc6-4769-a362-5d66be382f11\" (UID: \"be1884cc-0fc6-4769-a362-5d66be382f11\") " Feb 03 07:07:41 crc kubenswrapper[4998]: I0203 07:07:41.915804 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/be1884cc-0fc6-4769-a362-5d66be382f11-config\") pod \"be1884cc-0fc6-4769-a362-5d66be382f11\" (UID: \"be1884cc-0fc6-4769-a362-5d66be382f11\") " Feb 03 07:07:41 crc kubenswrapper[4998]: I0203 07:07:41.915832 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/be1884cc-0fc6-4769-a362-5d66be382f11-ovsdbserver-sb\") pod \"be1884cc-0fc6-4769-a362-5d66be382f11\" (UID: \"be1884cc-0fc6-4769-a362-5d66be382f11\") " Feb 03 07:07:41 crc kubenswrapper[4998]: I0203 07:07:41.933912 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be1884cc-0fc6-4769-a362-5d66be382f11-kube-api-access-6qtqm" (OuterVolumeSpecName: "kube-api-access-6qtqm") pod "be1884cc-0fc6-4769-a362-5d66be382f11" (UID: "be1884cc-0fc6-4769-a362-5d66be382f11"). InnerVolumeSpecName "kube-api-access-6qtqm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:07:41 crc kubenswrapper[4998]: I0203 07:07:41.962277 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-kmnkm" Feb 03 07:07:41 crc kubenswrapper[4998]: I0203 07:07:41.969084 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-ml2n9" Feb 03 07:07:41 crc kubenswrapper[4998]: I0203 07:07:41.995937 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/be1884cc-0fc6-4769-a362-5d66be382f11-config" (OuterVolumeSpecName: "config") pod "be1884cc-0fc6-4769-a362-5d66be382f11" (UID: "be1884cc-0fc6-4769-a362-5d66be382f11"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.017530 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d737e69-6547-48c5-8bbf-7ca34468e8f6-scripts\") pod \"0d737e69-6547-48c5-8bbf-7ca34468e8f6\" (UID: \"0d737e69-6547-48c5-8bbf-7ca34468e8f6\") " Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.017666 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d737e69-6547-48c5-8bbf-7ca34468e8f6-combined-ca-bundle\") pod \"0d737e69-6547-48c5-8bbf-7ca34468e8f6\" (UID: \"0d737e69-6547-48c5-8bbf-7ca34468e8f6\") " Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.017704 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d737e69-6547-48c5-8bbf-7ca34468e8f6-config-data\") pod \"0d737e69-6547-48c5-8bbf-7ca34468e8f6\" (UID: \"0d737e69-6547-48c5-8bbf-7ca34468e8f6\") " Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.017753 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdvp\" (UniqueName: \"kubernetes.io/projected/0d737e69-6547-48c5-8bbf-7ca34468e8f6-kube-api-access-8tdvp\") pod \"0d737e69-6547-48c5-8bbf-7ca34468e8f6\" (UID: \"0d737e69-6547-48c5-8bbf-7ca34468e8f6\") " Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.018389 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/be1884cc-0fc6-4769-a362-5d66be382f11-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.018409 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6qtqm\" (UniqueName: \"kubernetes.io/projected/be1884cc-0fc6-4769-a362-5d66be382f11-kube-api-access-6qtqm\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.025735 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d737e69-6547-48c5-8bbf-7ca34468e8f6-scripts" (OuterVolumeSpecName: "scripts") pod "0d737e69-6547-48c5-8bbf-7ca34468e8f6" (UID: "0d737e69-6547-48c5-8bbf-7ca34468e8f6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.026449 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/be1884cc-0fc6-4769-a362-5d66be382f11-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "be1884cc-0fc6-4769-a362-5d66be382f11" (UID: "be1884cc-0fc6-4769-a362-5d66be382f11"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.026634 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/be1884cc-0fc6-4769-a362-5d66be382f11-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "be1884cc-0fc6-4769-a362-5d66be382f11" (UID: "be1884cc-0fc6-4769-a362-5d66be382f11"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.030137 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d737e69-6547-48c5-8bbf-7ca34468e8f6-kube-api-access-8tdvp" (OuterVolumeSpecName: "kube-api-access-8tdvp") pod "0d737e69-6547-48c5-8bbf-7ca34468e8f6" (UID: "0d737e69-6547-48c5-8bbf-7ca34468e8f6"). InnerVolumeSpecName "kube-api-access-8tdvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.047805 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/be1884cc-0fc6-4769-a362-5d66be382f11-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "be1884cc-0fc6-4769-a362-5d66be382f11" (UID: "be1884cc-0fc6-4769-a362-5d66be382f11"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.052395 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d737e69-6547-48c5-8bbf-7ca34468e8f6-config-data" (OuterVolumeSpecName: "config-data") pod "0d737e69-6547-48c5-8bbf-7ca34468e8f6" (UID: "0d737e69-6547-48c5-8bbf-7ca34468e8f6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.059480 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/be1884cc-0fc6-4769-a362-5d66be382f11-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "be1884cc-0fc6-4769-a362-5d66be382f11" (UID: "be1884cc-0fc6-4769-a362-5d66be382f11"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.068014 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d737e69-6547-48c5-8bbf-7ca34468e8f6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0d737e69-6547-48c5-8bbf-7ca34468e8f6" (UID: "0d737e69-6547-48c5-8bbf-7ca34468e8f6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.119547 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rdn5c\" (UniqueName: \"kubernetes.io/projected/437ab265-6a33-4c00-8afc-707827fcf7d3-kube-api-access-rdn5c\") pod \"437ab265-6a33-4c00-8afc-707827fcf7d3\" (UID: \"437ab265-6a33-4c00-8afc-707827fcf7d3\") " Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.119597 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/437ab265-6a33-4c00-8afc-707827fcf7d3-config-data\") pod \"437ab265-6a33-4c00-8afc-707827fcf7d3\" (UID: \"437ab265-6a33-4c00-8afc-707827fcf7d3\") " Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.119880 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/437ab265-6a33-4c00-8afc-707827fcf7d3-scripts\") pod \"437ab265-6a33-4c00-8afc-707827fcf7d3\" (UID: \"437ab265-6a33-4c00-8afc-707827fcf7d3\") " Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.119908 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/437ab265-6a33-4c00-8afc-707827fcf7d3-combined-ca-bundle\") pod \"437ab265-6a33-4c00-8afc-707827fcf7d3\" (UID: \"437ab265-6a33-4c00-8afc-707827fcf7d3\") " Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.120266 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d737e69-6547-48c5-8bbf-7ca34468e8f6-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.120282 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/be1884cc-0fc6-4769-a362-5d66be382f11-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.120292 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d737e69-6547-48c5-8bbf-7ca34468e8f6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.120301 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d737e69-6547-48c5-8bbf-7ca34468e8f6-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.120310 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdvp\" (UniqueName: \"kubernetes.io/projected/0d737e69-6547-48c5-8bbf-7ca34468e8f6-kube-api-access-8tdvp\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.120319 4998 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/be1884cc-0fc6-4769-a362-5d66be382f11-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.120327 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/be1884cc-0fc6-4769-a362-5d66be382f11-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.120335 4998 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/be1884cc-0fc6-4769-a362-5d66be382f11-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.122799 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/437ab265-6a33-4c00-8afc-707827fcf7d3-kube-api-access-rdn5c" (OuterVolumeSpecName: "kube-api-access-rdn5c") pod "437ab265-6a33-4c00-8afc-707827fcf7d3" (UID: "437ab265-6a33-4c00-8afc-707827fcf7d3"). InnerVolumeSpecName "kube-api-access-rdn5c". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.123308 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/437ab265-6a33-4c00-8afc-707827fcf7d3-scripts" (OuterVolumeSpecName: "scripts") pod "437ab265-6a33-4c00-8afc-707827fcf7d3" (UID: "437ab265-6a33-4c00-8afc-707827fcf7d3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.144060 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/437ab265-6a33-4c00-8afc-707827fcf7d3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "437ab265-6a33-4c00-8afc-707827fcf7d3" (UID: "437ab265-6a33-4c00-8afc-707827fcf7d3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.144170 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/437ab265-6a33-4c00-8afc-707827fcf7d3-config-data" (OuterVolumeSpecName: "config-data") pod "437ab265-6a33-4c00-8afc-707827fcf7d3" (UID: "437ab265-6a33-4c00-8afc-707827fcf7d3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.222167 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/437ab265-6a33-4c00-8afc-707827fcf7d3-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.222202 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/437ab265-6a33-4c00-8afc-707827fcf7d3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.222214 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rdn5c\" (UniqueName: \"kubernetes.io/projected/437ab265-6a33-4c00-8afc-707827fcf7d3-kube-api-access-rdn5c\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.222225 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/437ab265-6a33-4c00-8afc-707827fcf7d3-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.384295 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-ml2n9" event={"ID":"437ab265-6a33-4c00-8afc-707827fcf7d3","Type":"ContainerDied","Data":"a2b48a5a2f2edfff137f271fb584c7757ddb5daa28e6683759e7626347deccd3"} Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.384336 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a2b48a5a2f2edfff137f271fb584c7757ddb5daa28e6683759e7626347deccd3" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.384335 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-ml2n9" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.395273 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-kmnkm" event={"ID":"0d737e69-6547-48c5-8bbf-7ca34468e8f6","Type":"ContainerDied","Data":"7107b53a3eb587ff78ab5fb53dbdb3fa3118b30d20c307ea5c859b7fde4b8a97"} Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.395309 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7107b53a3eb587ff78ab5fb53dbdb3fa3118b30d20c307ea5c859b7fde4b8a97" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.395378 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-kmnkm" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.402097 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-849fd69845-bz7pb" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.402283 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-849fd69845-bz7pb" event={"ID":"be1884cc-0fc6-4769-a362-5d66be382f11","Type":"ContainerDied","Data":"0befc134c682f11b572ccd527b6e65ab83d3ff579ab7940871fbca0d84437032"} Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.402320 4998 scope.go:117] "RemoveContainer" containerID="6fe430dd90d58b91605ceada00d3f048616c4e1503817e5ff5e42627acce92af" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.458047 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 03 07:07:42 crc kubenswrapper[4998]: E0203 07:07:42.459091 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be1884cc-0fc6-4769-a362-5d66be382f11" containerName="dnsmasq-dns" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.459112 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="be1884cc-0fc6-4769-a362-5d66be382f11" containerName="dnsmasq-dns" Feb 03 07:07:42 crc kubenswrapper[4998]: E0203 07:07:42.459131 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be1884cc-0fc6-4769-a362-5d66be382f11" containerName="init" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.459139 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="be1884cc-0fc6-4769-a362-5d66be382f11" containerName="init" Feb 03 07:07:42 crc kubenswrapper[4998]: E0203 07:07:42.459147 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d737e69-6547-48c5-8bbf-7ca34468e8f6" containerName="nova-cell1-conductor-db-sync" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.459153 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d737e69-6547-48c5-8bbf-7ca34468e8f6" containerName="nova-cell1-conductor-db-sync" Feb 03 07:07:42 crc kubenswrapper[4998]: E0203 07:07:42.459176 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="437ab265-6a33-4c00-8afc-707827fcf7d3" containerName="nova-manage" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.459182 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="437ab265-6a33-4c00-8afc-707827fcf7d3" containerName="nova-manage" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.459339 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="437ab265-6a33-4c00-8afc-707827fcf7d3" containerName="nova-manage" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.459350 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="be1884cc-0fc6-4769-a362-5d66be382f11" containerName="dnsmasq-dns" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.459367 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d737e69-6547-48c5-8bbf-7ca34468e8f6" containerName="nova-cell1-conductor-db-sync" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.460005 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.463383 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.465509 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.474423 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-849fd69845-bz7pb"] Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.484226 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-849fd69845-bz7pb"] Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.523497 4998 scope.go:117] "RemoveContainer" containerID="a15f2ca3a4307e219840bd2fea5f854f35d3e08e1aa50784744aa891be10aa54" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.538314 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvmf5\" (UniqueName: \"kubernetes.io/projected/7d04a830-0b11-4766-b9bd-56a6f4b740ca-kube-api-access-gvmf5\") pod \"nova-cell1-conductor-0\" (UID: \"7d04a830-0b11-4766-b9bd-56a6f4b740ca\") " pod="openstack/nova-cell1-conductor-0" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.538381 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d04a830-0b11-4766-b9bd-56a6f4b740ca-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"7d04a830-0b11-4766-b9bd-56a6f4b740ca\") " pod="openstack/nova-cell1-conductor-0" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.538449 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d04a830-0b11-4766-b9bd-56a6f4b740ca-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"7d04a830-0b11-4766-b9bd-56a6f4b740ca\") " pod="openstack/nova-cell1-conductor-0" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.580763 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.581272 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6928192e-b9ad-4cf6-8e06-2ec228eed126" containerName="nova-api-log" containerID="cri-o://4a88c17630989ed8239da39c28bf5da3949e9fc5efa6d80626f8a6b62e6fe5df" gracePeriod=30 Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.581359 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6928192e-b9ad-4cf6-8e06-2ec228eed126" containerName="nova-api-api" containerID="cri-o://54dbd2cecba320b2fc9213ed3e654126b77a791838b0470017988ad2d984d176" gracePeriod=30 Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.595343 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.639703 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvmf5\" (UniqueName: \"kubernetes.io/projected/7d04a830-0b11-4766-b9bd-56a6f4b740ca-kube-api-access-gvmf5\") pod \"nova-cell1-conductor-0\" (UID: \"7d04a830-0b11-4766-b9bd-56a6f4b740ca\") " pod="openstack/nova-cell1-conductor-0" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.639770 4998 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d04a830-0b11-4766-b9bd-56a6f4b740ca-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"7d04a830-0b11-4766-b9bd-56a6f4b740ca\") " pod="openstack/nova-cell1-conductor-0" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.639883 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d04a830-0b11-4766-b9bd-56a6f4b740ca-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"7d04a830-0b11-4766-b9bd-56a6f4b740ca\") " pod="openstack/nova-cell1-conductor-0" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.645063 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d04a830-0b11-4766-b9bd-56a6f4b740ca-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"7d04a830-0b11-4766-b9bd-56a6f4b740ca\") " pod="openstack/nova-cell1-conductor-0" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.646926 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d04a830-0b11-4766-b9bd-56a6f4b740ca-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"7d04a830-0b11-4766-b9bd-56a6f4b740ca\") " pod="openstack/nova-cell1-conductor-0" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.660327 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gvmf5\" (UniqueName: \"kubernetes.io/projected/7d04a830-0b11-4766-b9bd-56a6f4b740ca-kube-api-access-gvmf5\") pod \"nova-cell1-conductor-0\" (UID: \"7d04a830-0b11-4766-b9bd-56a6f4b740ca\") " pod="openstack/nova-cell1-conductor-0" Feb 03 07:07:42 crc kubenswrapper[4998]: I0203 07:07:42.780831 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Feb 03 07:07:43 crc kubenswrapper[4998]: I0203 07:07:43.375715 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 03 07:07:43 crc kubenswrapper[4998]: W0203 07:07:43.391742 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7d04a830_0b11_4766_b9bd_56a6f4b740ca.slice/crio-00f0c3127085073649953819f7d46f3820f2d37eebd2b1d90b011d11858cdbe8 WatchSource:0}: Error finding container 00f0c3127085073649953819f7d46f3820f2d37eebd2b1d90b011d11858cdbe8: Status 404 returned error can't find the container with id 00f0c3127085073649953819f7d46f3820f2d37eebd2b1d90b011d11858cdbe8 Feb 03 07:07:43 crc kubenswrapper[4998]: I0203 07:07:43.412995 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"7d04a830-0b11-4766-b9bd-56a6f4b740ca","Type":"ContainerStarted","Data":"00f0c3127085073649953819f7d46f3820f2d37eebd2b1d90b011d11858cdbe8"} Feb 03 07:07:43 crc kubenswrapper[4998]: I0203 07:07:43.416611 4998 generic.go:334] "Generic (PLEG): container finished" podID="6928192e-b9ad-4cf6-8e06-2ec228eed126" containerID="4a88c17630989ed8239da39c28bf5da3949e9fc5efa6d80626f8a6b62e6fe5df" exitCode=143 Feb 03 07:07:43 crc kubenswrapper[4998]: I0203 07:07:43.416671 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6928192e-b9ad-4cf6-8e06-2ec228eed126","Type":"ContainerDied","Data":"4a88c17630989ed8239da39c28bf5da3949e9fc5efa6d80626f8a6b62e6fe5df"} Feb 03 07:07:43 crc kubenswrapper[4998]: I0203 07:07:43.419481 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="06818903-6457-43bf-880e-8b29514a8a08" containerName="nova-scheduler-scheduler" containerID="cri-o://489aa80d4620b45b72b6036eee3b1a56613964548de8bfb4462feabb0a1495fa" gracePeriod=30 Feb 03 07:07:44 crc kubenswrapper[4998]: I0203 07:07:44.449803 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.449766632 podStartE2EDuration="2.449766632s" podCreationTimestamp="2026-02-03 07:07:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:07:44.442310349 +0000 UTC m=+1302.729004175" watchObservedRunningTime="2026-02-03 07:07:44.449766632 +0000 UTC m=+1302.736460438" Feb 03 07:07:44 crc kubenswrapper[4998]: I0203 07:07:44.457898 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be1884cc-0fc6-4769-a362-5d66be382f11" path="/var/lib/kubelet/pods/be1884cc-0fc6-4769-a362-5d66be382f11/volumes" Feb 03 07:07:44 crc kubenswrapper[4998]: I0203 07:07:44.459066 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"7d04a830-0b11-4766-b9bd-56a6f4b740ca","Type":"ContainerStarted","Data":"1c1e1c81810cf3242e2c7121522b6a0136f94d3920df202359c1972e68fa1635"} Feb 03 07:07:44 crc kubenswrapper[4998]: I0203 07:07:44.459215 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Feb 03 07:07:45 crc kubenswrapper[4998]: E0203 07:07:45.809135 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="489aa80d4620b45b72b6036eee3b1a56613964548de8bfb4462feabb0a1495fa" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Feb 03 07:07:45 crc kubenswrapper[4998]: E0203 07:07:45.811233 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="489aa80d4620b45b72b6036eee3b1a56613964548de8bfb4462feabb0a1495fa" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Feb 03 07:07:45 crc kubenswrapper[4998]: E0203 07:07:45.813722 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="489aa80d4620b45b72b6036eee3b1a56613964548de8bfb4462feabb0a1495fa" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Feb 03 07:07:45 crc kubenswrapper[4998]: E0203 07:07:45.813810 4998 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="06818903-6457-43bf-880e-8b29514a8a08" containerName="nova-scheduler-scheduler" Feb 03 07:07:47 crc kubenswrapper[4998]: I0203 07:07:47.293559 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 03 07:07:47 crc kubenswrapper[4998]: I0203 07:07:47.381715 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Feb 03 07:07:47 crc kubenswrapper[4998]: I0203 07:07:47.446628 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06818903-6457-43bf-880e-8b29514a8a08-config-data\") pod \"06818903-6457-43bf-880e-8b29514a8a08\" (UID: \"06818903-6457-43bf-880e-8b29514a8a08\") " Feb 03 07:07:47 crc kubenswrapper[4998]: I0203 07:07:47.446683 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06818903-6457-43bf-880e-8b29514a8a08-combined-ca-bundle\") pod \"06818903-6457-43bf-880e-8b29514a8a08\" (UID: \"06818903-6457-43bf-880e-8b29514a8a08\") " Feb 03 07:07:47 crc kubenswrapper[4998]: I0203 07:07:47.446714 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7rcnl\" (UniqueName: \"kubernetes.io/projected/06818903-6457-43bf-880e-8b29514a8a08-kube-api-access-7rcnl\") pod \"06818903-6457-43bf-880e-8b29514a8a08\" (UID: \"06818903-6457-43bf-880e-8b29514a8a08\") " Feb 03 07:07:47 crc kubenswrapper[4998]: I0203 07:07:47.465075 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06818903-6457-43bf-880e-8b29514a8a08-kube-api-access-7rcnl" (OuterVolumeSpecName: "kube-api-access-7rcnl") pod "06818903-6457-43bf-880e-8b29514a8a08" (UID: "06818903-6457-43bf-880e-8b29514a8a08"). InnerVolumeSpecName "kube-api-access-7rcnl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:07:47 crc kubenswrapper[4998]: I0203 07:07:47.466233 4998 generic.go:334] "Generic (PLEG): container finished" podID="06818903-6457-43bf-880e-8b29514a8a08" containerID="489aa80d4620b45b72b6036eee3b1a56613964548de8bfb4462feabb0a1495fa" exitCode=0 Feb 03 07:07:47 crc kubenswrapper[4998]: I0203 07:07:47.466277 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"06818903-6457-43bf-880e-8b29514a8a08","Type":"ContainerDied","Data":"489aa80d4620b45b72b6036eee3b1a56613964548de8bfb4462feabb0a1495fa"} Feb 03 07:07:47 crc kubenswrapper[4998]: I0203 07:07:47.466316 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"06818903-6457-43bf-880e-8b29514a8a08","Type":"ContainerDied","Data":"44efb5e262378e3ea5987c8e71172a0619d98b141c8ecd1c39a3aa0b308a48f1"} Feb 03 07:07:47 crc kubenswrapper[4998]: I0203 07:07:47.466336 4998 scope.go:117] "RemoveContainer" containerID="489aa80d4620b45b72b6036eee3b1a56613964548de8bfb4462feabb0a1495fa" Feb 03 07:07:47 crc kubenswrapper[4998]: I0203 07:07:47.466477 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 03 07:07:47 crc kubenswrapper[4998]: E0203 07:07:47.477044 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/06818903-6457-43bf-880e-8b29514a8a08-combined-ca-bundle podName:06818903-6457-43bf-880e-8b29514a8a08 nodeName:}" failed. No retries permitted until 2026-02-03 07:07:47.977016903 +0000 UTC m=+1306.263710709 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/06818903-6457-43bf-880e-8b29514a8a08-combined-ca-bundle") pod "06818903-6457-43bf-880e-8b29514a8a08" (UID: "06818903-6457-43bf-880e-8b29514a8a08") : error deleting /var/lib/kubelet/pods/06818903-6457-43bf-880e-8b29514a8a08/volume-subpaths: remove /var/lib/kubelet/pods/06818903-6457-43bf-880e-8b29514a8a08/volume-subpaths: no such file or directory Feb 03 07:07:47 crc kubenswrapper[4998]: I0203 07:07:47.481270 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06818903-6457-43bf-880e-8b29514a8a08-config-data" (OuterVolumeSpecName: "config-data") pod "06818903-6457-43bf-880e-8b29514a8a08" (UID: "06818903-6457-43bf-880e-8b29514a8a08"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:07:47 crc kubenswrapper[4998]: I0203 07:07:47.549366 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06818903-6457-43bf-880e-8b29514a8a08-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:47 crc kubenswrapper[4998]: I0203 07:07:47.549600 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7rcnl\" (UniqueName: \"kubernetes.io/projected/06818903-6457-43bf-880e-8b29514a8a08-kube-api-access-7rcnl\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:47 crc kubenswrapper[4998]: I0203 07:07:47.560492 4998 scope.go:117] "RemoveContainer" containerID="489aa80d4620b45b72b6036eee3b1a56613964548de8bfb4462feabb0a1495fa" Feb 03 07:07:47 crc kubenswrapper[4998]: E0203 07:07:47.560922 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"489aa80d4620b45b72b6036eee3b1a56613964548de8bfb4462feabb0a1495fa\": container with ID starting with 489aa80d4620b45b72b6036eee3b1a56613964548de8bfb4462feabb0a1495fa not found: ID does not exist" containerID="489aa80d4620b45b72b6036eee3b1a56613964548de8bfb4462feabb0a1495fa" Feb 03 07:07:47 crc kubenswrapper[4998]: I0203 07:07:47.560968 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"489aa80d4620b45b72b6036eee3b1a56613964548de8bfb4462feabb0a1495fa"} err="failed to get container status \"489aa80d4620b45b72b6036eee3b1a56613964548de8bfb4462feabb0a1495fa\": rpc error: code = NotFound desc = could not find container \"489aa80d4620b45b72b6036eee3b1a56613964548de8bfb4462feabb0a1495fa\": container with ID starting with 489aa80d4620b45b72b6036eee3b1a56613964548de8bfb4462feabb0a1495fa not found: ID does not exist" Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.058020 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06818903-6457-43bf-880e-8b29514a8a08-combined-ca-bundle\") pod \"06818903-6457-43bf-880e-8b29514a8a08\" (UID: \"06818903-6457-43bf-880e-8b29514a8a08\") " Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.068445 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06818903-6457-43bf-880e-8b29514a8a08-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "06818903-6457-43bf-880e-8b29514a8a08" (UID: "06818903-6457-43bf-880e-8b29514a8a08"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.160886 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06818903-6457-43bf-880e-8b29514a8a08-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.400467 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.408862 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.440276 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06818903-6457-43bf-880e-8b29514a8a08" path="/var/lib/kubelet/pods/06818903-6457-43bf-880e-8b29514a8a08/volumes" Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.441099 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 07:07:48 crc kubenswrapper[4998]: E0203 07:07:48.441445 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06818903-6457-43bf-880e-8b29514a8a08" containerName="nova-scheduler-scheduler" Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.441522 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="06818903-6457-43bf-880e-8b29514a8a08" containerName="nova-scheduler-scheduler" Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.441820 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="06818903-6457-43bf-880e-8b29514a8a08" containerName="nova-scheduler-scheduler" Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.442623 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.445026 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.447689 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.481956 4998 generic.go:334] "Generic (PLEG): container finished" podID="6928192e-b9ad-4cf6-8e06-2ec228eed126" containerID="54dbd2cecba320b2fc9213ed3e654126b77a791838b0470017988ad2d984d176" exitCode=0 Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.482012 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6928192e-b9ad-4cf6-8e06-2ec228eed126","Type":"ContainerDied","Data":"54dbd2cecba320b2fc9213ed3e654126b77a791838b0470017988ad2d984d176"} Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.482038 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6928192e-b9ad-4cf6-8e06-2ec228eed126","Type":"ContainerDied","Data":"82effbcb9b24f9a96d2f1e8d36b7d54690767179859d9dc717649b69f9db9906"} Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.482049 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="82effbcb9b24f9a96d2f1e8d36b7d54690767179859d9dc717649b69f9db9906" Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.496564 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.569386 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6928192e-b9ad-4cf6-8e06-2ec228eed126-config-data\") pod \"6928192e-b9ad-4cf6-8e06-2ec228eed126\" (UID: \"6928192e-b9ad-4cf6-8e06-2ec228eed126\") " Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.569568 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-swrjb\" (UniqueName: \"kubernetes.io/projected/6928192e-b9ad-4cf6-8e06-2ec228eed126-kube-api-access-swrjb\") pod \"6928192e-b9ad-4cf6-8e06-2ec228eed126\" (UID: \"6928192e-b9ad-4cf6-8e06-2ec228eed126\") " Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.569694 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6928192e-b9ad-4cf6-8e06-2ec228eed126-combined-ca-bundle\") pod \"6928192e-b9ad-4cf6-8e06-2ec228eed126\" (UID: \"6928192e-b9ad-4cf6-8e06-2ec228eed126\") " Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.569822 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6928192e-b9ad-4cf6-8e06-2ec228eed126-logs\") pod \"6928192e-b9ad-4cf6-8e06-2ec228eed126\" (UID: \"6928192e-b9ad-4cf6-8e06-2ec228eed126\") " Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.570297 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqxzb\" (UniqueName: \"kubernetes.io/projected/c3c7cdda-009a-4dea-a2ab-3e093e5c39c6-kube-api-access-vqxzb\") pod \"nova-scheduler-0\" (UID: \"c3c7cdda-009a-4dea-a2ab-3e093e5c39c6\") " pod="openstack/nova-scheduler-0" Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.570347 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3c7cdda-009a-4dea-a2ab-3e093e5c39c6-config-data\") pod \"nova-scheduler-0\" (UID: \"c3c7cdda-009a-4dea-a2ab-3e093e5c39c6\") " pod="openstack/nova-scheduler-0" Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.570413 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3c7cdda-009a-4dea-a2ab-3e093e5c39c6-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c3c7cdda-009a-4dea-a2ab-3e093e5c39c6\") " pod="openstack/nova-scheduler-0" Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.570811 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6928192e-b9ad-4cf6-8e06-2ec228eed126-logs" (OuterVolumeSpecName: "logs") pod "6928192e-b9ad-4cf6-8e06-2ec228eed126" (UID: "6928192e-b9ad-4cf6-8e06-2ec228eed126"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.571757 4998 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6928192e-b9ad-4cf6-8e06-2ec228eed126-logs\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.573833 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6928192e-b9ad-4cf6-8e06-2ec228eed126-kube-api-access-swrjb" (OuterVolumeSpecName: "kube-api-access-swrjb") pod "6928192e-b9ad-4cf6-8e06-2ec228eed126" (UID: "6928192e-b9ad-4cf6-8e06-2ec228eed126"). InnerVolumeSpecName "kube-api-access-swrjb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.595398 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6928192e-b9ad-4cf6-8e06-2ec228eed126-config-data" (OuterVolumeSpecName: "config-data") pod "6928192e-b9ad-4cf6-8e06-2ec228eed126" (UID: "6928192e-b9ad-4cf6-8e06-2ec228eed126"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.599128 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6928192e-b9ad-4cf6-8e06-2ec228eed126-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6928192e-b9ad-4cf6-8e06-2ec228eed126" (UID: "6928192e-b9ad-4cf6-8e06-2ec228eed126"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.674084 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqxzb\" (UniqueName: \"kubernetes.io/projected/c3c7cdda-009a-4dea-a2ab-3e093e5c39c6-kube-api-access-vqxzb\") pod \"nova-scheduler-0\" (UID: \"c3c7cdda-009a-4dea-a2ab-3e093e5c39c6\") " pod="openstack/nova-scheduler-0" Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.674149 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3c7cdda-009a-4dea-a2ab-3e093e5c39c6-config-data\") pod \"nova-scheduler-0\" (UID: \"c3c7cdda-009a-4dea-a2ab-3e093e5c39c6\") " pod="openstack/nova-scheduler-0" Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.674192 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3c7cdda-009a-4dea-a2ab-3e093e5c39c6-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c3c7cdda-009a-4dea-a2ab-3e093e5c39c6\") " pod="openstack/nova-scheduler-0" Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.674353 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6928192e-b9ad-4cf6-8e06-2ec228eed126-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.674368 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-swrjb\" (UniqueName: \"kubernetes.io/projected/6928192e-b9ad-4cf6-8e06-2ec228eed126-kube-api-access-swrjb\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.674383 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6928192e-b9ad-4cf6-8e06-2ec228eed126-combined-ca-bundle\") on node \"crc\" DevicePath 
\"\"" Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.678594 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3c7cdda-009a-4dea-a2ab-3e093e5c39c6-config-data\") pod \"nova-scheduler-0\" (UID: \"c3c7cdda-009a-4dea-a2ab-3e093e5c39c6\") " pod="openstack/nova-scheduler-0" Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.679258 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3c7cdda-009a-4dea-a2ab-3e093e5c39c6-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c3c7cdda-009a-4dea-a2ab-3e093e5c39c6\") " pod="openstack/nova-scheduler-0" Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.702239 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqxzb\" (UniqueName: \"kubernetes.io/projected/c3c7cdda-009a-4dea-a2ab-3e093e5c39c6-kube-api-access-vqxzb\") pod \"nova-scheduler-0\" (UID: \"c3c7cdda-009a-4dea-a2ab-3e093e5c39c6\") " pod="openstack/nova-scheduler-0" Feb 03 07:07:48 crc kubenswrapper[4998]: I0203 07:07:48.813140 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 03 07:07:49 crc kubenswrapper[4998]: I0203 07:07:49.255863 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 07:07:49 crc kubenswrapper[4998]: W0203 07:07:49.259797 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc3c7cdda_009a_4dea_a2ab_3e093e5c39c6.slice/crio-573e6aa57b9c1710b70a9c2c1fdaf3f3927711abb65e4a2f388232d317600e00 WatchSource:0}: Error finding container 573e6aa57b9c1710b70a9c2c1fdaf3f3927711abb65e4a2f388232d317600e00: Status 404 returned error can't find the container with id 573e6aa57b9c1710b70a9c2c1fdaf3f3927711abb65e4a2f388232d317600e00 Feb 03 07:07:49 crc kubenswrapper[4998]: I0203 07:07:49.498259 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 03 07:07:49 crc kubenswrapper[4998]: I0203 07:07:49.502541 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c3c7cdda-009a-4dea-a2ab-3e093e5c39c6","Type":"ContainerStarted","Data":"5199bbaab58d095e3c5146f0fbe53ad8df8668270c1799d5d4a1d419d1236d3c"} Feb 03 07:07:49 crc kubenswrapper[4998]: I0203 07:07:49.502590 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c3c7cdda-009a-4dea-a2ab-3e093e5c39c6","Type":"ContainerStarted","Data":"573e6aa57b9c1710b70a9c2c1fdaf3f3927711abb65e4a2f388232d317600e00"} Feb 03 07:07:49 crc kubenswrapper[4998]: I0203 07:07:49.520626 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.520607635 podStartE2EDuration="1.520607635s" podCreationTimestamp="2026-02-03 07:07:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:07:49.515098518 +0000 UTC m=+1307.801792344" watchObservedRunningTime="2026-02-03 07:07:49.520607635 +0000 UTC m=+1307.807301441" Feb 03 07:07:49 crc kubenswrapper[4998]: I0203 07:07:49.546148 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 03 07:07:49 crc kubenswrapper[4998]: I0203 07:07:49.555246 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Feb 03 07:07:49 crc kubenswrapper[4998]: I0203 07:07:49.567036 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Feb 03 07:07:49 crc kubenswrapper[4998]: E0203 07:07:49.567499 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6928192e-b9ad-4cf6-8e06-2ec228eed126" containerName="nova-api-api" Feb 03 07:07:49 crc kubenswrapper[4998]: I0203 07:07:49.567564 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="6928192e-b9ad-4cf6-8e06-2ec228eed126" containerName="nova-api-api" Feb 03 07:07:49 crc kubenswrapper[4998]: E0203 07:07:49.567605 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6928192e-b9ad-4cf6-8e06-2ec228eed126" containerName="nova-api-log" Feb 03 07:07:49 crc kubenswrapper[4998]: I0203 07:07:49.567613 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="6928192e-b9ad-4cf6-8e06-2ec228eed126" containerName="nova-api-log" Feb 03 07:07:49 crc kubenswrapper[4998]: I0203 07:07:49.567853 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="6928192e-b9ad-4cf6-8e06-2ec228eed126" containerName="nova-api-log" Feb 03 07:07:49 crc kubenswrapper[4998]: I0203 07:07:49.567887 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="6928192e-b9ad-4cf6-8e06-2ec228eed126" containerName="nova-api-api" Feb 03 07:07:49 crc kubenswrapper[4998]: I0203 07:07:49.568978 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 03 07:07:49 crc kubenswrapper[4998]: I0203 07:07:49.571288 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Feb 03 07:07:49 crc kubenswrapper[4998]: I0203 07:07:49.579217 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 03 07:07:49 crc kubenswrapper[4998]: I0203 07:07:49.701392 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/12b26172-a00e-4d89-9129-00fe6712e6c7-logs\") pod \"nova-api-0\" (UID: \"12b26172-a00e-4d89-9129-00fe6712e6c7\") " pod="openstack/nova-api-0" Feb 03 07:07:49 crc kubenswrapper[4998]: I0203 07:07:49.701504 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j84qg\" (UniqueName: \"kubernetes.io/projected/12b26172-a00e-4d89-9129-00fe6712e6c7-kube-api-access-j84qg\") pod \"nova-api-0\" (UID: \"12b26172-a00e-4d89-9129-00fe6712e6c7\") " pod="openstack/nova-api-0" Feb 03 07:07:49 crc kubenswrapper[4998]: I0203 07:07:49.701622 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12b26172-a00e-4d89-9129-00fe6712e6c7-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"12b26172-a00e-4d89-9129-00fe6712e6c7\") " pod="openstack/nova-api-0" Feb 03 07:07:49 crc kubenswrapper[4998]: I0203 07:07:49.701669 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/12b26172-a00e-4d89-9129-00fe6712e6c7-config-data\") pod \"nova-api-0\" (UID: \"12b26172-a00e-4d89-9129-00fe6712e6c7\") " pod="openstack/nova-api-0" Feb 03 07:07:49 crc kubenswrapper[4998]: I0203 07:07:49.802928 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/12b26172-a00e-4d89-9129-00fe6712e6c7-config-data\") pod \"nova-api-0\" (UID: \"12b26172-a00e-4d89-9129-00fe6712e6c7\") " pod="openstack/nova-api-0" Feb 03 07:07:49 crc kubenswrapper[4998]: I0203 07:07:49.803183 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/12b26172-a00e-4d89-9129-00fe6712e6c7-logs\") pod \"nova-api-0\" (UID: \"12b26172-a00e-4d89-9129-00fe6712e6c7\") " pod="openstack/nova-api-0" Feb 03 07:07:49 crc kubenswrapper[4998]: I0203 07:07:49.803245 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j84qg\" (UniqueName: \"kubernetes.io/projected/12b26172-a00e-4d89-9129-00fe6712e6c7-kube-api-access-j84qg\") pod \"nova-api-0\" (UID: \"12b26172-a00e-4d89-9129-00fe6712e6c7\") " pod="openstack/nova-api-0" Feb 03 07:07:49 crc kubenswrapper[4998]: I0203 07:07:49.803333 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12b26172-a00e-4d89-9129-00fe6712e6c7-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"12b26172-a00e-4d89-9129-00fe6712e6c7\") " pod="openstack/nova-api-0" Feb 03 07:07:49 crc kubenswrapper[4998]: I0203 07:07:49.804395 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/12b26172-a00e-4d89-9129-00fe6712e6c7-logs\") pod \"nova-api-0\" (UID: \"12b26172-a00e-4d89-9129-00fe6712e6c7\") " 
pod="openstack/nova-api-0" Feb 03 07:07:49 crc kubenswrapper[4998]: I0203 07:07:49.809634 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/12b26172-a00e-4d89-9129-00fe6712e6c7-config-data\") pod \"nova-api-0\" (UID: \"12b26172-a00e-4d89-9129-00fe6712e6c7\") " pod="openstack/nova-api-0" Feb 03 07:07:49 crc kubenswrapper[4998]: I0203 07:07:49.810090 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12b26172-a00e-4d89-9129-00fe6712e6c7-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"12b26172-a00e-4d89-9129-00fe6712e6c7\") " pod="openstack/nova-api-0" Feb 03 07:07:49 crc kubenswrapper[4998]: I0203 07:07:49.838000 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j84qg\" (UniqueName: \"kubernetes.io/projected/12b26172-a00e-4d89-9129-00fe6712e6c7-kube-api-access-j84qg\") pod \"nova-api-0\" (UID: \"12b26172-a00e-4d89-9129-00fe6712e6c7\") " pod="openstack/nova-api-0" Feb 03 07:07:49 crc kubenswrapper[4998]: I0203 07:07:49.884236 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 03 07:07:50 crc kubenswrapper[4998]: I0203 07:07:50.438408 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6928192e-b9ad-4cf6-8e06-2ec228eed126" path="/var/lib/kubelet/pods/6928192e-b9ad-4cf6-8e06-2ec228eed126/volumes" Feb 03 07:07:50 crc kubenswrapper[4998]: I0203 07:07:50.477264 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 03 07:07:50 crc kubenswrapper[4998]: W0203 07:07:50.480719 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod12b26172_a00e_4d89_9129_00fe6712e6c7.slice/crio-52ab47d7a50f697c7a3597263dd0ad106dcf990467b66c52b4f44d86375c6162 WatchSource:0}: Error finding container 52ab47d7a50f697c7a3597263dd0ad106dcf990467b66c52b4f44d86375c6162: Status 404 returned error can't find the container with id 52ab47d7a50f697c7a3597263dd0ad106dcf990467b66c52b4f44d86375c6162 Feb 03 07:07:50 crc kubenswrapper[4998]: I0203 07:07:50.512083 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"12b26172-a00e-4d89-9129-00fe6712e6c7","Type":"ContainerStarted","Data":"52ab47d7a50f697c7a3597263dd0ad106dcf990467b66c52b4f44d86375c6162"} Feb 03 07:07:51 crc kubenswrapper[4998]: I0203 07:07:51.523240 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"12b26172-a00e-4d89-9129-00fe6712e6c7","Type":"ContainerStarted","Data":"6fdd5fb17be38409a8883ab84d0a39e9f24a345d034f67cff54131ed39bb8f7e"} Feb 03 07:07:51 crc kubenswrapper[4998]: I0203 07:07:51.523496 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"12b26172-a00e-4d89-9129-00fe6712e6c7","Type":"ContainerStarted","Data":"1417b4979181c956361e869005d120271e96ca441fc4cc159ac55b76833e6e2b"} Feb 03 07:07:51 crc kubenswrapper[4998]: I0203 07:07:51.779603 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.779581362 podStartE2EDuration="2.779581362s" podCreationTimestamp="2026-02-03 07:07:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:07:51.555167161 +0000 UTC m=+1309.841860987" 
watchObservedRunningTime="2026-02-03 07:07:51.779581362 +0000 UTC m=+1310.066275178" Feb 03 07:07:51 crc kubenswrapper[4998]: I0203 07:07:51.787911 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 03 07:07:51 crc kubenswrapper[4998]: I0203 07:07:51.788136 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="217d1e8d-a95d-4152-a8d9-e843cd3b7260" containerName="kube-state-metrics" containerID="cri-o://4269fce0bda4ee5b3b2f866da6decc1780be724837c0c217232b9223984a0c4c" gracePeriod=30 Feb 03 07:07:52 crc kubenswrapper[4998]: I0203 07:07:52.284458 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 03 07:07:52 crc kubenswrapper[4998]: I0203 07:07:52.456401 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gp8mf\" (UniqueName: \"kubernetes.io/projected/217d1e8d-a95d-4152-a8d9-e843cd3b7260-kube-api-access-gp8mf\") pod \"217d1e8d-a95d-4152-a8d9-e843cd3b7260\" (UID: \"217d1e8d-a95d-4152-a8d9-e843cd3b7260\") " Feb 03 07:07:52 crc kubenswrapper[4998]: I0203 07:07:52.469027 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/217d1e8d-a95d-4152-a8d9-e843cd3b7260-kube-api-access-gp8mf" (OuterVolumeSpecName: "kube-api-access-gp8mf") pod "217d1e8d-a95d-4152-a8d9-e843cd3b7260" (UID: "217d1e8d-a95d-4152-a8d9-e843cd3b7260"). InnerVolumeSpecName "kube-api-access-gp8mf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:07:52 crc kubenswrapper[4998]: I0203 07:07:52.533939 4998 generic.go:334] "Generic (PLEG): container finished" podID="217d1e8d-a95d-4152-a8d9-e843cd3b7260" containerID="4269fce0bda4ee5b3b2f866da6decc1780be724837c0c217232b9223984a0c4c" exitCode=2 Feb 03 07:07:52 crc kubenswrapper[4998]: I0203 07:07:52.534000 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 03 07:07:52 crc kubenswrapper[4998]: I0203 07:07:52.534016 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"217d1e8d-a95d-4152-a8d9-e843cd3b7260","Type":"ContainerDied","Data":"4269fce0bda4ee5b3b2f866da6decc1780be724837c0c217232b9223984a0c4c"} Feb 03 07:07:52 crc kubenswrapper[4998]: I0203 07:07:52.534059 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"217d1e8d-a95d-4152-a8d9-e843cd3b7260","Type":"ContainerDied","Data":"4327206695f6fc4ed7a7665b971c8e9f1a942858a93e90a6e29cef9a68e47826"} Feb 03 07:07:52 crc kubenswrapper[4998]: I0203 07:07:52.534077 4998 scope.go:117] "RemoveContainer" containerID="4269fce0bda4ee5b3b2f866da6decc1780be724837c0c217232b9223984a0c4c" Feb 03 07:07:52 crc kubenswrapper[4998]: I0203 07:07:52.562306 4998 scope.go:117] "RemoveContainer" containerID="4269fce0bda4ee5b3b2f866da6decc1780be724837c0c217232b9223984a0c4c" Feb 03 07:07:52 crc kubenswrapper[4998]: E0203 07:07:52.562738 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4269fce0bda4ee5b3b2f866da6decc1780be724837c0c217232b9223984a0c4c\": container with ID starting with 4269fce0bda4ee5b3b2f866da6decc1780be724837c0c217232b9223984a0c4c not found: ID does not exist" containerID="4269fce0bda4ee5b3b2f866da6decc1780be724837c0c217232b9223984a0c4c" Feb 03 07:07:52 crc kubenswrapper[4998]: I0203 07:07:52.562766 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4269fce0bda4ee5b3b2f866da6decc1780be724837c0c217232b9223984a0c4c"} err="failed to get container status \"4269fce0bda4ee5b3b2f866da6decc1780be724837c0c217232b9223984a0c4c\": rpc error: code = NotFound desc = could not find container \"4269fce0bda4ee5b3b2f866da6decc1780be724837c0c217232b9223984a0c4c\": container with ID starting with 4269fce0bda4ee5b3b2f866da6decc1780be724837c0c217232b9223984a0c4c not found: ID does not exist" Feb 03 07:07:52 crc kubenswrapper[4998]: I0203 07:07:52.563286 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gp8mf\" (UniqueName: \"kubernetes.io/projected/217d1e8d-a95d-4152-a8d9-e843cd3b7260-kube-api-access-gp8mf\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:52 crc kubenswrapper[4998]: I0203 07:07:52.576143 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 03 07:07:52 crc kubenswrapper[4998]: I0203 07:07:52.585752 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 03 07:07:52 crc kubenswrapper[4998]: I0203 07:07:52.593740 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Feb 03 07:07:52 crc kubenswrapper[4998]: E0203 07:07:52.594173 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="217d1e8d-a95d-4152-a8d9-e843cd3b7260" containerName="kube-state-metrics" Feb 03 07:07:52 crc kubenswrapper[4998]: I0203 07:07:52.594190 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="217d1e8d-a95d-4152-a8d9-e843cd3b7260" containerName="kube-state-metrics" Feb 03 07:07:52 crc kubenswrapper[4998]: I0203 07:07:52.594380 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="217d1e8d-a95d-4152-a8d9-e843cd3b7260" containerName="kube-state-metrics" Feb 03 07:07:52 crc kubenswrapper[4998]: I0203 07:07:52.594967 4998 util.go:30] "No sandbox for pod can be 
Feb 03 07:07:52 crc kubenswrapper[4998]: I0203 07:07:52.597606 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc"
Feb 03 07:07:52 crc kubenswrapper[4998]: I0203 07:07:52.602075 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Feb 03 07:07:52 crc kubenswrapper[4998]: I0203 07:07:52.613550 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config"
Feb 03 07:07:52 crc kubenswrapper[4998]: I0203 07:07:52.766291 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11bbac6a-fd7e-447b-af99-d0ebada848df-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"11bbac6a-fd7e-447b-af99-d0ebada848df\") " pod="openstack/kube-state-metrics-0"
Feb 03 07:07:52 crc kubenswrapper[4998]: I0203 07:07:52.766367 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8xkjp\" (UniqueName: \"kubernetes.io/projected/11bbac6a-fd7e-447b-af99-d0ebada848df-kube-api-access-8xkjp\") pod \"kube-state-metrics-0\" (UID: \"11bbac6a-fd7e-447b-af99-d0ebada848df\") " pod="openstack/kube-state-metrics-0"
Feb 03 07:07:52 crc kubenswrapper[4998]: I0203 07:07:52.766583 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/11bbac6a-fd7e-447b-af99-d0ebada848df-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"11bbac6a-fd7e-447b-af99-d0ebada848df\") " pod="openstack/kube-state-metrics-0"
Feb 03 07:07:52 crc kubenswrapper[4998]: I0203 07:07:52.766648 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/11bbac6a-fd7e-447b-af99-d0ebada848df-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"11bbac6a-fd7e-447b-af99-d0ebada848df\") " pod="openstack/kube-state-metrics-0"
Feb 03 07:07:52 crc kubenswrapper[4998]: I0203 07:07:52.810276 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0"
Feb 03 07:07:52 crc kubenswrapper[4998]: I0203 07:07:52.868194 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8xkjp\" (UniqueName: \"kubernetes.io/projected/11bbac6a-fd7e-447b-af99-d0ebada848df-kube-api-access-8xkjp\") pod \"kube-state-metrics-0\" (UID: \"11bbac6a-fd7e-447b-af99-d0ebada848df\") " pod="openstack/kube-state-metrics-0"
Feb 03 07:07:52 crc kubenswrapper[4998]: I0203 07:07:52.868286 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/11bbac6a-fd7e-447b-af99-d0ebada848df-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"11bbac6a-fd7e-447b-af99-d0ebada848df\") " pod="openstack/kube-state-metrics-0"
Feb 03 07:07:52 crc kubenswrapper[4998]: I0203 07:07:52.868310 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/11bbac6a-fd7e-447b-af99-d0ebada848df-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"11bbac6a-fd7e-447b-af99-d0ebada848df\") " pod="openstack/kube-state-metrics-0"
Feb 03 07:07:52 crc kubenswrapper[4998]: I0203 07:07:52.868396 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11bbac6a-fd7e-447b-af99-d0ebada848df-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"11bbac6a-fd7e-447b-af99-d0ebada848df\") " pod="openstack/kube-state-metrics-0"
Feb 03 07:07:52 crc kubenswrapper[4998]: I0203 07:07:52.872153 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11bbac6a-fd7e-447b-af99-d0ebada848df-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"11bbac6a-fd7e-447b-af99-d0ebada848df\") " pod="openstack/kube-state-metrics-0"
Feb 03 07:07:52 crc kubenswrapper[4998]: I0203 07:07:52.872740 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/11bbac6a-fd7e-447b-af99-d0ebada848df-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"11bbac6a-fd7e-447b-af99-d0ebada848df\") " pod="openstack/kube-state-metrics-0"
Feb 03 07:07:52 crc kubenswrapper[4998]: I0203 07:07:52.873478 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/11bbac6a-fd7e-447b-af99-d0ebada848df-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"11bbac6a-fd7e-447b-af99-d0ebada848df\") " pod="openstack/kube-state-metrics-0"
Feb 03 07:07:52 crc kubenswrapper[4998]: I0203 07:07:52.887216 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8xkjp\" (UniqueName: \"kubernetes.io/projected/11bbac6a-fd7e-447b-af99-d0ebada848df-kube-api-access-8xkjp\") pod \"kube-state-metrics-0\" (UID: \"11bbac6a-fd7e-447b-af99-d0ebada848df\") " pod="openstack/kube-state-metrics-0"
Feb 03 07:07:52 crc kubenswrapper[4998]: I0203 07:07:52.916991 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Feb 03 07:07:53 crc kubenswrapper[4998]: W0203 07:07:53.376181 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod11bbac6a_fd7e_447b_af99_d0ebada848df.slice/crio-a0f85722fdd3ee4fadb797fc4382cc57942cd4b679971803845a89ce09bfe1e2 WatchSource:0}: Error finding container a0f85722fdd3ee4fadb797fc4382cc57942cd4b679971803845a89ce09bfe1e2: Status 404 returned error can't find the container with id a0f85722fdd3ee4fadb797fc4382cc57942cd4b679971803845a89ce09bfe1e2
Feb 03 07:07:53 crc kubenswrapper[4998]: I0203 07:07:53.377027 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Feb 03 07:07:53 crc kubenswrapper[4998]: I0203 07:07:53.543606 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"11bbac6a-fd7e-447b-af99-d0ebada848df","Type":"ContainerStarted","Data":"a0f85722fdd3ee4fadb797fc4382cc57942cd4b679971803845a89ce09bfe1e2"}
Feb 03 07:07:53 crc kubenswrapper[4998]: I0203 07:07:53.814768 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Feb 03 07:07:53 crc kubenswrapper[4998]: I0203 07:07:53.951615 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Feb 03 07:07:53 crc kubenswrapper[4998]: I0203 07:07:53.951949 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3614c866-f0e0-433c-9bc1-9601e83c479f" containerName="ceilometer-central-agent" containerID="cri-o://399d45913cb9eab8eb1e4267fcd7e6d1cfdce55f450959649fbc69c1df47bced" gracePeriod=30
Feb 03 07:07:53 crc kubenswrapper[4998]: I0203 07:07:53.951978 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3614c866-f0e0-433c-9bc1-9601e83c479f" containerName="proxy-httpd" containerID="cri-o://e1ca138319732aaeed7b3eca0957a664db0ff4d7e9e21ac834109166de0a53de" gracePeriod=30
Feb 03 07:07:53 crc kubenswrapper[4998]: I0203 07:07:53.952010 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3614c866-f0e0-433c-9bc1-9601e83c479f" containerName="sg-core" containerID="cri-o://33df0fedaa8a6ba9ced2dc1af5357329eb9b9365b1144e9121248a8b0882f833" gracePeriod=30
Feb 03 07:07:53 crc kubenswrapper[4998]: I0203 07:07:53.952046 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3614c866-f0e0-433c-9bc1-9601e83c479f" containerName="ceilometer-notification-agent" containerID="cri-o://505e445e1d61f5314d4840da4a3de1b3067fcf492e8165992044b9893ba78241" gracePeriod=30
Feb 03 07:07:54 crc kubenswrapper[4998]: I0203 07:07:54.437110 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="217d1e8d-a95d-4152-a8d9-e843cd3b7260" path="/var/lib/kubelet/pods/217d1e8d-a95d-4152-a8d9-e843cd3b7260/volumes"
Feb 03 07:07:54 crc kubenswrapper[4998]: I0203 07:07:54.556035 4998 generic.go:334] "Generic (PLEG): container finished" podID="3614c866-f0e0-433c-9bc1-9601e83c479f" containerID="e1ca138319732aaeed7b3eca0957a664db0ff4d7e9e21ac834109166de0a53de" exitCode=0
Feb 03 07:07:54 crc kubenswrapper[4998]: I0203 07:07:54.556068 4998 generic.go:334] "Generic (PLEG): container finished" podID="3614c866-f0e0-433c-9bc1-9601e83c479f" containerID="33df0fedaa8a6ba9ced2dc1af5357329eb9b9365b1144e9121248a8b0882f833" exitCode=2
Feb 03 07:07:54 crc kubenswrapper[4998]: I0203 07:07:54.556075 4998 generic.go:334] "Generic (PLEG): container finished" podID="3614c866-f0e0-433c-9bc1-9601e83c479f" containerID="399d45913cb9eab8eb1e4267fcd7e6d1cfdce55f450959649fbc69c1df47bced" exitCode=0
Feb 03 07:07:54 crc kubenswrapper[4998]: I0203 07:07:54.556114 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3614c866-f0e0-433c-9bc1-9601e83c479f","Type":"ContainerDied","Data":"e1ca138319732aaeed7b3eca0957a664db0ff4d7e9e21ac834109166de0a53de"}
Feb 03 07:07:54 crc kubenswrapper[4998]: I0203 07:07:54.556181 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3614c866-f0e0-433c-9bc1-9601e83c479f","Type":"ContainerDied","Data":"33df0fedaa8a6ba9ced2dc1af5357329eb9b9365b1144e9121248a8b0882f833"}
Feb 03 07:07:54 crc kubenswrapper[4998]: I0203 07:07:54.556197 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3614c866-f0e0-433c-9bc1-9601e83c479f","Type":"ContainerDied","Data":"399d45913cb9eab8eb1e4267fcd7e6d1cfdce55f450959649fbc69c1df47bced"}
Feb 03 07:07:54 crc kubenswrapper[4998]: I0203 07:07:54.558541 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"11bbac6a-fd7e-447b-af99-d0ebada848df","Type":"ContainerStarted","Data":"aa6b3f085720a2a5d6f33b6f4c5735de460cf2e60593dc25d3f9bedcbf5d7741"}
Feb 03 07:07:54 crc kubenswrapper[4998]: I0203 07:07:54.558688 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0"
Feb 03 07:07:55 crc kubenswrapper[4998]: I0203 07:07:55.569877 4998 generic.go:334] "Generic (PLEG): container finished" podID="3614c866-f0e0-433c-9bc1-9601e83c479f" containerID="505e445e1d61f5314d4840da4a3de1b3067fcf492e8165992044b9893ba78241" exitCode=0
Feb 03 07:07:55 crc kubenswrapper[4998]: I0203 07:07:55.569915 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3614c866-f0e0-433c-9bc1-9601e83c479f","Type":"ContainerDied","Data":"505e445e1d61f5314d4840da4a3de1b3067fcf492e8165992044b9893ba78241"}
Feb 03 07:07:55 crc kubenswrapper[4998]: I0203 07:07:55.569967 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3614c866-f0e0-433c-9bc1-9601e83c479f","Type":"ContainerDied","Data":"fdc89ff3daa4d811529c0edcb49de816aefb6cf6cb3a862a790c68a09afb5557"}
Feb 03 07:07:55 crc kubenswrapper[4998]: I0203 07:07:55.569983 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fdc89ff3daa4d811529c0edcb49de816aefb6cf6cb3a862a790c68a09afb5557"
Feb 03 07:07:55 crc kubenswrapper[4998]: I0203 07:07:55.608895 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Feb 03 07:07:55 crc kubenswrapper[4998]: I0203 07:07:55.632621 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=3.275286978 podStartE2EDuration="3.632607068s" podCreationTimestamp="2026-02-03 07:07:52 +0000 UTC" firstStartedPulling="2026-02-03 07:07:53.37887331 +0000 UTC m=+1311.665567116" lastFinishedPulling="2026-02-03 07:07:53.7361934 +0000 UTC m=+1312.022887206" observedRunningTime="2026-02-03 07:07:54.590968505 +0000 UTC m=+1312.877662341" watchObservedRunningTime="2026-02-03 07:07:55.632607068 +0000 UTC m=+1313.919300874"
Feb 03 07:07:55 crc kubenswrapper[4998]: I0203 07:07:55.728110 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3614c866-f0e0-433c-9bc1-9601e83c479f-log-httpd\") pod \"3614c866-f0e0-433c-9bc1-9601e83c479f\" (UID: \"3614c866-f0e0-433c-9bc1-9601e83c479f\") "
Feb 03 07:07:55 crc kubenswrapper[4998]: I0203 07:07:55.728161 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fvk27\" (UniqueName: \"kubernetes.io/projected/3614c866-f0e0-433c-9bc1-9601e83c479f-kube-api-access-fvk27\") pod \"3614c866-f0e0-433c-9bc1-9601e83c479f\" (UID: \"3614c866-f0e0-433c-9bc1-9601e83c479f\") "
Feb 03 07:07:55 crc kubenswrapper[4998]: I0203 07:07:55.728218 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3614c866-f0e0-433c-9bc1-9601e83c479f-run-httpd\") pod \"3614c866-f0e0-433c-9bc1-9601e83c479f\" (UID: \"3614c866-f0e0-433c-9bc1-9601e83c479f\") "
Feb 03 07:07:55 crc kubenswrapper[4998]: I0203 07:07:55.728244 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3614c866-f0e0-433c-9bc1-9601e83c479f-sg-core-conf-yaml\") pod \"3614c866-f0e0-433c-9bc1-9601e83c479f\" (UID: \"3614c866-f0e0-433c-9bc1-9601e83c479f\") "
Feb 03 07:07:55 crc kubenswrapper[4998]: I0203 07:07:55.728306 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3614c866-f0e0-433c-9bc1-9601e83c479f-combined-ca-bundle\") pod \"3614c866-f0e0-433c-9bc1-9601e83c479f\" (UID: \"3614c866-f0e0-433c-9bc1-9601e83c479f\") "
Feb 03 07:07:55 crc kubenswrapper[4998]: I0203 07:07:55.728324 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3614c866-f0e0-433c-9bc1-9601e83c479f-scripts\") pod \"3614c866-f0e0-433c-9bc1-9601e83c479f\" (UID: \"3614c866-f0e0-433c-9bc1-9601e83c479f\") "
Feb 03 07:07:55 crc kubenswrapper[4998]: I0203 07:07:55.728407 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3614c866-f0e0-433c-9bc1-9601e83c479f-config-data\") pod \"3614c866-f0e0-433c-9bc1-9601e83c479f\" (UID: \"3614c866-f0e0-433c-9bc1-9601e83c479f\") "
Feb 03 07:07:55 crc kubenswrapper[4998]: I0203 07:07:55.729064 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3614c866-f0e0-433c-9bc1-9601e83c479f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "3614c866-f0e0-433c-9bc1-9601e83c479f" (UID: "3614c866-f0e0-433c-9bc1-9601e83c479f"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 07:07:55 crc kubenswrapper[4998]: I0203 07:07:55.729616 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3614c866-f0e0-433c-9bc1-9601e83c479f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "3614c866-f0e0-433c-9bc1-9601e83c479f" (UID: "3614c866-f0e0-433c-9bc1-9601e83c479f"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 07:07:55 crc kubenswrapper[4998]: I0203 07:07:55.734020 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3614c866-f0e0-433c-9bc1-9601e83c479f-kube-api-access-fvk27" (OuterVolumeSpecName: "kube-api-access-fvk27") pod "3614c866-f0e0-433c-9bc1-9601e83c479f" (UID: "3614c866-f0e0-433c-9bc1-9601e83c479f"). InnerVolumeSpecName "kube-api-access-fvk27". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 07:07:55 crc kubenswrapper[4998]: I0203 07:07:55.734017 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3614c866-f0e0-433c-9bc1-9601e83c479f-scripts" (OuterVolumeSpecName: "scripts") pod "3614c866-f0e0-433c-9bc1-9601e83c479f" (UID: "3614c866-f0e0-433c-9bc1-9601e83c479f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:07:55 crc kubenswrapper[4998]: I0203 07:07:55.756433 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3614c866-f0e0-433c-9bc1-9601e83c479f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "3614c866-f0e0-433c-9bc1-9601e83c479f" (UID: "3614c866-f0e0-433c-9bc1-9601e83c479f"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:07:55 crc kubenswrapper[4998]: I0203 07:07:55.822717 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3614c866-f0e0-433c-9bc1-9601e83c479f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3614c866-f0e0-433c-9bc1-9601e83c479f" (UID: "3614c866-f0e0-433c-9bc1-9601e83c479f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:07:55 crc kubenswrapper[4998]: I0203 07:07:55.830195 4998 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3614c866-f0e0-433c-9bc1-9601e83c479f-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:55 crc kubenswrapper[4998]: I0203 07:07:55.830223 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fvk27\" (UniqueName: \"kubernetes.io/projected/3614c866-f0e0-433c-9bc1-9601e83c479f-kube-api-access-fvk27\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:55 crc kubenswrapper[4998]: I0203 07:07:55.830235 4998 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3614c866-f0e0-433c-9bc1-9601e83c479f-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:55 crc kubenswrapper[4998]: I0203 07:07:55.830245 4998 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3614c866-f0e0-433c-9bc1-9601e83c479f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:55 crc kubenswrapper[4998]: I0203 07:07:55.830256 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3614c866-f0e0-433c-9bc1-9601e83c479f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:55 crc kubenswrapper[4998]: I0203 07:07:55.830266 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3614c866-f0e0-433c-9bc1-9601e83c479f-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:55 crc kubenswrapper[4998]: I0203 07:07:55.832437 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3614c866-f0e0-433c-9bc1-9601e83c479f-config-data" (OuterVolumeSpecName: "config-data") pod "3614c866-f0e0-433c-9bc1-9601e83c479f" (UID: "3614c866-f0e0-433c-9bc1-9601e83c479f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:07:55 crc kubenswrapper[4998]: I0203 07:07:55.931691 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3614c866-f0e0-433c-9bc1-9601e83c479f-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.581084 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.603249 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.615222 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.624797 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:07:56 crc kubenswrapper[4998]: E0203 07:07:56.625140 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3614c866-f0e0-433c-9bc1-9601e83c479f" containerName="ceilometer-notification-agent" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.625157 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="3614c866-f0e0-433c-9bc1-9601e83c479f" containerName="ceilometer-notification-agent" Feb 03 07:07:56 crc kubenswrapper[4998]: E0203 07:07:56.625180 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3614c866-f0e0-433c-9bc1-9601e83c479f" containerName="sg-core" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.625187 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="3614c866-f0e0-433c-9bc1-9601e83c479f" containerName="sg-core" Feb 03 07:07:56 crc kubenswrapper[4998]: E0203 07:07:56.625206 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3614c866-f0e0-433c-9bc1-9601e83c479f" containerName="proxy-httpd" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.625212 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="3614c866-f0e0-433c-9bc1-9601e83c479f" containerName="proxy-httpd" Feb 03 07:07:56 crc kubenswrapper[4998]: E0203 07:07:56.625223 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3614c866-f0e0-433c-9bc1-9601e83c479f" containerName="ceilometer-central-agent" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.625229 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="3614c866-f0e0-433c-9bc1-9601e83c479f" containerName="ceilometer-central-agent" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.625383 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="3614c866-f0e0-433c-9bc1-9601e83c479f" containerName="ceilometer-notification-agent" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.625396 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="3614c866-f0e0-433c-9bc1-9601e83c479f" containerName="proxy-httpd" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.625406 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="3614c866-f0e0-433c-9bc1-9601e83c479f" containerName="ceilometer-central-agent" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.625424 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="3614c866-f0e0-433c-9bc1-9601e83c479f" containerName="sg-core" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.626950 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.629457 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.629820 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.629975 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.641770 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.745563 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/948b79ac-ffa3-4848-8800-462eb894cb0c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"948b79ac-ffa3-4848-8800-462eb894cb0c\") " pod="openstack/ceilometer-0" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.745664 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wlpgr\" (UniqueName: \"kubernetes.io/projected/948b79ac-ffa3-4848-8800-462eb894cb0c-kube-api-access-wlpgr\") pod \"ceilometer-0\" (UID: \"948b79ac-ffa3-4848-8800-462eb894cb0c\") " pod="openstack/ceilometer-0" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.745699 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/948b79ac-ffa3-4848-8800-462eb894cb0c-log-httpd\") pod \"ceilometer-0\" (UID: \"948b79ac-ffa3-4848-8800-462eb894cb0c\") " pod="openstack/ceilometer-0" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.745731 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/948b79ac-ffa3-4848-8800-462eb894cb0c-config-data\") pod \"ceilometer-0\" (UID: \"948b79ac-ffa3-4848-8800-462eb894cb0c\") " pod="openstack/ceilometer-0" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.745908 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/948b79ac-ffa3-4848-8800-462eb894cb0c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"948b79ac-ffa3-4848-8800-462eb894cb0c\") " pod="openstack/ceilometer-0" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.745979 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/948b79ac-ffa3-4848-8800-462eb894cb0c-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"948b79ac-ffa3-4848-8800-462eb894cb0c\") " pod="openstack/ceilometer-0" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.746025 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/948b79ac-ffa3-4848-8800-462eb894cb0c-scripts\") pod \"ceilometer-0\" (UID: \"948b79ac-ffa3-4848-8800-462eb894cb0c\") " pod="openstack/ceilometer-0" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.746063 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/948b79ac-ffa3-4848-8800-462eb894cb0c-run-httpd\") pod \"ceilometer-0\" (UID: \"948b79ac-ffa3-4848-8800-462eb894cb0c\") " pod="openstack/ceilometer-0" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.848488 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/948b79ac-ffa3-4848-8800-462eb894cb0c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"948b79ac-ffa3-4848-8800-462eb894cb0c\") " pod="openstack/ceilometer-0" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.848581 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wlpgr\" (UniqueName: \"kubernetes.io/projected/948b79ac-ffa3-4848-8800-462eb894cb0c-kube-api-access-wlpgr\") pod \"ceilometer-0\" (UID: \"948b79ac-ffa3-4848-8800-462eb894cb0c\") " pod="openstack/ceilometer-0" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.848606 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/948b79ac-ffa3-4848-8800-462eb894cb0c-log-httpd\") pod \"ceilometer-0\" (UID: \"948b79ac-ffa3-4848-8800-462eb894cb0c\") " pod="openstack/ceilometer-0" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.848638 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/948b79ac-ffa3-4848-8800-462eb894cb0c-config-data\") pod \"ceilometer-0\" (UID: \"948b79ac-ffa3-4848-8800-462eb894cb0c\") " pod="openstack/ceilometer-0" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.848674 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/948b79ac-ffa3-4848-8800-462eb894cb0c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"948b79ac-ffa3-4848-8800-462eb894cb0c\") " pod="openstack/ceilometer-0" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.848702 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/948b79ac-ffa3-4848-8800-462eb894cb0c-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"948b79ac-ffa3-4848-8800-462eb894cb0c\") " pod="openstack/ceilometer-0" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.848725 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/948b79ac-ffa3-4848-8800-462eb894cb0c-scripts\") pod \"ceilometer-0\" (UID: \"948b79ac-ffa3-4848-8800-462eb894cb0c\") " pod="openstack/ceilometer-0" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.848750 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/948b79ac-ffa3-4848-8800-462eb894cb0c-run-httpd\") pod \"ceilometer-0\" (UID: \"948b79ac-ffa3-4848-8800-462eb894cb0c\") " pod="openstack/ceilometer-0" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.849245 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/948b79ac-ffa3-4848-8800-462eb894cb0c-log-httpd\") pod \"ceilometer-0\" (UID: \"948b79ac-ffa3-4848-8800-462eb894cb0c\") " pod="openstack/ceilometer-0" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.849279 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/948b79ac-ffa3-4848-8800-462eb894cb0c-run-httpd\") pod \"ceilometer-0\" (UID: \"948b79ac-ffa3-4848-8800-462eb894cb0c\") " pod="openstack/ceilometer-0" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.852754 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/948b79ac-ffa3-4848-8800-462eb894cb0c-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"948b79ac-ffa3-4848-8800-462eb894cb0c\") " pod="openstack/ceilometer-0" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.853289 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/948b79ac-ffa3-4848-8800-462eb894cb0c-scripts\") pod \"ceilometer-0\" (UID: \"948b79ac-ffa3-4848-8800-462eb894cb0c\") " pod="openstack/ceilometer-0" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.853330 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/948b79ac-ffa3-4848-8800-462eb894cb0c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"948b79ac-ffa3-4848-8800-462eb894cb0c\") " pod="openstack/ceilometer-0" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.853497 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/948b79ac-ffa3-4848-8800-462eb894cb0c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"948b79ac-ffa3-4848-8800-462eb894cb0c\") " pod="openstack/ceilometer-0" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.871990 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wlpgr\" (UniqueName: \"kubernetes.io/projected/948b79ac-ffa3-4848-8800-462eb894cb0c-kube-api-access-wlpgr\") pod \"ceilometer-0\" (UID: \"948b79ac-ffa3-4848-8800-462eb894cb0c\") " pod="openstack/ceilometer-0" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.873131 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/948b79ac-ffa3-4848-8800-462eb894cb0c-config-data\") pod \"ceilometer-0\" (UID: \"948b79ac-ffa3-4848-8800-462eb894cb0c\") " pod="openstack/ceilometer-0" Feb 03 07:07:56 crc kubenswrapper[4998]: I0203 07:07:56.942685 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:07:57 crc kubenswrapper[4998]: I0203 07:07:57.372510 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:07:57 crc kubenswrapper[4998]: W0203 07:07:57.374032 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod948b79ac_ffa3_4848_8800_462eb894cb0c.slice/crio-db535ba041f5f0ecc1caa74c6ba022027f005e895dd9b2dcd5f060ecae9eb80a WatchSource:0}: Error finding container db535ba041f5f0ecc1caa74c6ba022027f005e895dd9b2dcd5f060ecae9eb80a: Status 404 returned error can't find the container with id db535ba041f5f0ecc1caa74c6ba022027f005e895dd9b2dcd5f060ecae9eb80a Feb 03 07:07:57 crc kubenswrapper[4998]: I0203 07:07:57.592089 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"948b79ac-ffa3-4848-8800-462eb894cb0c","Type":"ContainerStarted","Data":"db535ba041f5f0ecc1caa74c6ba022027f005e895dd9b2dcd5f060ecae9eb80a"} Feb 03 07:07:58 crc kubenswrapper[4998]: I0203 07:07:58.438917 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3614c866-f0e0-433c-9bc1-9601e83c479f" path="/var/lib/kubelet/pods/3614c866-f0e0-433c-9bc1-9601e83c479f/volumes" Feb 03 07:07:58 crc kubenswrapper[4998]: I0203 07:07:58.602459 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"948b79ac-ffa3-4848-8800-462eb894cb0c","Type":"ContainerStarted","Data":"bdfed3c637d5a7e646aafe270bfdd002d1e9e70fc02fb8311d733fa26a2bd587"} Feb 03 07:07:58 crc kubenswrapper[4998]: I0203 07:07:58.936805 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Feb 03 07:07:58 crc kubenswrapper[4998]: I0203 07:07:58.978522 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Feb 03 07:07:59 crc kubenswrapper[4998]: I0203 07:07:59.614019 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"948b79ac-ffa3-4848-8800-462eb894cb0c","Type":"ContainerStarted","Data":"5d7d41cca39d4807855aca639882004c6a0546d3132beb684028e22eee232e64"} Feb 03 07:07:59 crc kubenswrapper[4998]: I0203 07:07:59.641573 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Feb 03 07:07:59 crc kubenswrapper[4998]: I0203 07:07:59.885004 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 03 07:07:59 crc kubenswrapper[4998]: I0203 07:07:59.885333 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 03 07:08:00 crc kubenswrapper[4998]: I0203 07:08:00.645627 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"948b79ac-ffa3-4848-8800-462eb894cb0c","Type":"ContainerStarted","Data":"68b523af09a7e35d7d9fbb9ab1b0fab68d289b1089cd6f955bee40a1839a9c65"} Feb 03 07:08:00 crc kubenswrapper[4998]: I0203 07:08:00.936976 4998 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="12b26172-a00e-4d89-9129-00fe6712e6c7" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.194:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 03 07:08:00 crc kubenswrapper[4998]: I0203 07:08:00.978028 4998 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" 
podUID="12b26172-a00e-4d89-9129-00fe6712e6c7" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.194:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 03 07:08:02 crc kubenswrapper[4998]: I0203 07:08:02.664695 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"948b79ac-ffa3-4848-8800-462eb894cb0c","Type":"ContainerStarted","Data":"bcba74e1e0494391ede410b57660f2f6f4c653650d3a0894b654391063d77965"} Feb 03 07:08:02 crc kubenswrapper[4998]: I0203 07:08:02.665312 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 03 07:08:02 crc kubenswrapper[4998]: I0203 07:08:02.689251 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.676467319 podStartE2EDuration="6.689230252s" podCreationTimestamp="2026-02-03 07:07:56 +0000 UTC" firstStartedPulling="2026-02-03 07:07:57.377120285 +0000 UTC m=+1315.663814091" lastFinishedPulling="2026-02-03 07:08:02.389883218 +0000 UTC m=+1320.676577024" observedRunningTime="2026-02-03 07:08:02.687208074 +0000 UTC m=+1320.973901880" watchObservedRunningTime="2026-02-03 07:08:02.689230252 +0000 UTC m=+1320.975924058" Feb 03 07:08:02 crc kubenswrapper[4998]: I0203 07:08:02.930277 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Feb 03 07:08:05 crc kubenswrapper[4998]: I0203 07:08:05.701965 4998 generic.go:334] "Generic (PLEG): container finished" podID="ab00de0e-0ad9-4085-abff-d9383cfeb712" containerID="67f41d70a452f5e2318d430f8ca6dbbc9d678bac6ed6dbeb680ad9f6fab077e5" exitCode=137 Feb 03 07:08:05 crc kubenswrapper[4998]: I0203 07:08:05.702038 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ab00de0e-0ad9-4085-abff-d9383cfeb712","Type":"ContainerDied","Data":"67f41d70a452f5e2318d430f8ca6dbbc9d678bac6ed6dbeb680ad9f6fab077e5"} Feb 03 07:08:05 crc kubenswrapper[4998]: I0203 07:08:05.702584 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ab00de0e-0ad9-4085-abff-d9383cfeb712","Type":"ContainerDied","Data":"098a9e764b727f8b132ed5460b5255dfe6aebbbd821d63161f674bccadb32daf"} Feb 03 07:08:05 crc kubenswrapper[4998]: I0203 07:08:05.702603 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="098a9e764b727f8b132ed5460b5255dfe6aebbbd821d63161f674bccadb32daf" Feb 03 07:08:05 crc kubenswrapper[4998]: I0203 07:08:05.704604 4998 generic.go:334] "Generic (PLEG): container finished" podID="acfcdb86-84f3-4f51-b3d2-57be7f9694f6" containerID="d4267783ff77c4a1082e7b0ede7b9c76fef91525658a5053c8142da8e9ca8397" exitCode=137 Feb 03 07:08:05 crc kubenswrapper[4998]: I0203 07:08:05.704638 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"acfcdb86-84f3-4f51-b3d2-57be7f9694f6","Type":"ContainerDied","Data":"d4267783ff77c4a1082e7b0ede7b9c76fef91525658a5053c8142da8e9ca8397"} Feb 03 07:08:05 crc kubenswrapper[4998]: I0203 07:08:05.708599 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 03 07:08:05 crc kubenswrapper[4998]: I0203 07:08:05.761112 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab00de0e-0ad9-4085-abff-d9383cfeb712-combined-ca-bundle\") pod \"ab00de0e-0ad9-4085-abff-d9383cfeb712\" (UID: \"ab00de0e-0ad9-4085-abff-d9383cfeb712\") " Feb 03 07:08:05 crc kubenswrapper[4998]: I0203 07:08:05.761234 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab00de0e-0ad9-4085-abff-d9383cfeb712-config-data\") pod \"ab00de0e-0ad9-4085-abff-d9383cfeb712\" (UID: \"ab00de0e-0ad9-4085-abff-d9383cfeb712\") " Feb 03 07:08:05 crc kubenswrapper[4998]: I0203 07:08:05.761264 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ab00de0e-0ad9-4085-abff-d9383cfeb712-logs\") pod \"ab00de0e-0ad9-4085-abff-d9383cfeb712\" (UID: \"ab00de0e-0ad9-4085-abff-d9383cfeb712\") " Feb 03 07:08:05 crc kubenswrapper[4998]: I0203 07:08:05.761322 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kmvpv\" (UniqueName: \"kubernetes.io/projected/ab00de0e-0ad9-4085-abff-d9383cfeb712-kube-api-access-kmvpv\") pod \"ab00de0e-0ad9-4085-abff-d9383cfeb712\" (UID: \"ab00de0e-0ad9-4085-abff-d9383cfeb712\") " Feb 03 07:08:05 crc kubenswrapper[4998]: I0203 07:08:05.765010 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab00de0e-0ad9-4085-abff-d9383cfeb712-logs" (OuterVolumeSpecName: "logs") pod "ab00de0e-0ad9-4085-abff-d9383cfeb712" (UID: "ab00de0e-0ad9-4085-abff-d9383cfeb712"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:08:05 crc kubenswrapper[4998]: I0203 07:08:05.768055 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab00de0e-0ad9-4085-abff-d9383cfeb712-kube-api-access-kmvpv" (OuterVolumeSpecName: "kube-api-access-kmvpv") pod "ab00de0e-0ad9-4085-abff-d9383cfeb712" (UID: "ab00de0e-0ad9-4085-abff-d9383cfeb712"). InnerVolumeSpecName "kube-api-access-kmvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:08:05 crc kubenswrapper[4998]: I0203 07:08:05.786986 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab00de0e-0ad9-4085-abff-d9383cfeb712-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ab00de0e-0ad9-4085-abff-d9383cfeb712" (UID: "ab00de0e-0ad9-4085-abff-d9383cfeb712"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:08:05 crc kubenswrapper[4998]: I0203 07:08:05.787626 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab00de0e-0ad9-4085-abff-d9383cfeb712-config-data" (OuterVolumeSpecName: "config-data") pod "ab00de0e-0ad9-4085-abff-d9383cfeb712" (UID: "ab00de0e-0ad9-4085-abff-d9383cfeb712"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:08:05 crc kubenswrapper[4998]: I0203 07:08:05.812467 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:08:05 crc kubenswrapper[4998]: I0203 07:08:05.862648 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab00de0e-0ad9-4085-abff-d9383cfeb712-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:05 crc kubenswrapper[4998]: I0203 07:08:05.862987 4998 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ab00de0e-0ad9-4085-abff-d9383cfeb712-logs\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:05 crc kubenswrapper[4998]: I0203 07:08:05.863049 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kmvpv\" (UniqueName: \"kubernetes.io/projected/ab00de0e-0ad9-4085-abff-d9383cfeb712-kube-api-access-kmvpv\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:05 crc kubenswrapper[4998]: I0203 07:08:05.863106 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab00de0e-0ad9-4085-abff-d9383cfeb712-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:05 crc kubenswrapper[4998]: I0203 07:08:05.964255 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acfcdb86-84f3-4f51-b3d2-57be7f9694f6-combined-ca-bundle\") pod \"acfcdb86-84f3-4f51-b3d2-57be7f9694f6\" (UID: \"acfcdb86-84f3-4f51-b3d2-57be7f9694f6\") " Feb 03 07:08:05 crc kubenswrapper[4998]: I0203 07:08:05.964298 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rl56m\" (UniqueName: \"kubernetes.io/projected/acfcdb86-84f3-4f51-b3d2-57be7f9694f6-kube-api-access-rl56m\") pod \"acfcdb86-84f3-4f51-b3d2-57be7f9694f6\" (UID: \"acfcdb86-84f3-4f51-b3d2-57be7f9694f6\") " Feb 03 07:08:05 crc kubenswrapper[4998]: I0203 07:08:05.964329 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acfcdb86-84f3-4f51-b3d2-57be7f9694f6-config-data\") pod \"acfcdb86-84f3-4f51-b3d2-57be7f9694f6\" (UID: \"acfcdb86-84f3-4f51-b3d2-57be7f9694f6\") " Feb 03 07:08:05 crc kubenswrapper[4998]: I0203 07:08:05.967066 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/acfcdb86-84f3-4f51-b3d2-57be7f9694f6-kube-api-access-rl56m" (OuterVolumeSpecName: "kube-api-access-rl56m") pod "acfcdb86-84f3-4f51-b3d2-57be7f9694f6" (UID: "acfcdb86-84f3-4f51-b3d2-57be7f9694f6"). InnerVolumeSpecName "kube-api-access-rl56m". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:08:05 crc kubenswrapper[4998]: I0203 07:08:05.992996 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acfcdb86-84f3-4f51-b3d2-57be7f9694f6-config-data" (OuterVolumeSpecName: "config-data") pod "acfcdb86-84f3-4f51-b3d2-57be7f9694f6" (UID: "acfcdb86-84f3-4f51-b3d2-57be7f9694f6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:08:05 crc kubenswrapper[4998]: I0203 07:08:05.994362 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acfcdb86-84f3-4f51-b3d2-57be7f9694f6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "acfcdb86-84f3-4f51-b3d2-57be7f9694f6" (UID: "acfcdb86-84f3-4f51-b3d2-57be7f9694f6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:08:06 crc kubenswrapper[4998]: I0203 07:08:06.066737 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acfcdb86-84f3-4f51-b3d2-57be7f9694f6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:06 crc kubenswrapper[4998]: I0203 07:08:06.066775 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rl56m\" (UniqueName: \"kubernetes.io/projected/acfcdb86-84f3-4f51-b3d2-57be7f9694f6-kube-api-access-rl56m\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:06 crc kubenswrapper[4998]: I0203 07:08:06.066800 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acfcdb86-84f3-4f51-b3d2-57be7f9694f6-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:06 crc kubenswrapper[4998]: I0203 07:08:06.713410 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 03 07:08:06 crc kubenswrapper[4998]: I0203 07:08:06.713408 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"acfcdb86-84f3-4f51-b3d2-57be7f9694f6","Type":"ContainerDied","Data":"e9da2bbd019da4a3a22738a0b89d2da1236d5d1e85d854d92ac71542e916a2d9"} Feb 03 07:08:06 crc kubenswrapper[4998]: I0203 07:08:06.713445 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:08:06 crc kubenswrapper[4998]: I0203 07:08:06.714367 4998 scope.go:117] "RemoveContainer" containerID="d4267783ff77c4a1082e7b0ede7b9c76fef91525658a5053c8142da8e9ca8397" Feb 03 07:08:06 crc kubenswrapper[4998]: I0203 07:08:06.742134 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 03 07:08:06 crc kubenswrapper[4998]: I0203 07:08:06.754072 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 03 07:08:06 crc kubenswrapper[4998]: I0203 07:08:06.763812 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 03 07:08:06 crc kubenswrapper[4998]: I0203 07:08:06.777409 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Feb 03 07:08:06 crc kubenswrapper[4998]: I0203 07:08:06.788875 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 03 07:08:06 crc kubenswrapper[4998]: E0203 07:08:06.789282 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab00de0e-0ad9-4085-abff-d9383cfeb712" containerName="nova-metadata-log" Feb 03 07:08:06 crc kubenswrapper[4998]: I0203 07:08:06.789297 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab00de0e-0ad9-4085-abff-d9383cfeb712" containerName="nova-metadata-log" Feb 03 07:08:06 crc kubenswrapper[4998]: E0203 07:08:06.789313 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acfcdb86-84f3-4f51-b3d2-57be7f9694f6" containerName="nova-cell1-novncproxy-novncproxy" Feb 03 07:08:06 crc kubenswrapper[4998]: I0203 07:08:06.789319 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="acfcdb86-84f3-4f51-b3d2-57be7f9694f6" containerName="nova-cell1-novncproxy-novncproxy" Feb 03 07:08:06 crc kubenswrapper[4998]: E0203 07:08:06.789333 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab00de0e-0ad9-4085-abff-d9383cfeb712" containerName="nova-metadata-metadata" Feb 03 07:08:06 crc 
kubenswrapper[4998]: I0203 07:08:06.789338 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab00de0e-0ad9-4085-abff-d9383cfeb712" containerName="nova-metadata-metadata" Feb 03 07:08:06 crc kubenswrapper[4998]: I0203 07:08:06.789503 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab00de0e-0ad9-4085-abff-d9383cfeb712" containerName="nova-metadata-metadata" Feb 03 07:08:06 crc kubenswrapper[4998]: I0203 07:08:06.789515 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab00de0e-0ad9-4085-abff-d9383cfeb712" containerName="nova-metadata-log" Feb 03 07:08:06 crc kubenswrapper[4998]: I0203 07:08:06.789527 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="acfcdb86-84f3-4f51-b3d2-57be7f9694f6" containerName="nova-cell1-novncproxy-novncproxy" Feb 03 07:08:06 crc kubenswrapper[4998]: I0203 07:08:06.790111 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:08:06 crc kubenswrapper[4998]: I0203 07:08:06.793460 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Feb 03 07:08:06 crc kubenswrapper[4998]: I0203 07:08:06.793694 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Feb 03 07:08:06 crc kubenswrapper[4998]: I0203 07:08:06.793863 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Feb 03 07:08:06 crc kubenswrapper[4998]: I0203 07:08:06.813619 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 03 07:08:06 crc kubenswrapper[4998]: I0203 07:08:06.826395 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Feb 03 07:08:06 crc kubenswrapper[4998]: I0203 07:08:06.828336 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 03 07:08:06 crc kubenswrapper[4998]: I0203 07:08:06.830649 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Feb 03 07:08:06 crc kubenswrapper[4998]: I0203 07:08:06.830864 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Feb 03 07:08:06 crc kubenswrapper[4998]: I0203 07:08:06.836307 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 03 07:08:06 crc kubenswrapper[4998]: I0203 07:08:06.992501 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/eaf9e314-f762-45c1-bab5-3cec9661a787-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"eaf9e314-f762-45c1-bab5-3cec9661a787\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:08:06 crc kubenswrapper[4998]: I0203 07:08:06.992562 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eaf9e314-f762-45c1-bab5-3cec9661a787-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"eaf9e314-f762-45c1-bab5-3cec9661a787\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:08:06 crc kubenswrapper[4998]: I0203 07:08:06.993384 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2x9v\" (UniqueName: \"kubernetes.io/projected/98d719ac-e7cd-4a10-bed5-5baec4c17f48-kube-api-access-k2x9v\") pod \"nova-metadata-0\" (UID: \"98d719ac-e7cd-4a10-bed5-5baec4c17f48\") " pod="openstack/nova-metadata-0" Feb 03 07:08:06 crc kubenswrapper[4998]: I0203 07:08:06.993420 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eaf9e314-f762-45c1-bab5-3cec9661a787-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"eaf9e314-f762-45c1-bab5-3cec9661a787\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:08:06 crc kubenswrapper[4998]: I0203 07:08:06.993495 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98d719ac-e7cd-4a10-bed5-5baec4c17f48-logs\") pod \"nova-metadata-0\" (UID: \"98d719ac-e7cd-4a10-bed5-5baec4c17f48\") " pod="openstack/nova-metadata-0" Feb 03 07:08:06 crc kubenswrapper[4998]: I0203 07:08:06.993616 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bgkgf\" (UniqueName: \"kubernetes.io/projected/eaf9e314-f762-45c1-bab5-3cec9661a787-kube-api-access-bgkgf\") pod \"nova-cell1-novncproxy-0\" (UID: \"eaf9e314-f762-45c1-bab5-3cec9661a787\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:08:06 crc kubenswrapper[4998]: I0203 07:08:06.993670 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98d719ac-e7cd-4a10-bed5-5baec4c17f48-config-data\") pod \"nova-metadata-0\" (UID: \"98d719ac-e7cd-4a10-bed5-5baec4c17f48\") " pod="openstack/nova-metadata-0" Feb 03 07:08:06 crc kubenswrapper[4998]: I0203 07:08:06.993702 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/eaf9e314-f762-45c1-bab5-3cec9661a787-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"eaf9e314-f762-45c1-bab5-3cec9661a787\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:08:06 crc kubenswrapper[4998]: I0203 07:08:06.993747 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98d719ac-e7cd-4a10-bed5-5baec4c17f48-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"98d719ac-e7cd-4a10-bed5-5baec4c17f48\") " pod="openstack/nova-metadata-0" Feb 03 07:08:06 crc kubenswrapper[4998]: I0203 07:08:06.993811 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/98d719ac-e7cd-4a10-bed5-5baec4c17f48-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"98d719ac-e7cd-4a10-bed5-5baec4c17f48\") " pod="openstack/nova-metadata-0" Feb 03 07:08:07 crc kubenswrapper[4998]: I0203 07:08:07.095363 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2x9v\" (UniqueName: \"kubernetes.io/projected/98d719ac-e7cd-4a10-bed5-5baec4c17f48-kube-api-access-k2x9v\") pod \"nova-metadata-0\" (UID: \"98d719ac-e7cd-4a10-bed5-5baec4c17f48\") " pod="openstack/nova-metadata-0" Feb 03 07:08:07 crc kubenswrapper[4998]: I0203 07:08:07.095406 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eaf9e314-f762-45c1-bab5-3cec9661a787-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"eaf9e314-f762-45c1-bab5-3cec9661a787\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:08:07 crc kubenswrapper[4998]: I0203 07:08:07.095458 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98d719ac-e7cd-4a10-bed5-5baec4c17f48-logs\") pod \"nova-metadata-0\" (UID: \"98d719ac-e7cd-4a10-bed5-5baec4c17f48\") " pod="openstack/nova-metadata-0" Feb 03 07:08:07 crc kubenswrapper[4998]: I0203 07:08:07.095522 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bgkgf\" (UniqueName: \"kubernetes.io/projected/eaf9e314-f762-45c1-bab5-3cec9661a787-kube-api-access-bgkgf\") pod \"nova-cell1-novncproxy-0\" (UID: \"eaf9e314-f762-45c1-bab5-3cec9661a787\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:08:07 crc kubenswrapper[4998]: I0203 07:08:07.095552 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98d719ac-e7cd-4a10-bed5-5baec4c17f48-config-data\") pod \"nova-metadata-0\" (UID: \"98d719ac-e7cd-4a10-bed5-5baec4c17f48\") " pod="openstack/nova-metadata-0" Feb 03 07:08:07 crc kubenswrapper[4998]: I0203 07:08:07.095571 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/eaf9e314-f762-45c1-bab5-3cec9661a787-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"eaf9e314-f762-45c1-bab5-3cec9661a787\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:08:07 crc kubenswrapper[4998]: I0203 07:08:07.095595 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98d719ac-e7cd-4a10-bed5-5baec4c17f48-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: 
\"98d719ac-e7cd-4a10-bed5-5baec4c17f48\") " pod="openstack/nova-metadata-0" Feb 03 07:08:07 crc kubenswrapper[4998]: I0203 07:08:07.095617 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/98d719ac-e7cd-4a10-bed5-5baec4c17f48-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"98d719ac-e7cd-4a10-bed5-5baec4c17f48\") " pod="openstack/nova-metadata-0" Feb 03 07:08:07 crc kubenswrapper[4998]: I0203 07:08:07.095634 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/eaf9e314-f762-45c1-bab5-3cec9661a787-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"eaf9e314-f762-45c1-bab5-3cec9661a787\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:08:07 crc kubenswrapper[4998]: I0203 07:08:07.095655 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eaf9e314-f762-45c1-bab5-3cec9661a787-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"eaf9e314-f762-45c1-bab5-3cec9661a787\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:08:07 crc kubenswrapper[4998]: I0203 07:08:07.096146 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98d719ac-e7cd-4a10-bed5-5baec4c17f48-logs\") pod \"nova-metadata-0\" (UID: \"98d719ac-e7cd-4a10-bed5-5baec4c17f48\") " pod="openstack/nova-metadata-0" Feb 03 07:08:07 crc kubenswrapper[4998]: I0203 07:08:07.100729 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/eaf9e314-f762-45c1-bab5-3cec9661a787-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"eaf9e314-f762-45c1-bab5-3cec9661a787\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:08:07 crc kubenswrapper[4998]: I0203 07:08:07.100747 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/98d719ac-e7cd-4a10-bed5-5baec4c17f48-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"98d719ac-e7cd-4a10-bed5-5baec4c17f48\") " pod="openstack/nova-metadata-0" Feb 03 07:08:07 crc kubenswrapper[4998]: I0203 07:08:07.101573 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98d719ac-e7cd-4a10-bed5-5baec4c17f48-config-data\") pod \"nova-metadata-0\" (UID: \"98d719ac-e7cd-4a10-bed5-5baec4c17f48\") " pod="openstack/nova-metadata-0" Feb 03 07:08:07 crc kubenswrapper[4998]: I0203 07:08:07.101919 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eaf9e314-f762-45c1-bab5-3cec9661a787-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"eaf9e314-f762-45c1-bab5-3cec9661a787\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:08:07 crc kubenswrapper[4998]: I0203 07:08:07.102882 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98d719ac-e7cd-4a10-bed5-5baec4c17f48-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"98d719ac-e7cd-4a10-bed5-5baec4c17f48\") " pod="openstack/nova-metadata-0" Feb 03 07:08:07 crc kubenswrapper[4998]: I0203 07:08:07.102895 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/eaf9e314-f762-45c1-bab5-3cec9661a787-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"eaf9e314-f762-45c1-bab5-3cec9661a787\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:08:07 crc kubenswrapper[4998]: I0203 07:08:07.103877 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eaf9e314-f762-45c1-bab5-3cec9661a787-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"eaf9e314-f762-45c1-bab5-3cec9661a787\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:08:07 crc kubenswrapper[4998]: I0203 07:08:07.113888 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2x9v\" (UniqueName: \"kubernetes.io/projected/98d719ac-e7cd-4a10-bed5-5baec4c17f48-kube-api-access-k2x9v\") pod \"nova-metadata-0\" (UID: \"98d719ac-e7cd-4a10-bed5-5baec4c17f48\") " pod="openstack/nova-metadata-0" Feb 03 07:08:07 crc kubenswrapper[4998]: I0203 07:08:07.114570 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bgkgf\" (UniqueName: \"kubernetes.io/projected/eaf9e314-f762-45c1-bab5-3cec9661a787-kube-api-access-bgkgf\") pod \"nova-cell1-novncproxy-0\" (UID: \"eaf9e314-f762-45c1-bab5-3cec9661a787\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:08:07 crc kubenswrapper[4998]: I0203 07:08:07.149852 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 03 07:08:07 crc kubenswrapper[4998]: I0203 07:08:07.414572 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:08:07 crc kubenswrapper[4998]: I0203 07:08:07.583900 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 03 07:08:07 crc kubenswrapper[4998]: W0203 07:08:07.597863 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod98d719ac_e7cd_4a10_bed5_5baec4c17f48.slice/crio-d922115110935e3fc45d55e65c99e52fc963452360da9b3d5d1547d0dc0fe3dc WatchSource:0}: Error finding container d922115110935e3fc45d55e65c99e52fc963452360da9b3d5d1547d0dc0fe3dc: Status 404 returned error can't find the container with id d922115110935e3fc45d55e65c99e52fc963452360da9b3d5d1547d0dc0fe3dc Feb 03 07:08:07 crc kubenswrapper[4998]: I0203 07:08:07.724447 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"98d719ac-e7cd-4a10-bed5-5baec4c17f48","Type":"ContainerStarted","Data":"d922115110935e3fc45d55e65c99e52fc963452360da9b3d5d1547d0dc0fe3dc"} Feb 03 07:08:07 crc kubenswrapper[4998]: I0203 07:08:07.877122 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 03 07:08:08 crc kubenswrapper[4998]: I0203 07:08:08.443310 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab00de0e-0ad9-4085-abff-d9383cfeb712" path="/var/lib/kubelet/pods/ab00de0e-0ad9-4085-abff-d9383cfeb712/volumes" Feb 03 07:08:08 crc kubenswrapper[4998]: I0203 07:08:08.444310 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="acfcdb86-84f3-4f51-b3d2-57be7f9694f6" path="/var/lib/kubelet/pods/acfcdb86-84f3-4f51-b3d2-57be7f9694f6/volumes" Feb 03 07:08:08 crc kubenswrapper[4998]: I0203 07:08:08.747818 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" 
event={"ID":"eaf9e314-f762-45c1-bab5-3cec9661a787","Type":"ContainerStarted","Data":"daec52a53c154164ed7086c02f290ab1ac36c8f53a2d65c9b1098d2717904264"} Feb 03 07:08:08 crc kubenswrapper[4998]: I0203 07:08:08.747867 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"eaf9e314-f762-45c1-bab5-3cec9661a787","Type":"ContainerStarted","Data":"440427a42ba083aadfb2585dd39aed178b32b5152e139574aa5e99ccfe105d71"} Feb 03 07:08:08 crc kubenswrapper[4998]: I0203 07:08:08.763040 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"98d719ac-e7cd-4a10-bed5-5baec4c17f48","Type":"ContainerStarted","Data":"d83b423a6de677a68c565b09c55ffc1294f4a4cbf02653fe75a7044c689b074e"} Feb 03 07:08:08 crc kubenswrapper[4998]: I0203 07:08:08.763100 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"98d719ac-e7cd-4a10-bed5-5baec4c17f48","Type":"ContainerStarted","Data":"8f8e875d562862d9a9107a664097fccc0dd492a1464e83ac0f4f306928f35903"} Feb 03 07:08:08 crc kubenswrapper[4998]: I0203 07:08:08.780507 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.780484603 podStartE2EDuration="2.780484603s" podCreationTimestamp="2026-02-03 07:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:08:08.769516549 +0000 UTC m=+1327.056210355" watchObservedRunningTime="2026-02-03 07:08:08.780484603 +0000 UTC m=+1327.067178409" Feb 03 07:08:08 crc kubenswrapper[4998]: I0203 07:08:08.802019 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.802000617 podStartE2EDuration="2.802000617s" podCreationTimestamp="2026-02-03 07:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:08:08.794412651 +0000 UTC m=+1327.081106457" watchObservedRunningTime="2026-02-03 07:08:08.802000617 +0000 UTC m=+1327.088694423" Feb 03 07:08:09 crc kubenswrapper[4998]: I0203 07:08:09.889317 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Feb 03 07:08:09 crc kubenswrapper[4998]: I0203 07:08:09.889910 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Feb 03 07:08:09 crc kubenswrapper[4998]: I0203 07:08:09.892058 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Feb 03 07:08:09 crc kubenswrapper[4998]: I0203 07:08:09.897865 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Feb 03 07:08:10 crc kubenswrapper[4998]: I0203 07:08:10.781513 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Feb 03 07:08:10 crc kubenswrapper[4998]: I0203 07:08:10.784815 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Feb 03 07:08:10 crc kubenswrapper[4998]: I0203 07:08:10.980698 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-74b99f9475-2cfcp"] Feb 03 07:08:10 crc kubenswrapper[4998]: I0203 07:08:10.982568 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74b99f9475-2cfcp" Feb 03 07:08:10 crc kubenswrapper[4998]: I0203 07:08:10.996270 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74b99f9475-2cfcp"] Feb 03 07:08:11 crc kubenswrapper[4998]: I0203 07:08:11.173166 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f61ec1cb-5d51-4c91-89a6-cbdb2d290838-config\") pod \"dnsmasq-dns-74b99f9475-2cfcp\" (UID: \"f61ec1cb-5d51-4c91-89a6-cbdb2d290838\") " pod="openstack/dnsmasq-dns-74b99f9475-2cfcp" Feb 03 07:08:11 crc kubenswrapper[4998]: I0203 07:08:11.173248 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f61ec1cb-5d51-4c91-89a6-cbdb2d290838-dns-svc\") pod \"dnsmasq-dns-74b99f9475-2cfcp\" (UID: \"f61ec1cb-5d51-4c91-89a6-cbdb2d290838\") " pod="openstack/dnsmasq-dns-74b99f9475-2cfcp" Feb 03 07:08:11 crc kubenswrapper[4998]: I0203 07:08:11.173315 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-km6ll\" (UniqueName: \"kubernetes.io/projected/f61ec1cb-5d51-4c91-89a6-cbdb2d290838-kube-api-access-km6ll\") pod \"dnsmasq-dns-74b99f9475-2cfcp\" (UID: \"f61ec1cb-5d51-4c91-89a6-cbdb2d290838\") " pod="openstack/dnsmasq-dns-74b99f9475-2cfcp" Feb 03 07:08:11 crc kubenswrapper[4998]: I0203 07:08:11.173339 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f61ec1cb-5d51-4c91-89a6-cbdb2d290838-ovsdbserver-nb\") pod \"dnsmasq-dns-74b99f9475-2cfcp\" (UID: \"f61ec1cb-5d51-4c91-89a6-cbdb2d290838\") " pod="openstack/dnsmasq-dns-74b99f9475-2cfcp" Feb 03 07:08:11 crc kubenswrapper[4998]: I0203 07:08:11.173528 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f61ec1cb-5d51-4c91-89a6-cbdb2d290838-ovsdbserver-sb\") pod \"dnsmasq-dns-74b99f9475-2cfcp\" (UID: \"f61ec1cb-5d51-4c91-89a6-cbdb2d290838\") " pod="openstack/dnsmasq-dns-74b99f9475-2cfcp" Feb 03 07:08:11 crc kubenswrapper[4998]: I0203 07:08:11.173599 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f61ec1cb-5d51-4c91-89a6-cbdb2d290838-dns-swift-storage-0\") pod \"dnsmasq-dns-74b99f9475-2cfcp\" (UID: \"f61ec1cb-5d51-4c91-89a6-cbdb2d290838\") " pod="openstack/dnsmasq-dns-74b99f9475-2cfcp" Feb 03 07:08:11 crc kubenswrapper[4998]: I0203 07:08:11.275239 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f61ec1cb-5d51-4c91-89a6-cbdb2d290838-config\") pod \"dnsmasq-dns-74b99f9475-2cfcp\" (UID: \"f61ec1cb-5d51-4c91-89a6-cbdb2d290838\") " pod="openstack/dnsmasq-dns-74b99f9475-2cfcp" Feb 03 07:08:11 crc kubenswrapper[4998]: I0203 07:08:11.275314 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f61ec1cb-5d51-4c91-89a6-cbdb2d290838-dns-svc\") pod \"dnsmasq-dns-74b99f9475-2cfcp\" (UID: \"f61ec1cb-5d51-4c91-89a6-cbdb2d290838\") " pod="openstack/dnsmasq-dns-74b99f9475-2cfcp" Feb 03 07:08:11 crc kubenswrapper[4998]: I0203 07:08:11.275382 4998 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-km6ll\" (UniqueName: \"kubernetes.io/projected/f61ec1cb-5d51-4c91-89a6-cbdb2d290838-kube-api-access-km6ll\") pod \"dnsmasq-dns-74b99f9475-2cfcp\" (UID: \"f61ec1cb-5d51-4c91-89a6-cbdb2d290838\") " pod="openstack/dnsmasq-dns-74b99f9475-2cfcp" Feb 03 07:08:11 crc kubenswrapper[4998]: I0203 07:08:11.275404 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f61ec1cb-5d51-4c91-89a6-cbdb2d290838-ovsdbserver-nb\") pod \"dnsmasq-dns-74b99f9475-2cfcp\" (UID: \"f61ec1cb-5d51-4c91-89a6-cbdb2d290838\") " pod="openstack/dnsmasq-dns-74b99f9475-2cfcp" Feb 03 07:08:11 crc kubenswrapper[4998]: I0203 07:08:11.275445 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f61ec1cb-5d51-4c91-89a6-cbdb2d290838-ovsdbserver-sb\") pod \"dnsmasq-dns-74b99f9475-2cfcp\" (UID: \"f61ec1cb-5d51-4c91-89a6-cbdb2d290838\") " pod="openstack/dnsmasq-dns-74b99f9475-2cfcp" Feb 03 07:08:11 crc kubenswrapper[4998]: I0203 07:08:11.275470 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f61ec1cb-5d51-4c91-89a6-cbdb2d290838-dns-swift-storage-0\") pod \"dnsmasq-dns-74b99f9475-2cfcp\" (UID: \"f61ec1cb-5d51-4c91-89a6-cbdb2d290838\") " pod="openstack/dnsmasq-dns-74b99f9475-2cfcp" Feb 03 07:08:11 crc kubenswrapper[4998]: I0203 07:08:11.276388 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f61ec1cb-5d51-4c91-89a6-cbdb2d290838-config\") pod \"dnsmasq-dns-74b99f9475-2cfcp\" (UID: \"f61ec1cb-5d51-4c91-89a6-cbdb2d290838\") " pod="openstack/dnsmasq-dns-74b99f9475-2cfcp" Feb 03 07:08:11 crc kubenswrapper[4998]: I0203 07:08:11.276433 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f61ec1cb-5d51-4c91-89a6-cbdb2d290838-dns-svc\") pod \"dnsmasq-dns-74b99f9475-2cfcp\" (UID: \"f61ec1cb-5d51-4c91-89a6-cbdb2d290838\") " pod="openstack/dnsmasq-dns-74b99f9475-2cfcp" Feb 03 07:08:11 crc kubenswrapper[4998]: I0203 07:08:11.276617 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f61ec1cb-5d51-4c91-89a6-cbdb2d290838-ovsdbserver-sb\") pod \"dnsmasq-dns-74b99f9475-2cfcp\" (UID: \"f61ec1cb-5d51-4c91-89a6-cbdb2d290838\") " pod="openstack/dnsmasq-dns-74b99f9475-2cfcp" Feb 03 07:08:11 crc kubenswrapper[4998]: I0203 07:08:11.276617 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f61ec1cb-5d51-4c91-89a6-cbdb2d290838-ovsdbserver-nb\") pod \"dnsmasq-dns-74b99f9475-2cfcp\" (UID: \"f61ec1cb-5d51-4c91-89a6-cbdb2d290838\") " pod="openstack/dnsmasq-dns-74b99f9475-2cfcp" Feb 03 07:08:11 crc kubenswrapper[4998]: I0203 07:08:11.277147 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f61ec1cb-5d51-4c91-89a6-cbdb2d290838-dns-swift-storage-0\") pod \"dnsmasq-dns-74b99f9475-2cfcp\" (UID: \"f61ec1cb-5d51-4c91-89a6-cbdb2d290838\") " pod="openstack/dnsmasq-dns-74b99f9475-2cfcp" Feb 03 07:08:11 crc kubenswrapper[4998]: I0203 07:08:11.296034 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-km6ll\" (UniqueName: 
\"kubernetes.io/projected/f61ec1cb-5d51-4c91-89a6-cbdb2d290838-kube-api-access-km6ll\") pod \"dnsmasq-dns-74b99f9475-2cfcp\" (UID: \"f61ec1cb-5d51-4c91-89a6-cbdb2d290838\") " pod="openstack/dnsmasq-dns-74b99f9475-2cfcp" Feb 03 07:08:11 crc kubenswrapper[4998]: I0203 07:08:11.309962 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74b99f9475-2cfcp" Feb 03 07:08:11 crc kubenswrapper[4998]: W0203 07:08:11.856422 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf61ec1cb_5d51_4c91_89a6_cbdb2d290838.slice/crio-4dec98b26780c5870d9d64aac3ce278761abf24556a69dbde6d69b67dcf57158 WatchSource:0}: Error finding container 4dec98b26780c5870d9d64aac3ce278761abf24556a69dbde6d69b67dcf57158: Status 404 returned error can't find the container with id 4dec98b26780c5870d9d64aac3ce278761abf24556a69dbde6d69b67dcf57158 Feb 03 07:08:11 crc kubenswrapper[4998]: I0203 07:08:11.860795 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74b99f9475-2cfcp"] Feb 03 07:08:12 crc kubenswrapper[4998]: I0203 07:08:12.150907 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 03 07:08:12 crc kubenswrapper[4998]: I0203 07:08:12.152308 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 03 07:08:12 crc kubenswrapper[4998]: I0203 07:08:12.414759 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:08:12 crc kubenswrapper[4998]: I0203 07:08:12.821108 4998 generic.go:334] "Generic (PLEG): container finished" podID="f61ec1cb-5d51-4c91-89a6-cbdb2d290838" containerID="107d045d204e2e40e70cddd937947abf2fe6811cfd97b27cdf2c86d591e10b28" exitCode=0 Feb 03 07:08:12 crc kubenswrapper[4998]: I0203 07:08:12.821206 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74b99f9475-2cfcp" event={"ID":"f61ec1cb-5d51-4c91-89a6-cbdb2d290838","Type":"ContainerDied","Data":"107d045d204e2e40e70cddd937947abf2fe6811cfd97b27cdf2c86d591e10b28"} Feb 03 07:08:12 crc kubenswrapper[4998]: I0203 07:08:12.821257 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74b99f9475-2cfcp" event={"ID":"f61ec1cb-5d51-4c91-89a6-cbdb2d290838","Type":"ContainerStarted","Data":"4dec98b26780c5870d9d64aac3ce278761abf24556a69dbde6d69b67dcf57158"} Feb 03 07:08:13 crc kubenswrapper[4998]: I0203 07:08:13.354510 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:08:13 crc kubenswrapper[4998]: I0203 07:08:13.355141 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="948b79ac-ffa3-4848-8800-462eb894cb0c" containerName="ceilometer-notification-agent" containerID="cri-o://5d7d41cca39d4807855aca639882004c6a0546d3132beb684028e22eee232e64" gracePeriod=30 Feb 03 07:08:13 crc kubenswrapper[4998]: I0203 07:08:13.355162 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="948b79ac-ffa3-4848-8800-462eb894cb0c" containerName="sg-core" containerID="cri-o://68b523af09a7e35d7d9fbb9ab1b0fab68d289b1089cd6f955bee40a1839a9c65" gracePeriod=30 Feb 03 07:08:13 crc kubenswrapper[4998]: I0203 07:08:13.355244 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="948b79ac-ffa3-4848-8800-462eb894cb0c" 
containerName="proxy-httpd" containerID="cri-o://bcba74e1e0494391ede410b57660f2f6f4c653650d3a0894b654391063d77965" gracePeriod=30 Feb 03 07:08:13 crc kubenswrapper[4998]: I0203 07:08:13.355305 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="948b79ac-ffa3-4848-8800-462eb894cb0c" containerName="ceilometer-central-agent" containerID="cri-o://bdfed3c637d5a7e646aafe270bfdd002d1e9e70fc02fb8311d733fa26a2bd587" gracePeriod=30 Feb 03 07:08:13 crc kubenswrapper[4998]: I0203 07:08:13.371603 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="948b79ac-ffa3-4848-8800-462eb894cb0c" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Feb 03 07:08:13 crc kubenswrapper[4998]: I0203 07:08:13.818213 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 03 07:08:13 crc kubenswrapper[4998]: I0203 07:08:13.834910 4998 generic.go:334] "Generic (PLEG): container finished" podID="948b79ac-ffa3-4848-8800-462eb894cb0c" containerID="bcba74e1e0494391ede410b57660f2f6f4c653650d3a0894b654391063d77965" exitCode=0 Feb 03 07:08:13 crc kubenswrapper[4998]: I0203 07:08:13.834947 4998 generic.go:334] "Generic (PLEG): container finished" podID="948b79ac-ffa3-4848-8800-462eb894cb0c" containerID="68b523af09a7e35d7d9fbb9ab1b0fab68d289b1089cd6f955bee40a1839a9c65" exitCode=2 Feb 03 07:08:13 crc kubenswrapper[4998]: I0203 07:08:13.834957 4998 generic.go:334] "Generic (PLEG): container finished" podID="948b79ac-ffa3-4848-8800-462eb894cb0c" containerID="bdfed3c637d5a7e646aafe270bfdd002d1e9e70fc02fb8311d733fa26a2bd587" exitCode=0 Feb 03 07:08:13 crc kubenswrapper[4998]: I0203 07:08:13.835003 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"948b79ac-ffa3-4848-8800-462eb894cb0c","Type":"ContainerDied","Data":"bcba74e1e0494391ede410b57660f2f6f4c653650d3a0894b654391063d77965"} Feb 03 07:08:13 crc kubenswrapper[4998]: I0203 07:08:13.835031 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"948b79ac-ffa3-4848-8800-462eb894cb0c","Type":"ContainerDied","Data":"68b523af09a7e35d7d9fbb9ab1b0fab68d289b1089cd6f955bee40a1839a9c65"} Feb 03 07:08:13 crc kubenswrapper[4998]: I0203 07:08:13.835040 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"948b79ac-ffa3-4848-8800-462eb894cb0c","Type":"ContainerDied","Data":"bdfed3c637d5a7e646aafe270bfdd002d1e9e70fc02fb8311d733fa26a2bd587"} Feb 03 07:08:13 crc kubenswrapper[4998]: I0203 07:08:13.836895 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="12b26172-a00e-4d89-9129-00fe6712e6c7" containerName="nova-api-log" containerID="cri-o://1417b4979181c956361e869005d120271e96ca441fc4cc159ac55b76833e6e2b" gracePeriod=30 Feb 03 07:08:13 crc kubenswrapper[4998]: I0203 07:08:13.837867 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74b99f9475-2cfcp" event={"ID":"f61ec1cb-5d51-4c91-89a6-cbdb2d290838","Type":"ContainerStarted","Data":"0678f937a920be133969dc75e38cd8c33172cd7959d53982eb1d4b695fa391ae"} Feb 03 07:08:13 crc kubenswrapper[4998]: I0203 07:08:13.837897 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-74b99f9475-2cfcp" Feb 03 07:08:13 crc kubenswrapper[4998]: I0203 07:08:13.838193 4998 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/nova-api-0" podUID="12b26172-a00e-4d89-9129-00fe6712e6c7" containerName="nova-api-api" containerID="cri-o://6fdd5fb17be38409a8883ab84d0a39e9f24a345d034f67cff54131ed39bb8f7e" gracePeriod=30 Feb 03 07:08:13 crc kubenswrapper[4998]: I0203 07:08:13.853828 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-74b99f9475-2cfcp" podStartSLOduration=3.853770365 podStartE2EDuration="3.853770365s" podCreationTimestamp="2026-02-03 07:08:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:08:13.853336322 +0000 UTC m=+1332.140030148" watchObservedRunningTime="2026-02-03 07:08:13.853770365 +0000 UTC m=+1332.140464171" Feb 03 07:08:14 crc kubenswrapper[4998]: I0203 07:08:14.847207 4998 generic.go:334] "Generic (PLEG): container finished" podID="12b26172-a00e-4d89-9129-00fe6712e6c7" containerID="1417b4979181c956361e869005d120271e96ca441fc4cc159ac55b76833e6e2b" exitCode=143 Feb 03 07:08:14 crc kubenswrapper[4998]: I0203 07:08:14.847251 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"12b26172-a00e-4d89-9129-00fe6712e6c7","Type":"ContainerDied","Data":"1417b4979181c956361e869005d120271e96ca441fc4cc159ac55b76833e6e2b"} Feb 03 07:08:15 crc kubenswrapper[4998]: I0203 07:08:15.858533 4998 generic.go:334] "Generic (PLEG): container finished" podID="948b79ac-ffa3-4848-8800-462eb894cb0c" containerID="5d7d41cca39d4807855aca639882004c6a0546d3132beb684028e22eee232e64" exitCode=0 Feb 03 07:08:15 crc kubenswrapper[4998]: I0203 07:08:15.858630 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"948b79ac-ffa3-4848-8800-462eb894cb0c","Type":"ContainerDied","Data":"5d7d41cca39d4807855aca639882004c6a0546d3132beb684028e22eee232e64"} Feb 03 07:08:15 crc kubenswrapper[4998]: I0203 07:08:15.858835 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"948b79ac-ffa3-4848-8800-462eb894cb0c","Type":"ContainerDied","Data":"db535ba041f5f0ecc1caa74c6ba022027f005e895dd9b2dcd5f060ecae9eb80a"} Feb 03 07:08:15 crc kubenswrapper[4998]: I0203 07:08:15.858855 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="db535ba041f5f0ecc1caa74c6ba022027f005e895dd9b2dcd5f060ecae9eb80a" Feb 03 07:08:15 crc kubenswrapper[4998]: I0203 07:08:15.934826 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.076915 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/948b79ac-ffa3-4848-8800-462eb894cb0c-config-data\") pod \"948b79ac-ffa3-4848-8800-462eb894cb0c\" (UID: \"948b79ac-ffa3-4848-8800-462eb894cb0c\") " Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.076981 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/948b79ac-ffa3-4848-8800-462eb894cb0c-log-httpd\") pod \"948b79ac-ffa3-4848-8800-462eb894cb0c\" (UID: \"948b79ac-ffa3-4848-8800-462eb894cb0c\") " Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.077058 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wlpgr\" (UniqueName: \"kubernetes.io/projected/948b79ac-ffa3-4848-8800-462eb894cb0c-kube-api-access-wlpgr\") pod \"948b79ac-ffa3-4848-8800-462eb894cb0c\" (UID: \"948b79ac-ffa3-4848-8800-462eb894cb0c\") " Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.077097 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/948b79ac-ffa3-4848-8800-462eb894cb0c-ceilometer-tls-certs\") pod \"948b79ac-ffa3-4848-8800-462eb894cb0c\" (UID: \"948b79ac-ffa3-4848-8800-462eb894cb0c\") " Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.077140 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/948b79ac-ffa3-4848-8800-462eb894cb0c-sg-core-conf-yaml\") pod \"948b79ac-ffa3-4848-8800-462eb894cb0c\" (UID: \"948b79ac-ffa3-4848-8800-462eb894cb0c\") " Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.077317 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/948b79ac-ffa3-4848-8800-462eb894cb0c-scripts\") pod \"948b79ac-ffa3-4848-8800-462eb894cb0c\" (UID: \"948b79ac-ffa3-4848-8800-462eb894cb0c\") " Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.077354 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/948b79ac-ffa3-4848-8800-462eb894cb0c-run-httpd\") pod \"948b79ac-ffa3-4848-8800-462eb894cb0c\" (UID: \"948b79ac-ffa3-4848-8800-462eb894cb0c\") " Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.077438 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/948b79ac-ffa3-4848-8800-462eb894cb0c-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "948b79ac-ffa3-4848-8800-462eb894cb0c" (UID: "948b79ac-ffa3-4848-8800-462eb894cb0c"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.077481 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/948b79ac-ffa3-4848-8800-462eb894cb0c-combined-ca-bundle\") pod \"948b79ac-ffa3-4848-8800-462eb894cb0c\" (UID: \"948b79ac-ffa3-4848-8800-462eb894cb0c\") " Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.077653 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/948b79ac-ffa3-4848-8800-462eb894cb0c-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "948b79ac-ffa3-4848-8800-462eb894cb0c" (UID: "948b79ac-ffa3-4848-8800-462eb894cb0c"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.078106 4998 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/948b79ac-ffa3-4848-8800-462eb894cb0c-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.078133 4998 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/948b79ac-ffa3-4848-8800-462eb894cb0c-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.082567 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/948b79ac-ffa3-4848-8800-462eb894cb0c-scripts" (OuterVolumeSpecName: "scripts") pod "948b79ac-ffa3-4848-8800-462eb894cb0c" (UID: "948b79ac-ffa3-4848-8800-462eb894cb0c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.084074 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/948b79ac-ffa3-4848-8800-462eb894cb0c-kube-api-access-wlpgr" (OuterVolumeSpecName: "kube-api-access-wlpgr") pod "948b79ac-ffa3-4848-8800-462eb894cb0c" (UID: "948b79ac-ffa3-4848-8800-462eb894cb0c"). InnerVolumeSpecName "kube-api-access-wlpgr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.118919 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/948b79ac-ffa3-4848-8800-462eb894cb0c-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "948b79ac-ffa3-4848-8800-462eb894cb0c" (UID: "948b79ac-ffa3-4848-8800-462eb894cb0c"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.161911 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/948b79ac-ffa3-4848-8800-462eb894cb0c-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "948b79ac-ffa3-4848-8800-462eb894cb0c" (UID: "948b79ac-ffa3-4848-8800-462eb894cb0c"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.171916 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/948b79ac-ffa3-4848-8800-462eb894cb0c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "948b79ac-ffa3-4848-8800-462eb894cb0c" (UID: "948b79ac-ffa3-4848-8800-462eb894cb0c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.179772 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/948b79ac-ffa3-4848-8800-462eb894cb0c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.179818 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wlpgr\" (UniqueName: \"kubernetes.io/projected/948b79ac-ffa3-4848-8800-462eb894cb0c-kube-api-access-wlpgr\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.179828 4998 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/948b79ac-ffa3-4848-8800-462eb894cb0c-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.179836 4998 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/948b79ac-ffa3-4848-8800-462eb894cb0c-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.179844 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/948b79ac-ffa3-4848-8800-462eb894cb0c-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.192688 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/948b79ac-ffa3-4848-8800-462eb894cb0c-config-data" (OuterVolumeSpecName: "config-data") pod "948b79ac-ffa3-4848-8800-462eb894cb0c" (UID: "948b79ac-ffa3-4848-8800-462eb894cb0c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.282557 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/948b79ac-ffa3-4848-8800-462eb894cb0c-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.871057 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.902883 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.924476 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.951059 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:08:16 crc kubenswrapper[4998]: E0203 07:08:16.951768 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="948b79ac-ffa3-4848-8800-462eb894cb0c" containerName="sg-core" Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.951831 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="948b79ac-ffa3-4848-8800-462eb894cb0c" containerName="sg-core" Feb 03 07:08:16 crc kubenswrapper[4998]: E0203 07:08:16.951865 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="948b79ac-ffa3-4848-8800-462eb894cb0c" containerName="ceilometer-central-agent" Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.951882 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="948b79ac-ffa3-4848-8800-462eb894cb0c" containerName="ceilometer-central-agent" Feb 03 07:08:16 crc kubenswrapper[4998]: E0203 07:08:16.951950 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="948b79ac-ffa3-4848-8800-462eb894cb0c" containerName="proxy-httpd" Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.951968 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="948b79ac-ffa3-4848-8800-462eb894cb0c" containerName="proxy-httpd" Feb 03 07:08:16 crc kubenswrapper[4998]: E0203 07:08:16.952018 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="948b79ac-ffa3-4848-8800-462eb894cb0c" containerName="ceilometer-notification-agent" Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.952036 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="948b79ac-ffa3-4848-8800-462eb894cb0c" containerName="ceilometer-notification-agent" Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.952449 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="948b79ac-ffa3-4848-8800-462eb894cb0c" containerName="ceilometer-central-agent" Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.952471 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="948b79ac-ffa3-4848-8800-462eb894cb0c" containerName="ceilometer-notification-agent" Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.952494 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="948b79ac-ffa3-4848-8800-462eb894cb0c" containerName="proxy-httpd" Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.952551 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="948b79ac-ffa3-4848-8800-462eb894cb0c" containerName="sg-core" Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.956690 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.963860 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.969869 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.970256 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 03 07:08:16 crc kubenswrapper[4998]: I0203 07:08:16.970693 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.096626 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/20fec019-d2d1-4625-960c-c16004cfa5aa-run-httpd\") pod \"ceilometer-0\" (UID: \"20fec019-d2d1-4625-960c-c16004cfa5aa\") " pod="openstack/ceilometer-0" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.096771 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sndgt\" (UniqueName: \"kubernetes.io/projected/20fec019-d2d1-4625-960c-c16004cfa5aa-kube-api-access-sndgt\") pod \"ceilometer-0\" (UID: \"20fec019-d2d1-4625-960c-c16004cfa5aa\") " pod="openstack/ceilometer-0" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.096970 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/20fec019-d2d1-4625-960c-c16004cfa5aa-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"20fec019-d2d1-4625-960c-c16004cfa5aa\") " pod="openstack/ceilometer-0" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.097045 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/20fec019-d2d1-4625-960c-c16004cfa5aa-log-httpd\") pod \"ceilometer-0\" (UID: \"20fec019-d2d1-4625-960c-c16004cfa5aa\") " pod="openstack/ceilometer-0" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.097114 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20fec019-d2d1-4625-960c-c16004cfa5aa-scripts\") pod \"ceilometer-0\" (UID: \"20fec019-d2d1-4625-960c-c16004cfa5aa\") " pod="openstack/ceilometer-0" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.097165 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/20fec019-d2d1-4625-960c-c16004cfa5aa-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"20fec019-d2d1-4625-960c-c16004cfa5aa\") " pod="openstack/ceilometer-0" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.097280 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20fec019-d2d1-4625-960c-c16004cfa5aa-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"20fec019-d2d1-4625-960c-c16004cfa5aa\") " pod="openstack/ceilometer-0" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.097391 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/20fec019-d2d1-4625-960c-c16004cfa5aa-config-data\") pod \"ceilometer-0\" (UID: \"20fec019-d2d1-4625-960c-c16004cfa5aa\") " pod="openstack/ceilometer-0" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.150452 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.150492 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.202225 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20fec019-d2d1-4625-960c-c16004cfa5aa-config-data\") pod \"ceilometer-0\" (UID: \"20fec019-d2d1-4625-960c-c16004cfa5aa\") " pod="openstack/ceilometer-0" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.202413 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/20fec019-d2d1-4625-960c-c16004cfa5aa-run-httpd\") pod \"ceilometer-0\" (UID: \"20fec019-d2d1-4625-960c-c16004cfa5aa\") " pod="openstack/ceilometer-0" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.202460 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sndgt\" (UniqueName: \"kubernetes.io/projected/20fec019-d2d1-4625-960c-c16004cfa5aa-kube-api-access-sndgt\") pod \"ceilometer-0\" (UID: \"20fec019-d2d1-4625-960c-c16004cfa5aa\") " pod="openstack/ceilometer-0" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.202523 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/20fec019-d2d1-4625-960c-c16004cfa5aa-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"20fec019-d2d1-4625-960c-c16004cfa5aa\") " pod="openstack/ceilometer-0" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.202559 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/20fec019-d2d1-4625-960c-c16004cfa5aa-log-httpd\") pod \"ceilometer-0\" (UID: \"20fec019-d2d1-4625-960c-c16004cfa5aa\") " pod="openstack/ceilometer-0" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.202584 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20fec019-d2d1-4625-960c-c16004cfa5aa-scripts\") pod \"ceilometer-0\" (UID: \"20fec019-d2d1-4625-960c-c16004cfa5aa\") " pod="openstack/ceilometer-0" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.202608 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/20fec019-d2d1-4625-960c-c16004cfa5aa-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"20fec019-d2d1-4625-960c-c16004cfa5aa\") " pod="openstack/ceilometer-0" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.202648 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20fec019-d2d1-4625-960c-c16004cfa5aa-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"20fec019-d2d1-4625-960c-c16004cfa5aa\") " pod="openstack/ceilometer-0" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.204318 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/20fec019-d2d1-4625-960c-c16004cfa5aa-run-httpd\") pod \"ceilometer-0\" (UID: \"20fec019-d2d1-4625-960c-c16004cfa5aa\") " pod="openstack/ceilometer-0" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.204880 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/20fec019-d2d1-4625-960c-c16004cfa5aa-log-httpd\") pod \"ceilometer-0\" (UID: \"20fec019-d2d1-4625-960c-c16004cfa5aa\") " pod="openstack/ceilometer-0" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.209283 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/20fec019-d2d1-4625-960c-c16004cfa5aa-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"20fec019-d2d1-4625-960c-c16004cfa5aa\") " pod="openstack/ceilometer-0" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.209336 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20fec019-d2d1-4625-960c-c16004cfa5aa-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"20fec019-d2d1-4625-960c-c16004cfa5aa\") " pod="openstack/ceilometer-0" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.210498 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20fec019-d2d1-4625-960c-c16004cfa5aa-config-data\") pod \"ceilometer-0\" (UID: \"20fec019-d2d1-4625-960c-c16004cfa5aa\") " pod="openstack/ceilometer-0" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.215141 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20fec019-d2d1-4625-960c-c16004cfa5aa-scripts\") pod \"ceilometer-0\" (UID: \"20fec019-d2d1-4625-960c-c16004cfa5aa\") " pod="openstack/ceilometer-0" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.218026 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/20fec019-d2d1-4625-960c-c16004cfa5aa-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"20fec019-d2d1-4625-960c-c16004cfa5aa\") " pod="openstack/ceilometer-0" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.220351 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sndgt\" (UniqueName: \"kubernetes.io/projected/20fec019-d2d1-4625-960c-c16004cfa5aa-kube-api-access-sndgt\") pod \"ceilometer-0\" (UID: \"20fec019-d2d1-4625-960c-c16004cfa5aa\") " pod="openstack/ceilometer-0" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.275314 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.379152 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.409527 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/12b26172-a00e-4d89-9129-00fe6712e6c7-logs\") pod \"12b26172-a00e-4d89-9129-00fe6712e6c7\" (UID: \"12b26172-a00e-4d89-9129-00fe6712e6c7\") " Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.409649 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j84qg\" (UniqueName: \"kubernetes.io/projected/12b26172-a00e-4d89-9129-00fe6712e6c7-kube-api-access-j84qg\") pod \"12b26172-a00e-4d89-9129-00fe6712e6c7\" (UID: \"12b26172-a00e-4d89-9129-00fe6712e6c7\") " Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.409760 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/12b26172-a00e-4d89-9129-00fe6712e6c7-config-data\") pod \"12b26172-a00e-4d89-9129-00fe6712e6c7\" (UID: \"12b26172-a00e-4d89-9129-00fe6712e6c7\") " Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.409797 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12b26172-a00e-4d89-9129-00fe6712e6c7-combined-ca-bundle\") pod \"12b26172-a00e-4d89-9129-00fe6712e6c7\" (UID: \"12b26172-a00e-4d89-9129-00fe6712e6c7\") " Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.417410 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12b26172-a00e-4d89-9129-00fe6712e6c7-logs" (OuterVolumeSpecName: "logs") pod "12b26172-a00e-4d89-9129-00fe6712e6c7" (UID: "12b26172-a00e-4d89-9129-00fe6712e6c7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.418029 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.420871 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12b26172-a00e-4d89-9129-00fe6712e6c7-kube-api-access-j84qg" (OuterVolumeSpecName: "kube-api-access-j84qg") pod "12b26172-a00e-4d89-9129-00fe6712e6c7" (UID: "12b26172-a00e-4d89-9129-00fe6712e6c7"). InnerVolumeSpecName "kube-api-access-j84qg". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.440205 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12b26172-a00e-4d89-9129-00fe6712e6c7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "12b26172-a00e-4d89-9129-00fe6712e6c7" (UID: "12b26172-a00e-4d89-9129-00fe6712e6c7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.441014 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.443544 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12b26172-a00e-4d89-9129-00fe6712e6c7-config-data" (OuterVolumeSpecName: "config-data") pod "12b26172-a00e-4d89-9129-00fe6712e6c7" (UID: "12b26172-a00e-4d89-9129-00fe6712e6c7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.511623 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/12b26172-a00e-4d89-9129-00fe6712e6c7-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.511650 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12b26172-a00e-4d89-9129-00fe6712e6c7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.511658 4998 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/12b26172-a00e-4d89-9129-00fe6712e6c7-logs\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.511667 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j84qg\" (UniqueName: \"kubernetes.io/projected/12b26172-a00e-4d89-9129-00fe6712e6c7-kube-api-access-j84qg\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.578512 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.881542 4998 generic.go:334] "Generic (PLEG): container finished" podID="12b26172-a00e-4d89-9129-00fe6712e6c7" containerID="6fdd5fb17be38409a8883ab84d0a39e9f24a345d034f67cff54131ed39bb8f7e" exitCode=0 Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.881586 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.881606 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"12b26172-a00e-4d89-9129-00fe6712e6c7","Type":"ContainerDied","Data":"6fdd5fb17be38409a8883ab84d0a39e9f24a345d034f67cff54131ed39bb8f7e"} Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.882101 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"12b26172-a00e-4d89-9129-00fe6712e6c7","Type":"ContainerDied","Data":"52ab47d7a50f697c7a3597263dd0ad106dcf990467b66c52b4f44d86375c6162"} Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.882122 4998 scope.go:117] "RemoveContainer" containerID="6fdd5fb17be38409a8883ab84d0a39e9f24a345d034f67cff54131ed39bb8f7e" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.883208 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"20fec019-d2d1-4625-960c-c16004cfa5aa","Type":"ContainerStarted","Data":"c68d5a752f8542c881b80d817b6cd229d8246b8a0c06779e1b712183c0be6221"} Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.916204 4998 scope.go:117] "RemoveContainer" containerID="1417b4979181c956361e869005d120271e96ca441fc4cc159ac55b76833e6e2b" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.927461 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.972233 4998 scope.go:117] "RemoveContainer" containerID="6fdd5fb17be38409a8883ab84d0a39e9f24a345d034f67cff54131ed39bb8f7e" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.987441 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 03 07:08:17 crc kubenswrapper[4998]: E0203 07:08:17.991921 4998 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"6fdd5fb17be38409a8883ab84d0a39e9f24a345d034f67cff54131ed39bb8f7e\": container with ID starting with 6fdd5fb17be38409a8883ab84d0a39e9f24a345d034f67cff54131ed39bb8f7e not found: ID does not exist" containerID="6fdd5fb17be38409a8883ab84d0a39e9f24a345d034f67cff54131ed39bb8f7e" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.991966 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6fdd5fb17be38409a8883ab84d0a39e9f24a345d034f67cff54131ed39bb8f7e"} err="failed to get container status \"6fdd5fb17be38409a8883ab84d0a39e9f24a345d034f67cff54131ed39bb8f7e\": rpc error: code = NotFound desc = could not find container \"6fdd5fb17be38409a8883ab84d0a39e9f24a345d034f67cff54131ed39bb8f7e\": container with ID starting with 6fdd5fb17be38409a8883ab84d0a39e9f24a345d034f67cff54131ed39bb8f7e not found: ID does not exist" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.991991 4998 scope.go:117] "RemoveContainer" containerID="1417b4979181c956361e869005d120271e96ca441fc4cc159ac55b76833e6e2b" Feb 03 07:08:17 crc kubenswrapper[4998]: E0203 07:08:17.992277 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1417b4979181c956361e869005d120271e96ca441fc4cc159ac55b76833e6e2b\": container with ID starting with 1417b4979181c956361e869005d120271e96ca441fc4cc159ac55b76833e6e2b not found: ID does not exist" containerID="1417b4979181c956361e869005d120271e96ca441fc4cc159ac55b76833e6e2b" Feb 03 07:08:17 crc kubenswrapper[4998]: I0203 07:08:17.992307 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1417b4979181c956361e869005d120271e96ca441fc4cc159ac55b76833e6e2b"} err="failed to get container status \"1417b4979181c956361e869005d120271e96ca441fc4cc159ac55b76833e6e2b\": rpc error: code = NotFound desc = could not find container \"1417b4979181c956361e869005d120271e96ca441fc4cc159ac55b76833e6e2b\": container with ID starting with 1417b4979181c956361e869005d120271e96ca441fc4cc159ac55b76833e6e2b not found: ID does not exist" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.006617 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.028662 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Feb 03 07:08:18 crc kubenswrapper[4998]: E0203 07:08:18.029148 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12b26172-a00e-4d89-9129-00fe6712e6c7" containerName="nova-api-api" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.029168 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="12b26172-a00e-4d89-9129-00fe6712e6c7" containerName="nova-api-api" Feb 03 07:08:18 crc kubenswrapper[4998]: E0203 07:08:18.029235 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12b26172-a00e-4d89-9129-00fe6712e6c7" containerName="nova-api-log" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.029244 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="12b26172-a00e-4d89-9129-00fe6712e6c7" containerName="nova-api-log" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.029935 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="12b26172-a00e-4d89-9129-00fe6712e6c7" containerName="nova-api-log" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.029982 4998 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="12b26172-a00e-4d89-9129-00fe6712e6c7" containerName="nova-api-api" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.032110 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.037631 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.037888 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.038487 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.054843 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.135470 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c71d250-1023-44da-91b9-3ed324fbfba7-config-data\") pod \"nova-api-0\" (UID: \"6c71d250-1023-44da-91b9-3ed324fbfba7\") " pod="openstack/nova-api-0" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.135646 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c71d250-1023-44da-91b9-3ed324fbfba7-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6c71d250-1023-44da-91b9-3ed324fbfba7\") " pod="openstack/nova-api-0" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.135707 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c71d250-1023-44da-91b9-3ed324fbfba7-internal-tls-certs\") pod \"nova-api-0\" (UID: \"6c71d250-1023-44da-91b9-3ed324fbfba7\") " pod="openstack/nova-api-0" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.135791 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c71d250-1023-44da-91b9-3ed324fbfba7-public-tls-certs\") pod \"nova-api-0\" (UID: \"6c71d250-1023-44da-91b9-3ed324fbfba7\") " pod="openstack/nova-api-0" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.135865 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c71d250-1023-44da-91b9-3ed324fbfba7-logs\") pod \"nova-api-0\" (UID: \"6c71d250-1023-44da-91b9-3ed324fbfba7\") " pod="openstack/nova-api-0" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.136015 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gj5gf\" (UniqueName: \"kubernetes.io/projected/6c71d250-1023-44da-91b9-3ed324fbfba7-kube-api-access-gj5gf\") pod \"nova-api-0\" (UID: \"6c71d250-1023-44da-91b9-3ed324fbfba7\") " pod="openstack/nova-api-0" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.160999 4998 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="98d719ac-e7cd-4a10-bed5-5baec4c17f48" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.198:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 03 07:08:18 crc 
kubenswrapper[4998]: I0203 07:08:18.160988 4998 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="98d719ac-e7cd-4a10-bed5-5baec4c17f48" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.198:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.238135 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gj5gf\" (UniqueName: \"kubernetes.io/projected/6c71d250-1023-44da-91b9-3ed324fbfba7-kube-api-access-gj5gf\") pod \"nova-api-0\" (UID: \"6c71d250-1023-44da-91b9-3ed324fbfba7\") " pod="openstack/nova-api-0" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.238235 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c71d250-1023-44da-91b9-3ed324fbfba7-config-data\") pod \"nova-api-0\" (UID: \"6c71d250-1023-44da-91b9-3ed324fbfba7\") " pod="openstack/nova-api-0" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.238308 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c71d250-1023-44da-91b9-3ed324fbfba7-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6c71d250-1023-44da-91b9-3ed324fbfba7\") " pod="openstack/nova-api-0" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.238334 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c71d250-1023-44da-91b9-3ed324fbfba7-internal-tls-certs\") pod \"nova-api-0\" (UID: \"6c71d250-1023-44da-91b9-3ed324fbfba7\") " pod="openstack/nova-api-0" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.238367 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c71d250-1023-44da-91b9-3ed324fbfba7-public-tls-certs\") pod \"nova-api-0\" (UID: \"6c71d250-1023-44da-91b9-3ed324fbfba7\") " pod="openstack/nova-api-0" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.238391 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c71d250-1023-44da-91b9-3ed324fbfba7-logs\") pod \"nova-api-0\" (UID: \"6c71d250-1023-44da-91b9-3ed324fbfba7\") " pod="openstack/nova-api-0" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.238858 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c71d250-1023-44da-91b9-3ed324fbfba7-logs\") pod \"nova-api-0\" (UID: \"6c71d250-1023-44da-91b9-3ed324fbfba7\") " pod="openstack/nova-api-0" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.242376 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c71d250-1023-44da-91b9-3ed324fbfba7-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6c71d250-1023-44da-91b9-3ed324fbfba7\") " pod="openstack/nova-api-0" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.242692 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c71d250-1023-44da-91b9-3ed324fbfba7-public-tls-certs\") pod \"nova-api-0\" (UID: \"6c71d250-1023-44da-91b9-3ed324fbfba7\") " pod="openstack/nova-api-0" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 
07:08:18.244062 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c71d250-1023-44da-91b9-3ed324fbfba7-config-data\") pod \"nova-api-0\" (UID: \"6c71d250-1023-44da-91b9-3ed324fbfba7\") " pod="openstack/nova-api-0" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.244932 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c71d250-1023-44da-91b9-3ed324fbfba7-internal-tls-certs\") pod \"nova-api-0\" (UID: \"6c71d250-1023-44da-91b9-3ed324fbfba7\") " pod="openstack/nova-api-0" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.249844 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-zhwfb"] Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.250932 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-zhwfb" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.253131 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.253271 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.260834 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-zhwfb"] Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.272851 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gj5gf\" (UniqueName: \"kubernetes.io/projected/6c71d250-1023-44da-91b9-3ed324fbfba7-kube-api-access-gj5gf\") pod \"nova-api-0\" (UID: \"6c71d250-1023-44da-91b9-3ed324fbfba7\") " pod="openstack/nova-api-0" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.367397 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.437746 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12b26172-a00e-4d89-9129-00fe6712e6c7" path="/var/lib/kubelet/pods/12b26172-a00e-4d89-9129-00fe6712e6c7/volumes" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.438437 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="948b79ac-ffa3-4848-8800-462eb894cb0c" path="/var/lib/kubelet/pods/948b79ac-ffa3-4848-8800-462eb894cb0c/volumes" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.441257 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c35d79b-cda7-4d32-aa7e-208fb8a12e15-config-data\") pod \"nova-cell1-cell-mapping-zhwfb\" (UID: \"8c35d79b-cda7-4d32-aa7e-208fb8a12e15\") " pod="openstack/nova-cell1-cell-mapping-zhwfb" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.441424 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dx8ml\" (UniqueName: \"kubernetes.io/projected/8c35d79b-cda7-4d32-aa7e-208fb8a12e15-kube-api-access-dx8ml\") pod \"nova-cell1-cell-mapping-zhwfb\" (UID: \"8c35d79b-cda7-4d32-aa7e-208fb8a12e15\") " pod="openstack/nova-cell1-cell-mapping-zhwfb" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.441540 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c35d79b-cda7-4d32-aa7e-208fb8a12e15-scripts\") pod \"nova-cell1-cell-mapping-zhwfb\" (UID: \"8c35d79b-cda7-4d32-aa7e-208fb8a12e15\") " pod="openstack/nova-cell1-cell-mapping-zhwfb" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.442482 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c35d79b-cda7-4d32-aa7e-208fb8a12e15-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-zhwfb\" (UID: \"8c35d79b-cda7-4d32-aa7e-208fb8a12e15\") " pod="openstack/nova-cell1-cell-mapping-zhwfb" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.544372 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dx8ml\" (UniqueName: \"kubernetes.io/projected/8c35d79b-cda7-4d32-aa7e-208fb8a12e15-kube-api-access-dx8ml\") pod \"nova-cell1-cell-mapping-zhwfb\" (UID: \"8c35d79b-cda7-4d32-aa7e-208fb8a12e15\") " pod="openstack/nova-cell1-cell-mapping-zhwfb" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.544511 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c35d79b-cda7-4d32-aa7e-208fb8a12e15-scripts\") pod \"nova-cell1-cell-mapping-zhwfb\" (UID: \"8c35d79b-cda7-4d32-aa7e-208fb8a12e15\") " pod="openstack/nova-cell1-cell-mapping-zhwfb" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.544619 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c35d79b-cda7-4d32-aa7e-208fb8a12e15-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-zhwfb\" (UID: \"8c35d79b-cda7-4d32-aa7e-208fb8a12e15\") " pod="openstack/nova-cell1-cell-mapping-zhwfb" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.544750 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/8c35d79b-cda7-4d32-aa7e-208fb8a12e15-config-data\") pod \"nova-cell1-cell-mapping-zhwfb\" (UID: \"8c35d79b-cda7-4d32-aa7e-208fb8a12e15\") " pod="openstack/nova-cell1-cell-mapping-zhwfb" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.551396 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c35d79b-cda7-4d32-aa7e-208fb8a12e15-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-zhwfb\" (UID: \"8c35d79b-cda7-4d32-aa7e-208fb8a12e15\") " pod="openstack/nova-cell1-cell-mapping-zhwfb" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.551930 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c35d79b-cda7-4d32-aa7e-208fb8a12e15-config-data\") pod \"nova-cell1-cell-mapping-zhwfb\" (UID: \"8c35d79b-cda7-4d32-aa7e-208fb8a12e15\") " pod="openstack/nova-cell1-cell-mapping-zhwfb" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.555735 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c35d79b-cda7-4d32-aa7e-208fb8a12e15-scripts\") pod \"nova-cell1-cell-mapping-zhwfb\" (UID: \"8c35d79b-cda7-4d32-aa7e-208fb8a12e15\") " pod="openstack/nova-cell1-cell-mapping-zhwfb" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.567037 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dx8ml\" (UniqueName: \"kubernetes.io/projected/8c35d79b-cda7-4d32-aa7e-208fb8a12e15-kube-api-access-dx8ml\") pod \"nova-cell1-cell-mapping-zhwfb\" (UID: \"8c35d79b-cda7-4d32-aa7e-208fb8a12e15\") " pod="openstack/nova-cell1-cell-mapping-zhwfb" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.585491 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-zhwfb" Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.883165 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 03 07:08:18 crc kubenswrapper[4998]: W0203 07:08:18.893699 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c71d250_1023_44da_91b9_3ed324fbfba7.slice/crio-ee95cea1584ee61e73c371324ba62f05b0576e87207a71b8f717e9254d6e2208 WatchSource:0}: Error finding container ee95cea1584ee61e73c371324ba62f05b0576e87207a71b8f717e9254d6e2208: Status 404 returned error can't find the container with id ee95cea1584ee61e73c371324ba62f05b0576e87207a71b8f717e9254d6e2208 Feb 03 07:08:18 crc kubenswrapper[4998]: I0203 07:08:18.905148 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"20fec019-d2d1-4625-960c-c16004cfa5aa","Type":"ContainerStarted","Data":"735a947af0313563fd152251e4f01a7d98bb038f3738b13aa0956d4060411491"} Feb 03 07:08:19 crc kubenswrapper[4998]: I0203 07:08:19.098847 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-zhwfb"] Feb 03 07:08:19 crc kubenswrapper[4998]: W0203 07:08:19.104252 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8c35d79b_cda7_4d32_aa7e_208fb8a12e15.slice/crio-0defb6474b42bab1ac43bc3f22c6be7f3b8566882107e7992c86b80575d8ba50 WatchSource:0}: Error finding container 0defb6474b42bab1ac43bc3f22c6be7f3b8566882107e7992c86b80575d8ba50: Status 404 returned error can't find the container with id 0defb6474b42bab1ac43bc3f22c6be7f3b8566882107e7992c86b80575d8ba50 Feb 03 07:08:19 crc kubenswrapper[4998]: I0203 07:08:19.918745 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"20fec019-d2d1-4625-960c-c16004cfa5aa","Type":"ContainerStarted","Data":"da3d6f3489ff3ea9baaa57c3a51e435fe24ab651749286ef5d6fb275fe2c2a08"} Feb 03 07:08:19 crc kubenswrapper[4998]: I0203 07:08:19.918981 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"20fec019-d2d1-4625-960c-c16004cfa5aa","Type":"ContainerStarted","Data":"a7442e55da5b397ed68925bc02a81c7967c0ba70c8e805f561094c6b1def8d26"} Feb 03 07:08:19 crc kubenswrapper[4998]: I0203 07:08:19.920962 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6c71d250-1023-44da-91b9-3ed324fbfba7","Type":"ContainerStarted","Data":"2bb4317391cd9f45b6912c432446b132b98c6c9bbfd37274ca8ed44a0d0d4617"} Feb 03 07:08:19 crc kubenswrapper[4998]: I0203 07:08:19.921001 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6c71d250-1023-44da-91b9-3ed324fbfba7","Type":"ContainerStarted","Data":"c8a66ccdcc6923a1b299449b51dac0d35aeba2badd6e3bc8f193ff193110aaea"} Feb 03 07:08:19 crc kubenswrapper[4998]: I0203 07:08:19.921010 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6c71d250-1023-44da-91b9-3ed324fbfba7","Type":"ContainerStarted","Data":"ee95cea1584ee61e73c371324ba62f05b0576e87207a71b8f717e9254d6e2208"} Feb 03 07:08:19 crc kubenswrapper[4998]: I0203 07:08:19.922545 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-zhwfb" 
event={"ID":"8c35d79b-cda7-4d32-aa7e-208fb8a12e15","Type":"ContainerStarted","Data":"7a850fda6a5668f7f1d0f032de0d6df1c4a6d33f4f9ca91cea233a9b53646025"} Feb 03 07:08:19 crc kubenswrapper[4998]: I0203 07:08:19.922583 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-zhwfb" event={"ID":"8c35d79b-cda7-4d32-aa7e-208fb8a12e15","Type":"ContainerStarted","Data":"0defb6474b42bab1ac43bc3f22c6be7f3b8566882107e7992c86b80575d8ba50"} Feb 03 07:08:19 crc kubenswrapper[4998]: I0203 07:08:19.953460 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.953442566 podStartE2EDuration="2.953442566s" podCreationTimestamp="2026-02-03 07:08:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:08:19.951305795 +0000 UTC m=+1338.237999611" watchObservedRunningTime="2026-02-03 07:08:19.953442566 +0000 UTC m=+1338.240136372" Feb 03 07:08:19 crc kubenswrapper[4998]: I0203 07:08:19.976752 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-zhwfb" podStartSLOduration=1.976731101 podStartE2EDuration="1.976731101s" podCreationTimestamp="2026-02-03 07:08:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:08:19.968551597 +0000 UTC m=+1338.255245413" watchObservedRunningTime="2026-02-03 07:08:19.976731101 +0000 UTC m=+1338.263424917" Feb 03 07:08:21 crc kubenswrapper[4998]: I0203 07:08:21.312292 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-74b99f9475-2cfcp" Feb 03 07:08:21 crc kubenswrapper[4998]: I0203 07:08:21.381349 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-868bc9dc59-wlq6q"] Feb 03 07:08:21 crc kubenswrapper[4998]: I0203 07:08:21.381942 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-868bc9dc59-wlq6q" podUID="9182f422-2564-4a1c-b790-d215f7984cf1" containerName="dnsmasq-dns" containerID="cri-o://0b36fd3df7994846c03a354a1f5a5dda6a31761a0bcc03334232234861a84ef8" gracePeriod=10 Feb 03 07:08:21 crc kubenswrapper[4998]: I0203 07:08:21.942406 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-868bc9dc59-wlq6q" Feb 03 07:08:21 crc kubenswrapper[4998]: I0203 07:08:21.946052 4998 generic.go:334] "Generic (PLEG): container finished" podID="9182f422-2564-4a1c-b790-d215f7984cf1" containerID="0b36fd3df7994846c03a354a1f5a5dda6a31761a0bcc03334232234861a84ef8" exitCode=0 Feb 03 07:08:21 crc kubenswrapper[4998]: I0203 07:08:21.946348 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-868bc9dc59-wlq6q" event={"ID":"9182f422-2564-4a1c-b790-d215f7984cf1","Type":"ContainerDied","Data":"0b36fd3df7994846c03a354a1f5a5dda6a31761a0bcc03334232234861a84ef8"} Feb 03 07:08:21 crc kubenswrapper[4998]: I0203 07:08:21.946614 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-868bc9dc59-wlq6q" event={"ID":"9182f422-2564-4a1c-b790-d215f7984cf1","Type":"ContainerDied","Data":"82a8a075bd0860d14f144aba7cff749644ce94d68064e6bcbf411a029cd35ea6"} Feb 03 07:08:21 crc kubenswrapper[4998]: I0203 07:08:21.946429 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-868bc9dc59-wlq6q" Feb 03 07:08:21 crc kubenswrapper[4998]: I0203 07:08:21.946656 4998 scope.go:117] "RemoveContainer" containerID="0b36fd3df7994846c03a354a1f5a5dda6a31761a0bcc03334232234861a84ef8" Feb 03 07:08:21 crc kubenswrapper[4998]: I0203 07:08:21.996000 4998 scope.go:117] "RemoveContainer" containerID="91644449567dc1e63d076d2b84eed880d36fc910238511d9c04119e36ed16794" Feb 03 07:08:22 crc kubenswrapper[4998]: I0203 07:08:22.033941 4998 scope.go:117] "RemoveContainer" containerID="0b36fd3df7994846c03a354a1f5a5dda6a31761a0bcc03334232234861a84ef8" Feb 03 07:08:22 crc kubenswrapper[4998]: E0203 07:08:22.034311 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b36fd3df7994846c03a354a1f5a5dda6a31761a0bcc03334232234861a84ef8\": container with ID starting with 0b36fd3df7994846c03a354a1f5a5dda6a31761a0bcc03334232234861a84ef8 not found: ID does not exist" containerID="0b36fd3df7994846c03a354a1f5a5dda6a31761a0bcc03334232234861a84ef8" Feb 03 07:08:22 crc kubenswrapper[4998]: I0203 07:08:22.034337 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b36fd3df7994846c03a354a1f5a5dda6a31761a0bcc03334232234861a84ef8"} err="failed to get container status \"0b36fd3df7994846c03a354a1f5a5dda6a31761a0bcc03334232234861a84ef8\": rpc error: code = NotFound desc = could not find container \"0b36fd3df7994846c03a354a1f5a5dda6a31761a0bcc03334232234861a84ef8\": container with ID starting with 0b36fd3df7994846c03a354a1f5a5dda6a31761a0bcc03334232234861a84ef8 not found: ID does not exist" Feb 03 07:08:22 crc kubenswrapper[4998]: I0203 07:08:22.034356 4998 scope.go:117] "RemoveContainer" containerID="91644449567dc1e63d076d2b84eed880d36fc910238511d9c04119e36ed16794" Feb 03 07:08:22 crc kubenswrapper[4998]: E0203 07:08:22.034568 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"91644449567dc1e63d076d2b84eed880d36fc910238511d9c04119e36ed16794\": container with ID starting with 91644449567dc1e63d076d2b84eed880d36fc910238511d9c04119e36ed16794 not found: ID does not exist" containerID="91644449567dc1e63d076d2b84eed880d36fc910238511d9c04119e36ed16794" Feb 03 07:08:22 crc kubenswrapper[4998]: I0203 07:08:22.034590 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91644449567dc1e63d076d2b84eed880d36fc910238511d9c04119e36ed16794"} err="failed to get container status \"91644449567dc1e63d076d2b84eed880d36fc910238511d9c04119e36ed16794\": rpc error: code = NotFound desc = could not find container \"91644449567dc1e63d076d2b84eed880d36fc910238511d9c04119e36ed16794\": container with ID starting with 91644449567dc1e63d076d2b84eed880d36fc910238511d9c04119e36ed16794 not found: ID does not exist" Feb 03 07:08:22 crc kubenswrapper[4998]: I0203 07:08:22.115603 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9182f422-2564-4a1c-b790-d215f7984cf1-config\") pod \"9182f422-2564-4a1c-b790-d215f7984cf1\" (UID: \"9182f422-2564-4a1c-b790-d215f7984cf1\") " Feb 03 07:08:22 crc kubenswrapper[4998]: I0203 07:08:22.115908 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9182f422-2564-4a1c-b790-d215f7984cf1-dns-svc\") pod \"9182f422-2564-4a1c-b790-d215f7984cf1\" (UID: 
\"9182f422-2564-4a1c-b790-d215f7984cf1\") " Feb 03 07:08:22 crc kubenswrapper[4998]: I0203 07:08:22.115941 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9182f422-2564-4a1c-b790-d215f7984cf1-ovsdbserver-nb\") pod \"9182f422-2564-4a1c-b790-d215f7984cf1\" (UID: \"9182f422-2564-4a1c-b790-d215f7984cf1\") " Feb 03 07:08:22 crc kubenswrapper[4998]: I0203 07:08:22.116004 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9182f422-2564-4a1c-b790-d215f7984cf1-dns-swift-storage-0\") pod \"9182f422-2564-4a1c-b790-d215f7984cf1\" (UID: \"9182f422-2564-4a1c-b790-d215f7984cf1\") " Feb 03 07:08:22 crc kubenswrapper[4998]: I0203 07:08:22.116091 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-clzlj\" (UniqueName: \"kubernetes.io/projected/9182f422-2564-4a1c-b790-d215f7984cf1-kube-api-access-clzlj\") pod \"9182f422-2564-4a1c-b790-d215f7984cf1\" (UID: \"9182f422-2564-4a1c-b790-d215f7984cf1\") " Feb 03 07:08:22 crc kubenswrapper[4998]: I0203 07:08:22.116112 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9182f422-2564-4a1c-b790-d215f7984cf1-ovsdbserver-sb\") pod \"9182f422-2564-4a1c-b790-d215f7984cf1\" (UID: \"9182f422-2564-4a1c-b790-d215f7984cf1\") " Feb 03 07:08:22 crc kubenswrapper[4998]: I0203 07:08:22.122564 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9182f422-2564-4a1c-b790-d215f7984cf1-kube-api-access-clzlj" (OuterVolumeSpecName: "kube-api-access-clzlj") pod "9182f422-2564-4a1c-b790-d215f7984cf1" (UID: "9182f422-2564-4a1c-b790-d215f7984cf1"). InnerVolumeSpecName "kube-api-access-clzlj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:08:22 crc kubenswrapper[4998]: I0203 07:08:22.172316 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9182f422-2564-4a1c-b790-d215f7984cf1-config" (OuterVolumeSpecName: "config") pod "9182f422-2564-4a1c-b790-d215f7984cf1" (UID: "9182f422-2564-4a1c-b790-d215f7984cf1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:08:22 crc kubenswrapper[4998]: I0203 07:08:22.173223 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9182f422-2564-4a1c-b790-d215f7984cf1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9182f422-2564-4a1c-b790-d215f7984cf1" (UID: "9182f422-2564-4a1c-b790-d215f7984cf1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:08:22 crc kubenswrapper[4998]: I0203 07:08:22.178570 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9182f422-2564-4a1c-b790-d215f7984cf1-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "9182f422-2564-4a1c-b790-d215f7984cf1" (UID: "9182f422-2564-4a1c-b790-d215f7984cf1"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:08:22 crc kubenswrapper[4998]: I0203 07:08:22.182810 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9182f422-2564-4a1c-b790-d215f7984cf1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9182f422-2564-4a1c-b790-d215f7984cf1" (UID: "9182f422-2564-4a1c-b790-d215f7984cf1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:08:22 crc kubenswrapper[4998]: I0203 07:08:22.187766 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9182f422-2564-4a1c-b790-d215f7984cf1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9182f422-2564-4a1c-b790-d215f7984cf1" (UID: "9182f422-2564-4a1c-b790-d215f7984cf1"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:08:22 crc kubenswrapper[4998]: I0203 07:08:22.218255 4998 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9182f422-2564-4a1c-b790-d215f7984cf1-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:22 crc kubenswrapper[4998]: I0203 07:08:22.218296 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9182f422-2564-4a1c-b790-d215f7984cf1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:22 crc kubenswrapper[4998]: I0203 07:08:22.218316 4998 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9182f422-2564-4a1c-b790-d215f7984cf1-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:22 crc kubenswrapper[4998]: I0203 07:08:22.218328 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-clzlj\" (UniqueName: \"kubernetes.io/projected/9182f422-2564-4a1c-b790-d215f7984cf1-kube-api-access-clzlj\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:22 crc kubenswrapper[4998]: I0203 07:08:22.218340 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9182f422-2564-4a1c-b790-d215f7984cf1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:22 crc kubenswrapper[4998]: I0203 07:08:22.218354 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9182f422-2564-4a1c-b790-d215f7984cf1-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:22 crc kubenswrapper[4998]: I0203 07:08:22.285947 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-868bc9dc59-wlq6q"] Feb 03 07:08:22 crc kubenswrapper[4998]: I0203 07:08:22.300942 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-868bc9dc59-wlq6q"] Feb 03 07:08:22 crc kubenswrapper[4998]: I0203 07:08:22.440487 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9182f422-2564-4a1c-b790-d215f7984cf1" path="/var/lib/kubelet/pods/9182f422-2564-4a1c-b790-d215f7984cf1/volumes" Feb 03 07:08:22 crc kubenswrapper[4998]: E0203 07:08:22.485276 4998 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9182f422_2564_4a1c_b790_d215f7984cf1.slice/crio-82a8a075bd0860d14f144aba7cff749644ce94d68064e6bcbf411a029cd35ea6\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9182f422_2564_4a1c_b790_d215f7984cf1.slice\": RecentStats: unable to find data in memory cache]" Feb 03 07:08:22 crc kubenswrapper[4998]: I0203 07:08:22.968354 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"20fec019-d2d1-4625-960c-c16004cfa5aa","Type":"ContainerStarted","Data":"bd760878032362b5e2dbbae0a17767478623ce84fedeea219d3003f8e309bbec"} Feb 03 07:08:22 crc kubenswrapper[4998]: I0203 07:08:22.968624 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 03 07:08:22 crc kubenswrapper[4998]: I0203 07:08:22.990672 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.928597972 podStartE2EDuration="6.99065013s" podCreationTimestamp="2026-02-03 07:08:16 +0000 UTC" firstStartedPulling="2026-02-03 07:08:17.590977111 +0000 UTC m=+1335.877670917" lastFinishedPulling="2026-02-03 07:08:21.653029269 +0000 UTC m=+1339.939723075" observedRunningTime="2026-02-03 07:08:22.984514754 +0000 UTC m=+1341.271208600" watchObservedRunningTime="2026-02-03 07:08:22.99065013 +0000 UTC m=+1341.277343936" Feb 03 07:08:25 crc kubenswrapper[4998]: I0203 07:08:25.001198 4998 generic.go:334] "Generic (PLEG): container finished" podID="8c35d79b-cda7-4d32-aa7e-208fb8a12e15" containerID="7a850fda6a5668f7f1d0f032de0d6df1c4a6d33f4f9ca91cea233a9b53646025" exitCode=0 Feb 03 07:08:25 crc kubenswrapper[4998]: I0203 07:08:25.001581 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-zhwfb" event={"ID":"8c35d79b-cda7-4d32-aa7e-208fb8a12e15","Type":"ContainerDied","Data":"7a850fda6a5668f7f1d0f032de0d6df1c4a6d33f4f9ca91cea233a9b53646025"} Feb 03 07:08:26 crc kubenswrapper[4998]: I0203 07:08:26.476372 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-zhwfb" Feb 03 07:08:26 crc kubenswrapper[4998]: I0203 07:08:26.643521 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c35d79b-cda7-4d32-aa7e-208fb8a12e15-scripts\") pod \"8c35d79b-cda7-4d32-aa7e-208fb8a12e15\" (UID: \"8c35d79b-cda7-4d32-aa7e-208fb8a12e15\") " Feb 03 07:08:26 crc kubenswrapper[4998]: I0203 07:08:26.643906 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c35d79b-cda7-4d32-aa7e-208fb8a12e15-config-data\") pod \"8c35d79b-cda7-4d32-aa7e-208fb8a12e15\" (UID: \"8c35d79b-cda7-4d32-aa7e-208fb8a12e15\") " Feb 03 07:08:26 crc kubenswrapper[4998]: I0203 07:08:26.644245 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dx8ml\" (UniqueName: \"kubernetes.io/projected/8c35d79b-cda7-4d32-aa7e-208fb8a12e15-kube-api-access-dx8ml\") pod \"8c35d79b-cda7-4d32-aa7e-208fb8a12e15\" (UID: \"8c35d79b-cda7-4d32-aa7e-208fb8a12e15\") " Feb 03 07:08:26 crc kubenswrapper[4998]: I0203 07:08:26.644341 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c35d79b-cda7-4d32-aa7e-208fb8a12e15-combined-ca-bundle\") pod \"8c35d79b-cda7-4d32-aa7e-208fb8a12e15\" (UID: \"8c35d79b-cda7-4d32-aa7e-208fb8a12e15\") " Feb 03 07:08:26 crc kubenswrapper[4998]: I0203 07:08:26.649820 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c35d79b-cda7-4d32-aa7e-208fb8a12e15-scripts" (OuterVolumeSpecName: "scripts") pod "8c35d79b-cda7-4d32-aa7e-208fb8a12e15" (UID: "8c35d79b-cda7-4d32-aa7e-208fb8a12e15"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:08:26 crc kubenswrapper[4998]: I0203 07:08:26.654968 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c35d79b-cda7-4d32-aa7e-208fb8a12e15-kube-api-access-dx8ml" (OuterVolumeSpecName: "kube-api-access-dx8ml") pod "8c35d79b-cda7-4d32-aa7e-208fb8a12e15" (UID: "8c35d79b-cda7-4d32-aa7e-208fb8a12e15"). InnerVolumeSpecName "kube-api-access-dx8ml". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:08:26 crc kubenswrapper[4998]: I0203 07:08:26.672313 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c35d79b-cda7-4d32-aa7e-208fb8a12e15-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8c35d79b-cda7-4d32-aa7e-208fb8a12e15" (UID: "8c35d79b-cda7-4d32-aa7e-208fb8a12e15"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:08:26 crc kubenswrapper[4998]: I0203 07:08:26.672908 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c35d79b-cda7-4d32-aa7e-208fb8a12e15-config-data" (OuterVolumeSpecName: "config-data") pod "8c35d79b-cda7-4d32-aa7e-208fb8a12e15" (UID: "8c35d79b-cda7-4d32-aa7e-208fb8a12e15"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:08:26 crc kubenswrapper[4998]: I0203 07:08:26.746419 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dx8ml\" (UniqueName: \"kubernetes.io/projected/8c35d79b-cda7-4d32-aa7e-208fb8a12e15-kube-api-access-dx8ml\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:26 crc kubenswrapper[4998]: I0203 07:08:26.746488 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c35d79b-cda7-4d32-aa7e-208fb8a12e15-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:26 crc kubenswrapper[4998]: I0203 07:08:26.746502 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c35d79b-cda7-4d32-aa7e-208fb8a12e15-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:26 crc kubenswrapper[4998]: I0203 07:08:26.746511 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c35d79b-cda7-4d32-aa7e-208fb8a12e15-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:27 crc kubenswrapper[4998]: I0203 07:08:27.037675 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-zhwfb" event={"ID":"8c35d79b-cda7-4d32-aa7e-208fb8a12e15","Type":"ContainerDied","Data":"0defb6474b42bab1ac43bc3f22c6be7f3b8566882107e7992c86b80575d8ba50"} Feb 03 07:08:27 crc kubenswrapper[4998]: I0203 07:08:27.037734 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0defb6474b42bab1ac43bc3f22c6be7f3b8566882107e7992c86b80575d8ba50" Feb 03 07:08:27 crc kubenswrapper[4998]: I0203 07:08:27.037821 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-zhwfb" Feb 03 07:08:27 crc kubenswrapper[4998]: I0203 07:08:27.159901 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Feb 03 07:08:27 crc kubenswrapper[4998]: I0203 07:08:27.160995 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Feb 03 07:08:27 crc kubenswrapper[4998]: I0203 07:08:27.165227 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Feb 03 07:08:27 crc kubenswrapper[4998]: I0203 07:08:27.207183 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 03 07:08:27 crc kubenswrapper[4998]: I0203 07:08:27.207558 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6c71d250-1023-44da-91b9-3ed324fbfba7" containerName="nova-api-log" containerID="cri-o://c8a66ccdcc6923a1b299449b51dac0d35aeba2badd6e3bc8f193ff193110aaea" gracePeriod=30 Feb 03 07:08:27 crc kubenswrapper[4998]: I0203 07:08:27.207836 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6c71d250-1023-44da-91b9-3ed324fbfba7" containerName="nova-api-api" containerID="cri-o://2bb4317391cd9f45b6912c432446b132b98c6c9bbfd37274ca8ed44a0d0d4617" gracePeriod=30 Feb 03 07:08:27 crc kubenswrapper[4998]: I0203 07:08:27.219812 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 07:08:27 crc kubenswrapper[4998]: I0203 07:08:27.220061 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="c3c7cdda-009a-4dea-a2ab-3e093e5c39c6" 
containerName="nova-scheduler-scheduler" containerID="cri-o://5199bbaab58d095e3c5146f0fbe53ad8df8668270c1799d5d4a1d419d1236d3c" gracePeriod=30 Feb 03 07:08:27 crc kubenswrapper[4998]: I0203 07:08:27.266445 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.011831 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.046151 4998 generic.go:334] "Generic (PLEG): container finished" podID="c3c7cdda-009a-4dea-a2ab-3e093e5c39c6" containerID="5199bbaab58d095e3c5146f0fbe53ad8df8668270c1799d5d4a1d419d1236d3c" exitCode=0 Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.046209 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c3c7cdda-009a-4dea-a2ab-3e093e5c39c6","Type":"ContainerDied","Data":"5199bbaab58d095e3c5146f0fbe53ad8df8668270c1799d5d4a1d419d1236d3c"} Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.047801 4998 generic.go:334] "Generic (PLEG): container finished" podID="6c71d250-1023-44da-91b9-3ed324fbfba7" containerID="2bb4317391cd9f45b6912c432446b132b98c6c9bbfd37274ca8ed44a0d0d4617" exitCode=0 Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.047831 4998 generic.go:334] "Generic (PLEG): container finished" podID="6c71d250-1023-44da-91b9-3ed324fbfba7" containerID="c8a66ccdcc6923a1b299449b51dac0d35aeba2badd6e3bc8f193ff193110aaea" exitCode=143 Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.047854 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.047918 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6c71d250-1023-44da-91b9-3ed324fbfba7","Type":"ContainerDied","Data":"2bb4317391cd9f45b6912c432446b132b98c6c9bbfd37274ca8ed44a0d0d4617"} Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.047944 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6c71d250-1023-44da-91b9-3ed324fbfba7","Type":"ContainerDied","Data":"c8a66ccdcc6923a1b299449b51dac0d35aeba2badd6e3bc8f193ff193110aaea"} Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.047955 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6c71d250-1023-44da-91b9-3ed324fbfba7","Type":"ContainerDied","Data":"ee95cea1584ee61e73c371324ba62f05b0576e87207a71b8f717e9254d6e2208"} Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.047969 4998 scope.go:117] "RemoveContainer" containerID="2bb4317391cd9f45b6912c432446b132b98c6c9bbfd37274ca8ed44a0d0d4617" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.059464 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.066828 4998 scope.go:117] "RemoveContainer" containerID="c8a66ccdcc6923a1b299449b51dac0d35aeba2badd6e3bc8f193ff193110aaea" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.102642 4998 scope.go:117] "RemoveContainer" containerID="2bb4317391cd9f45b6912c432446b132b98c6c9bbfd37274ca8ed44a0d0d4617" Feb 03 07:08:28 crc kubenswrapper[4998]: E0203 07:08:28.103893 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2bb4317391cd9f45b6912c432446b132b98c6c9bbfd37274ca8ed44a0d0d4617\": 
container with ID starting with 2bb4317391cd9f45b6912c432446b132b98c6c9bbfd37274ca8ed44a0d0d4617 not found: ID does not exist" containerID="2bb4317391cd9f45b6912c432446b132b98c6c9bbfd37274ca8ed44a0d0d4617" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.103935 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2bb4317391cd9f45b6912c432446b132b98c6c9bbfd37274ca8ed44a0d0d4617"} err="failed to get container status \"2bb4317391cd9f45b6912c432446b132b98c6c9bbfd37274ca8ed44a0d0d4617\": rpc error: code = NotFound desc = could not find container \"2bb4317391cd9f45b6912c432446b132b98c6c9bbfd37274ca8ed44a0d0d4617\": container with ID starting with 2bb4317391cd9f45b6912c432446b132b98c6c9bbfd37274ca8ed44a0d0d4617 not found: ID does not exist" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.103968 4998 scope.go:117] "RemoveContainer" containerID="c8a66ccdcc6923a1b299449b51dac0d35aeba2badd6e3bc8f193ff193110aaea" Feb 03 07:08:28 crc kubenswrapper[4998]: E0203 07:08:28.104431 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c8a66ccdcc6923a1b299449b51dac0d35aeba2badd6e3bc8f193ff193110aaea\": container with ID starting with c8a66ccdcc6923a1b299449b51dac0d35aeba2badd6e3bc8f193ff193110aaea not found: ID does not exist" containerID="c8a66ccdcc6923a1b299449b51dac0d35aeba2badd6e3bc8f193ff193110aaea" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.104474 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8a66ccdcc6923a1b299449b51dac0d35aeba2badd6e3bc8f193ff193110aaea"} err="failed to get container status \"c8a66ccdcc6923a1b299449b51dac0d35aeba2badd6e3bc8f193ff193110aaea\": rpc error: code = NotFound desc = could not find container \"c8a66ccdcc6923a1b299449b51dac0d35aeba2badd6e3bc8f193ff193110aaea\": container with ID starting with c8a66ccdcc6923a1b299449b51dac0d35aeba2badd6e3bc8f193ff193110aaea not found: ID does not exist" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.104505 4998 scope.go:117] "RemoveContainer" containerID="2bb4317391cd9f45b6912c432446b132b98c6c9bbfd37274ca8ed44a0d0d4617" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.106228 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2bb4317391cd9f45b6912c432446b132b98c6c9bbfd37274ca8ed44a0d0d4617"} err="failed to get container status \"2bb4317391cd9f45b6912c432446b132b98c6c9bbfd37274ca8ed44a0d0d4617\": rpc error: code = NotFound desc = could not find container \"2bb4317391cd9f45b6912c432446b132b98c6c9bbfd37274ca8ed44a0d0d4617\": container with ID starting with 2bb4317391cd9f45b6912c432446b132b98c6c9bbfd37274ca8ed44a0d0d4617 not found: ID does not exist" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.106279 4998 scope.go:117] "RemoveContainer" containerID="c8a66ccdcc6923a1b299449b51dac0d35aeba2badd6e3bc8f193ff193110aaea" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.106875 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8a66ccdcc6923a1b299449b51dac0d35aeba2badd6e3bc8f193ff193110aaea"} err="failed to get container status \"c8a66ccdcc6923a1b299449b51dac0d35aeba2badd6e3bc8f193ff193110aaea\": rpc error: code = NotFound desc = could not find container \"c8a66ccdcc6923a1b299449b51dac0d35aeba2badd6e3bc8f193ff193110aaea\": container with ID starting with c8a66ccdcc6923a1b299449b51dac0d35aeba2badd6e3bc8f193ff193110aaea not 
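
Note the two exit codes in the PLEG entries above: nova-api-api leaves with exitCode=0 (it shut down cleanly inside its 30s grace period), while the nova-api-log sidecar leaves with exitCode=143, the usual 128+signal encoding for a process killed by SIGTERM (signal 15). A one-liner stating that convention (my own helper, not kubelet code):

    package main

    import (
    	"fmt"
    	"syscall"
    )

    // exitCodeForSignal mirrors the shell convention of 128+N for death by
    // signal N; SIGTERM is 15, so 128+15 = 143, the code logged above.
    func exitCodeForSignal(sig syscall.Signal) int {
    	return 128 + int(sig)
    }

    func main() {
    	fmt.Println(exitCodeForSignal(syscall.SIGTERM)) // 143
    }
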
found: ID does not exist" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.179072 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c71d250-1023-44da-91b9-3ed324fbfba7-config-data\") pod \"6c71d250-1023-44da-91b9-3ed324fbfba7\" (UID: \"6c71d250-1023-44da-91b9-3ed324fbfba7\") " Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.179178 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c71d250-1023-44da-91b9-3ed324fbfba7-logs\") pod \"6c71d250-1023-44da-91b9-3ed324fbfba7\" (UID: \"6c71d250-1023-44da-91b9-3ed324fbfba7\") " Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.179258 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c71d250-1023-44da-91b9-3ed324fbfba7-public-tls-certs\") pod \"6c71d250-1023-44da-91b9-3ed324fbfba7\" (UID: \"6c71d250-1023-44da-91b9-3ed324fbfba7\") " Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.179359 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c71d250-1023-44da-91b9-3ed324fbfba7-internal-tls-certs\") pod \"6c71d250-1023-44da-91b9-3ed324fbfba7\" (UID: \"6c71d250-1023-44da-91b9-3ed324fbfba7\") " Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.179452 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gj5gf\" (UniqueName: \"kubernetes.io/projected/6c71d250-1023-44da-91b9-3ed324fbfba7-kube-api-access-gj5gf\") pod \"6c71d250-1023-44da-91b9-3ed324fbfba7\" (UID: \"6c71d250-1023-44da-91b9-3ed324fbfba7\") " Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.179477 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c71d250-1023-44da-91b9-3ed324fbfba7-combined-ca-bundle\") pod \"6c71d250-1023-44da-91b9-3ed324fbfba7\" (UID: \"6c71d250-1023-44da-91b9-3ed324fbfba7\") " Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.179544 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c71d250-1023-44da-91b9-3ed324fbfba7-logs" (OuterVolumeSpecName: "logs") pod "6c71d250-1023-44da-91b9-3ed324fbfba7" (UID: "6c71d250-1023-44da-91b9-3ed324fbfba7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.180022 4998 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c71d250-1023-44da-91b9-3ed324fbfba7-logs\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.184638 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c71d250-1023-44da-91b9-3ed324fbfba7-kube-api-access-gj5gf" (OuterVolumeSpecName: "kube-api-access-gj5gf") pod "6c71d250-1023-44da-91b9-3ed324fbfba7" (UID: "6c71d250-1023-44da-91b9-3ed324fbfba7"). InnerVolumeSpecName "kube-api-access-gj5gf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.212578 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c71d250-1023-44da-91b9-3ed324fbfba7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6c71d250-1023-44da-91b9-3ed324fbfba7" (UID: "6c71d250-1023-44da-91b9-3ed324fbfba7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.215407 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c71d250-1023-44da-91b9-3ed324fbfba7-config-data" (OuterVolumeSpecName: "config-data") pod "6c71d250-1023-44da-91b9-3ed324fbfba7" (UID: "6c71d250-1023-44da-91b9-3ed324fbfba7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.240760 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c71d250-1023-44da-91b9-3ed324fbfba7-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "6c71d250-1023-44da-91b9-3ed324fbfba7" (UID: "6c71d250-1023-44da-91b9-3ed324fbfba7"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.244798 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c71d250-1023-44da-91b9-3ed324fbfba7-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "6c71d250-1023-44da-91b9-3ed324fbfba7" (UID: "6c71d250-1023-44da-91b9-3ed324fbfba7"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.281844 4998 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c71d250-1023-44da-91b9-3ed324fbfba7-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.281883 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gj5gf\" (UniqueName: \"kubernetes.io/projected/6c71d250-1023-44da-91b9-3ed324fbfba7-kube-api-access-gj5gf\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.281896 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c71d250-1023-44da-91b9-3ed324fbfba7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.281909 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c71d250-1023-44da-91b9-3ed324fbfba7-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.281923 4998 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c71d250-1023-44da-91b9-3ed324fbfba7-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.383719 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.400719 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.411033 4998 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Feb 03 07:08:28 crc kubenswrapper[4998]: E0203 07:08:28.414425 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c71d250-1023-44da-91b9-3ed324fbfba7" containerName="nova-api-log" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.414455 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c71d250-1023-44da-91b9-3ed324fbfba7" containerName="nova-api-log" Feb 03 07:08:28 crc kubenswrapper[4998]: E0203 07:08:28.414472 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9182f422-2564-4a1c-b790-d215f7984cf1" containerName="init" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.414478 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="9182f422-2564-4a1c-b790-d215f7984cf1" containerName="init" Feb 03 07:08:28 crc kubenswrapper[4998]: E0203 07:08:28.414489 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9182f422-2564-4a1c-b790-d215f7984cf1" containerName="dnsmasq-dns" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.414498 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="9182f422-2564-4a1c-b790-d215f7984cf1" containerName="dnsmasq-dns" Feb 03 07:08:28 crc kubenswrapper[4998]: E0203 07:08:28.414514 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c71d250-1023-44da-91b9-3ed324fbfba7" containerName="nova-api-api" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.414521 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c71d250-1023-44da-91b9-3ed324fbfba7" containerName="nova-api-api" Feb 03 07:08:28 crc kubenswrapper[4998]: E0203 07:08:28.414538 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c35d79b-cda7-4d32-aa7e-208fb8a12e15" containerName="nova-manage" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.414543 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c35d79b-cda7-4d32-aa7e-208fb8a12e15" containerName="nova-manage" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.414717 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c71d250-1023-44da-91b9-3ed324fbfba7" containerName="nova-api-log" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.414740 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c35d79b-cda7-4d32-aa7e-208fb8a12e15" containerName="nova-manage" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.414748 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="9182f422-2564-4a1c-b790-d215f7984cf1" containerName="dnsmasq-dns" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.414760 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c71d250-1023-44da-91b9-3ed324fbfba7" containerName="nova-api-api" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.415745 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.422424 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.422914 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.423118 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.426561 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.452057 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c71d250-1023-44da-91b9-3ed324fbfba7" path="/var/lib/kubelet/pods/6c71d250-1023-44da-91b9-3ed324fbfba7/volumes" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.597446 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1280d3d-d626-4af9-b262-93fea6a5bbc9-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b1280d3d-d626-4af9-b262-93fea6a5bbc9\") " pod="openstack/nova-api-0" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.597563 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1280d3d-d626-4af9-b262-93fea6a5bbc9-config-data\") pod \"nova-api-0\" (UID: \"b1280d3d-d626-4af9-b262-93fea6a5bbc9\") " pod="openstack/nova-api-0" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.597603 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7sbff\" (UniqueName: \"kubernetes.io/projected/b1280d3d-d626-4af9-b262-93fea6a5bbc9-kube-api-access-7sbff\") pod \"nova-api-0\" (UID: \"b1280d3d-d626-4af9-b262-93fea6a5bbc9\") " pod="openstack/nova-api-0" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.597690 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1280d3d-d626-4af9-b262-93fea6a5bbc9-public-tls-certs\") pod \"nova-api-0\" (UID: \"b1280d3d-d626-4af9-b262-93fea6a5bbc9\") " pod="openstack/nova-api-0" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.597735 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1280d3d-d626-4af9-b262-93fea6a5bbc9-logs\") pod \"nova-api-0\" (UID: \"b1280d3d-d626-4af9-b262-93fea6a5bbc9\") " pod="openstack/nova-api-0" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.597824 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1280d3d-d626-4af9-b262-93fea6a5bbc9-internal-tls-certs\") pod \"nova-api-0\" (UID: \"b1280d3d-d626-4af9-b262-93fea6a5bbc9\") " pod="openstack/nova-api-0" Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.699603 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1280d3d-d626-4af9-b262-93fea6a5bbc9-config-data\") pod \"nova-api-0\" (UID: \"b1280d3d-d626-4af9-b262-93fea6a5bbc9\") " pod="openstack/nova-api-0" Feb 03 07:08:28 crc 
Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.699641 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7sbff\" (UniqueName: \"kubernetes.io/projected/b1280d3d-d626-4af9-b262-93fea6a5bbc9-kube-api-access-7sbff\") pod \"nova-api-0\" (UID: \"b1280d3d-d626-4af9-b262-93fea6a5bbc9\") " pod="openstack/nova-api-0"
Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.699699 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1280d3d-d626-4af9-b262-93fea6a5bbc9-public-tls-certs\") pod \"nova-api-0\" (UID: \"b1280d3d-d626-4af9-b262-93fea6a5bbc9\") " pod="openstack/nova-api-0"
Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.699714 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1280d3d-d626-4af9-b262-93fea6a5bbc9-logs\") pod \"nova-api-0\" (UID: \"b1280d3d-d626-4af9-b262-93fea6a5bbc9\") " pod="openstack/nova-api-0"
Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.699753 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1280d3d-d626-4af9-b262-93fea6a5bbc9-internal-tls-certs\") pod \"nova-api-0\" (UID: \"b1280d3d-d626-4af9-b262-93fea6a5bbc9\") " pod="openstack/nova-api-0"
Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.699832 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1280d3d-d626-4af9-b262-93fea6a5bbc9-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b1280d3d-d626-4af9-b262-93fea6a5bbc9\") " pod="openstack/nova-api-0"
Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.700956 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1280d3d-d626-4af9-b262-93fea6a5bbc9-logs\") pod \"nova-api-0\" (UID: \"b1280d3d-d626-4af9-b262-93fea6a5bbc9\") " pod="openstack/nova-api-0"
Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.705740 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1280d3d-d626-4af9-b262-93fea6a5bbc9-config-data\") pod \"nova-api-0\" (UID: \"b1280d3d-d626-4af9-b262-93fea6a5bbc9\") " pod="openstack/nova-api-0"
Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.706155 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1280d3d-d626-4af9-b262-93fea6a5bbc9-internal-tls-certs\") pod \"nova-api-0\" (UID: \"b1280d3d-d626-4af9-b262-93fea6a5bbc9\") " pod="openstack/nova-api-0"
Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.707013 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1280d3d-d626-4af9-b262-93fea6a5bbc9-public-tls-certs\") pod \"nova-api-0\" (UID: \"b1280d3d-d626-4af9-b262-93fea6a5bbc9\") " pod="openstack/nova-api-0"
Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.713847 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1280d3d-d626-4af9-b262-93fea6a5bbc9-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b1280d3d-d626-4af9-b262-93fea6a5bbc9\") " pod="openstack/nova-api-0"
Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.722561 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7sbff\" (UniqueName: \"kubernetes.io/projected/b1280d3d-d626-4af9-b262-93fea6a5bbc9-kube-api-access-7sbff\") pod \"nova-api-0\" (UID: \"b1280d3d-d626-4af9-b262-93fea6a5bbc9\") " pod="openstack/nova-api-0"
Feb 03 07:08:28 crc kubenswrapper[4998]: I0203 07:08:28.732656 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Feb 03 07:08:28 crc kubenswrapper[4998]: E0203 07:08:28.814590 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 5199bbaab58d095e3c5146f0fbe53ad8df8668270c1799d5d4a1d419d1236d3c is running failed: container process not found" containerID="5199bbaab58d095e3c5146f0fbe53ad8df8668270c1799d5d4a1d419d1236d3c" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Feb 03 07:08:28 crc kubenswrapper[4998]: E0203 07:08:28.815226 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 5199bbaab58d095e3c5146f0fbe53ad8df8668270c1799d5d4a1d419d1236d3c is running failed: container process not found" containerID="5199bbaab58d095e3c5146f0fbe53ad8df8668270c1799d5d4a1d419d1236d3c" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Feb 03 07:08:28 crc kubenswrapper[4998]: E0203 07:08:28.819040 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 5199bbaab58d095e3c5146f0fbe53ad8df8668270c1799d5d4a1d419d1236d3c is running failed: container process not found" containerID="5199bbaab58d095e3c5146f0fbe53ad8df8668270c1799d5d4a1d419d1236d3c" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Feb 03 07:08:28 crc kubenswrapper[4998]: E0203 07:08:28.819093 4998 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 5199bbaab58d095e3c5146f0fbe53ad8df8668270c1799d5d4a1d419d1236d3c is running failed: container process not found" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="c3c7cdda-009a-4dea-a2ab-3e093e5c39c6" containerName="nova-scheduler-scheduler"
Need to start a new one" pod="openstack/nova-scheduler-0" Feb 03 07:08:29 crc kubenswrapper[4998]: I0203 07:08:29.010439 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vqxzb\" (UniqueName: \"kubernetes.io/projected/c3c7cdda-009a-4dea-a2ab-3e093e5c39c6-kube-api-access-vqxzb\") pod \"c3c7cdda-009a-4dea-a2ab-3e093e5c39c6\" (UID: \"c3c7cdda-009a-4dea-a2ab-3e093e5c39c6\") " Feb 03 07:08:29 crc kubenswrapper[4998]: I0203 07:08:29.010547 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3c7cdda-009a-4dea-a2ab-3e093e5c39c6-config-data\") pod \"c3c7cdda-009a-4dea-a2ab-3e093e5c39c6\" (UID: \"c3c7cdda-009a-4dea-a2ab-3e093e5c39c6\") " Feb 03 07:08:29 crc kubenswrapper[4998]: I0203 07:08:29.010578 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3c7cdda-009a-4dea-a2ab-3e093e5c39c6-combined-ca-bundle\") pod \"c3c7cdda-009a-4dea-a2ab-3e093e5c39c6\" (UID: \"c3c7cdda-009a-4dea-a2ab-3e093e5c39c6\") " Feb 03 07:08:29 crc kubenswrapper[4998]: I0203 07:08:29.033093 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3c7cdda-009a-4dea-a2ab-3e093e5c39c6-kube-api-access-vqxzb" (OuterVolumeSpecName: "kube-api-access-vqxzb") pod "c3c7cdda-009a-4dea-a2ab-3e093e5c39c6" (UID: "c3c7cdda-009a-4dea-a2ab-3e093e5c39c6"). InnerVolumeSpecName "kube-api-access-vqxzb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:08:29 crc kubenswrapper[4998]: I0203 07:08:29.076117 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3c7cdda-009a-4dea-a2ab-3e093e5c39c6-config-data" (OuterVolumeSpecName: "config-data") pod "c3c7cdda-009a-4dea-a2ab-3e093e5c39c6" (UID: "c3c7cdda-009a-4dea-a2ab-3e093e5c39c6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:08:29 crc kubenswrapper[4998]: I0203 07:08:29.090824 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="98d719ac-e7cd-4a10-bed5-5baec4c17f48" containerName="nova-metadata-log" containerID="cri-o://8f8e875d562862d9a9107a664097fccc0dd492a1464e83ac0f4f306928f35903" gracePeriod=30 Feb 03 07:08:29 crc kubenswrapper[4998]: I0203 07:08:29.090960 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Feb 03 07:08:29 crc kubenswrapper[4998]: I0203 07:08:29.091099 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c3c7cdda-009a-4dea-a2ab-3e093e5c39c6","Type":"ContainerDied","Data":"573e6aa57b9c1710b70a9c2c1fdaf3f3927711abb65e4a2f388232d317600e00"} Feb 03 07:08:29 crc kubenswrapper[4998]: I0203 07:08:29.091166 4998 scope.go:117] "RemoveContainer" containerID="5199bbaab58d095e3c5146f0fbe53ad8df8668270c1799d5d4a1d419d1236d3c" Feb 03 07:08:29 crc kubenswrapper[4998]: I0203 07:08:29.091112 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="98d719ac-e7cd-4a10-bed5-5baec4c17f48" containerName="nova-metadata-metadata" containerID="cri-o://d83b423a6de677a68c565b09c55ffc1294f4a4cbf02653fe75a7044c689b074e" gracePeriod=30 Feb 03 07:08:29 crc kubenswrapper[4998]: I0203 07:08:29.094598 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3c7cdda-009a-4dea-a2ab-3e093e5c39c6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c3c7cdda-009a-4dea-a2ab-3e093e5c39c6" (UID: "c3c7cdda-009a-4dea-a2ab-3e093e5c39c6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:08:29 crc kubenswrapper[4998]: I0203 07:08:29.112858 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vqxzb\" (UniqueName: \"kubernetes.io/projected/c3c7cdda-009a-4dea-a2ab-3e093e5c39c6-kube-api-access-vqxzb\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:29 crc kubenswrapper[4998]: I0203 07:08:29.112894 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3c7cdda-009a-4dea-a2ab-3e093e5c39c6-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:29 crc kubenswrapper[4998]: I0203 07:08:29.112909 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3c7cdda-009a-4dea-a2ab-3e093e5c39c6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:29 crc kubenswrapper[4998]: W0203 07:08:29.302834 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb1280d3d_d626_4af9_b262_93fea6a5bbc9.slice/crio-3cac9d8cd817ada51f05662f9efe986e05e76cc89cff3d334f67cffa2cc3efca WatchSource:0}: Error finding container 3cac9d8cd817ada51f05662f9efe986e05e76cc89cff3d334f67cffa2cc3efca: Status 404 returned error can't find the container with id 3cac9d8cd817ada51f05662f9efe986e05e76cc89cff3d334f67cffa2cc3efca Feb 03 07:08:29 crc kubenswrapper[4998]: I0203 07:08:29.303226 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 03 07:08:29 crc kubenswrapper[4998]: I0203 07:08:29.449612 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 07:08:29 crc kubenswrapper[4998]: I0203 07:08:29.502855 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 07:08:29 crc kubenswrapper[4998]: I0203 07:08:29.514565 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 07:08:29 crc kubenswrapper[4998]: E0203 07:08:29.515139 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3c7cdda-009a-4dea-a2ab-3e093e5c39c6" containerName="nova-scheduler-scheduler" Feb 03 07:08:29 crc kubenswrapper[4998]: 
Feb 03 07:08:29 crc kubenswrapper[4998]: I0203 07:08:29.515167 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3c7cdda-009a-4dea-a2ab-3e093e5c39c6" containerName="nova-scheduler-scheduler"
Feb 03 07:08:29 crc kubenswrapper[4998]: I0203 07:08:29.515430 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3c7cdda-009a-4dea-a2ab-3e093e5c39c6" containerName="nova-scheduler-scheduler"
Feb 03 07:08:29 crc kubenswrapper[4998]: I0203 07:08:29.516627 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Feb 03 07:08:29 crc kubenswrapper[4998]: I0203 07:08:29.519190 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Feb 03 07:08:29 crc kubenswrapper[4998]: I0203 07:08:29.527620 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Feb 03 07:08:29 crc kubenswrapper[4998]: I0203 07:08:29.630900 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a94b6e1-cdf7-4088-9f55-60457fa411f4-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8a94b6e1-cdf7-4088-9f55-60457fa411f4\") " pod="openstack/nova-scheduler-0"
Feb 03 07:08:29 crc kubenswrapper[4998]: I0203 07:08:29.631151 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a94b6e1-cdf7-4088-9f55-60457fa411f4-config-data\") pod \"nova-scheduler-0\" (UID: \"8a94b6e1-cdf7-4088-9f55-60457fa411f4\") " pod="openstack/nova-scheduler-0"
Feb 03 07:08:29 crc kubenswrapper[4998]: I0203 07:08:29.631305 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bt9ps\" (UniqueName: \"kubernetes.io/projected/8a94b6e1-cdf7-4088-9f55-60457fa411f4-kube-api-access-bt9ps\") pod \"nova-scheduler-0\" (UID: \"8a94b6e1-cdf7-4088-9f55-60457fa411f4\") " pod="openstack/nova-scheduler-0"
Feb 03 07:08:29 crc kubenswrapper[4998]: I0203 07:08:29.732953 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a94b6e1-cdf7-4088-9f55-60457fa411f4-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8a94b6e1-cdf7-4088-9f55-60457fa411f4\") " pod="openstack/nova-scheduler-0"
Feb 03 07:08:29 crc kubenswrapper[4998]: I0203 07:08:29.733327 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a94b6e1-cdf7-4088-9f55-60457fa411f4-config-data\") pod \"nova-scheduler-0\" (UID: \"8a94b6e1-cdf7-4088-9f55-60457fa411f4\") " pod="openstack/nova-scheduler-0"
Feb 03 07:08:29 crc kubenswrapper[4998]: I0203 07:08:29.733389 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bt9ps\" (UniqueName: \"kubernetes.io/projected/8a94b6e1-cdf7-4088-9f55-60457fa411f4-kube-api-access-bt9ps\") pod \"nova-scheduler-0\" (UID: \"8a94b6e1-cdf7-4088-9f55-60457fa411f4\") " pod="openstack/nova-scheduler-0"
Feb 03 07:08:29 crc kubenswrapper[4998]: I0203 07:08:29.741657 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a94b6e1-cdf7-4088-9f55-60457fa411f4-config-data\") pod \"nova-scheduler-0\" (UID: \"8a94b6e1-cdf7-4088-9f55-60457fa411f4\") " pod="openstack/nova-scheduler-0"
Feb 03 07:08:29 crc kubenswrapper[4998]: I0203 07:08:29.742310 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a94b6e1-cdf7-4088-9f55-60457fa411f4-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8a94b6e1-cdf7-4088-9f55-60457fa411f4\") " pod="openstack/nova-scheduler-0"
Feb 03 07:08:29 crc kubenswrapper[4998]: I0203 07:08:29.751621 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bt9ps\" (UniqueName: \"kubernetes.io/projected/8a94b6e1-cdf7-4088-9f55-60457fa411f4-kube-api-access-bt9ps\") pod \"nova-scheduler-0\" (UID: \"8a94b6e1-cdf7-4088-9f55-60457fa411f4\") " pod="openstack/nova-scheduler-0"
Feb 03 07:08:29 crc kubenswrapper[4998]: I0203 07:08:29.993515 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Feb 03 07:08:30 crc kubenswrapper[4998]: I0203 07:08:30.102099 4998 generic.go:334] "Generic (PLEG): container finished" podID="98d719ac-e7cd-4a10-bed5-5baec4c17f48" containerID="8f8e875d562862d9a9107a664097fccc0dd492a1464e83ac0f4f306928f35903" exitCode=143
Feb 03 07:08:30 crc kubenswrapper[4998]: I0203 07:08:30.102198 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"98d719ac-e7cd-4a10-bed5-5baec4c17f48","Type":"ContainerDied","Data":"8f8e875d562862d9a9107a664097fccc0dd492a1464e83ac0f4f306928f35903"}
Feb 03 07:08:30 crc kubenswrapper[4998]: I0203 07:08:30.105697 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b1280d3d-d626-4af9-b262-93fea6a5bbc9","Type":"ContainerStarted","Data":"f4738971fa2556954577c66479ee76038051cc7ed9700481aa4de2594ba0b983"}
Feb 03 07:08:30 crc kubenswrapper[4998]: I0203 07:08:30.105754 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b1280d3d-d626-4af9-b262-93fea6a5bbc9","Type":"ContainerStarted","Data":"6111c8827a1b20f6b8e01ed8beb9ba58ab11e1ba6f80bd56b10904401d415425"}
Feb 03 07:08:30 crc kubenswrapper[4998]: I0203 07:08:30.105764 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b1280d3d-d626-4af9-b262-93fea6a5bbc9","Type":"ContainerStarted","Data":"3cac9d8cd817ada51f05662f9efe986e05e76cc89cff3d334f67cffa2cc3efca"}
Feb 03 07:08:30 crc kubenswrapper[4998]: I0203 07:08:30.131569 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.131551472 podStartE2EDuration="2.131551472s" podCreationTimestamp="2026-02-03 07:08:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:08:30.127195138 +0000 UTC m=+1348.413888954" watchObservedRunningTime="2026-02-03 07:08:30.131551472 +0000 UTC m=+1348.418245278"
Feb 03 07:08:30 crc kubenswrapper[4998]: I0203 07:08:30.450172 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c3c7cdda-009a-4dea-a2ab-3e093e5c39c6" path="/var/lib/kubelet/pods/c3c7cdda-009a-4dea-a2ab-3e093e5c39c6/volumes"
Feb 03 07:08:30 crc kubenswrapper[4998]: I0203 07:08:30.529368 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Feb 03 07:08:30 crc kubenswrapper[4998]: W0203 07:08:30.529688 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8a94b6e1_cdf7_4088_9f55_60457fa411f4.slice/crio-5ec0b5ec4fb1b7333e0489cfb6bf47739e935fbfd86ad7e44ffc03d0071abfc7 WatchSource:0}: Error finding container 5ec0b5ec4fb1b7333e0489cfb6bf47739e935fbfd86ad7e44ffc03d0071abfc7: Status 404 returned error can't find the container with id 5ec0b5ec4fb1b7333e0489cfb6bf47739e935fbfd86ad7e44ffc03d0071abfc7
Feb 03 07:08:31 crc kubenswrapper[4998]: I0203 07:08:31.115105 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8a94b6e1-cdf7-4088-9f55-60457fa411f4","Type":"ContainerStarted","Data":"4941f0989a841b63f25282cf5a3988025476630db1ac4e5885254632064a0cc6"}
Feb 03 07:08:31 crc kubenswrapper[4998]: I0203 07:08:31.115431 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8a94b6e1-cdf7-4088-9f55-60457fa411f4","Type":"ContainerStarted","Data":"5ec0b5ec4fb1b7333e0489cfb6bf47739e935fbfd86ad7e44ffc03d0071abfc7"}
Feb 03 07:08:31 crc kubenswrapper[4998]: I0203 07:08:31.152768 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.152748381 podStartE2EDuration="2.152748381s" podCreationTimestamp="2026-02-03 07:08:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:08:31.144880016 +0000 UTC m=+1349.431573832" watchObservedRunningTime="2026-02-03 07:08:31.152748381 +0000 UTC m=+1349.439442177"
Feb 03 07:08:32 crc kubenswrapper[4998]: I0203 07:08:32.210375 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="98d719ac-e7cd-4a10-bed5-5baec4c17f48" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.198:8775/\": read tcp 10.217.0.2:33546->10.217.0.198:8775: read: connection reset by peer"
Feb 03 07:08:32 crc kubenswrapper[4998]: I0203 07:08:32.210391 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="98d719ac-e7cd-4a10-bed5-5baec4c17f48" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.198:8775/\": read tcp 10.217.0.2:33548->10.217.0.198:8775: read: connection reset by peer"
Feb 03 07:08:32 crc kubenswrapper[4998]: I0203 07:08:32.675311 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Feb 03 07:08:32 crc kubenswrapper[4998]: I0203 07:08:32.735073 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98d719ac-e7cd-4a10-bed5-5baec4c17f48-logs\") pod \"98d719ac-e7cd-4a10-bed5-5baec4c17f48\" (UID: \"98d719ac-e7cd-4a10-bed5-5baec4c17f48\") "
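
The two "Probe failed" entries above are the flip side for nova-metadata-0: HTTPS readiness GETs against 10.217.0.198:8775 answered with "connection reset by peer" while the server is shutting down under its termination grace period. Declared with the same k8s.io/api/core/v1 types as the exec probe earlier, with the port and scheme taken from the log and the rest assumed:

    package main

    import (
    	"fmt"

    	corev1 "k8s.io/api/core/v1"
    	"k8s.io/apimachinery/pkg/util/intstr"
    )

    func main() {
    	probe := &corev1.Probe{
    		ProbeHandler: corev1.ProbeHandler{
    			HTTPGet: &corev1.HTTPGetAction{
    				Path:   "/",
    				Port:   intstr.FromInt(8775), // nova metadata API port, per the log
    				Scheme: corev1.URISchemeHTTPS,
    			},
    		},
    	}
    	fmt.Println(probe.HTTPGet.Scheme, probe.HTTPGet.Port.IntValue())
    }
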
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:08:32 crc kubenswrapper[4998]: I0203 07:08:32.837096 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/98d719ac-e7cd-4a10-bed5-5baec4c17f48-nova-metadata-tls-certs\") pod \"98d719ac-e7cd-4a10-bed5-5baec4c17f48\" (UID: \"98d719ac-e7cd-4a10-bed5-5baec4c17f48\") " Feb 03 07:08:32 crc kubenswrapper[4998]: I0203 07:08:32.837539 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98d719ac-e7cd-4a10-bed5-5baec4c17f48-config-data\") pod \"98d719ac-e7cd-4a10-bed5-5baec4c17f48\" (UID: \"98d719ac-e7cd-4a10-bed5-5baec4c17f48\") " Feb 03 07:08:32 crc kubenswrapper[4998]: I0203 07:08:32.837616 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k2x9v\" (UniqueName: \"kubernetes.io/projected/98d719ac-e7cd-4a10-bed5-5baec4c17f48-kube-api-access-k2x9v\") pod \"98d719ac-e7cd-4a10-bed5-5baec4c17f48\" (UID: \"98d719ac-e7cd-4a10-bed5-5baec4c17f48\") " Feb 03 07:08:32 crc kubenswrapper[4998]: I0203 07:08:32.837715 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98d719ac-e7cd-4a10-bed5-5baec4c17f48-combined-ca-bundle\") pod \"98d719ac-e7cd-4a10-bed5-5baec4c17f48\" (UID: \"98d719ac-e7cd-4a10-bed5-5baec4c17f48\") " Feb 03 07:08:32 crc kubenswrapper[4998]: I0203 07:08:32.838276 4998 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98d719ac-e7cd-4a10-bed5-5baec4c17f48-logs\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:32 crc kubenswrapper[4998]: I0203 07:08:32.843300 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98d719ac-e7cd-4a10-bed5-5baec4c17f48-kube-api-access-k2x9v" (OuterVolumeSpecName: "kube-api-access-k2x9v") pod "98d719ac-e7cd-4a10-bed5-5baec4c17f48" (UID: "98d719ac-e7cd-4a10-bed5-5baec4c17f48"). InnerVolumeSpecName "kube-api-access-k2x9v". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:08:32 crc kubenswrapper[4998]: I0203 07:08:32.864724 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98d719ac-e7cd-4a10-bed5-5baec4c17f48-config-data" (OuterVolumeSpecName: "config-data") pod "98d719ac-e7cd-4a10-bed5-5baec4c17f48" (UID: "98d719ac-e7cd-4a10-bed5-5baec4c17f48"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:08:32 crc kubenswrapper[4998]: I0203 07:08:32.868576 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98d719ac-e7cd-4a10-bed5-5baec4c17f48-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "98d719ac-e7cd-4a10-bed5-5baec4c17f48" (UID: "98d719ac-e7cd-4a10-bed5-5baec4c17f48"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:08:32 crc kubenswrapper[4998]: I0203 07:08:32.895858 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98d719ac-e7cd-4a10-bed5-5baec4c17f48-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "98d719ac-e7cd-4a10-bed5-5baec4c17f48" (UID: "98d719ac-e7cd-4a10-bed5-5baec4c17f48"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:08:32 crc kubenswrapper[4998]: I0203 07:08:32.939774 4998 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/98d719ac-e7cd-4a10-bed5-5baec4c17f48-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:32 crc kubenswrapper[4998]: I0203 07:08:32.939867 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98d719ac-e7cd-4a10-bed5-5baec4c17f48-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:32 crc kubenswrapper[4998]: I0203 07:08:32.939898 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k2x9v\" (UniqueName: \"kubernetes.io/projected/98d719ac-e7cd-4a10-bed5-5baec4c17f48-kube-api-access-k2x9v\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:32 crc kubenswrapper[4998]: I0203 07:08:32.939917 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98d719ac-e7cd-4a10-bed5-5baec4c17f48-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:08:33 crc kubenswrapper[4998]: I0203 07:08:33.135983 4998 generic.go:334] "Generic (PLEG): container finished" podID="98d719ac-e7cd-4a10-bed5-5baec4c17f48" containerID="d83b423a6de677a68c565b09c55ffc1294f4a4cbf02653fe75a7044c689b074e" exitCode=0 Feb 03 07:08:33 crc kubenswrapper[4998]: I0203 07:08:33.136058 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"98d719ac-e7cd-4a10-bed5-5baec4c17f48","Type":"ContainerDied","Data":"d83b423a6de677a68c565b09c55ffc1294f4a4cbf02653fe75a7044c689b074e"} Feb 03 07:08:33 crc kubenswrapper[4998]: I0203 07:08:33.136091 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"98d719ac-e7cd-4a10-bed5-5baec4c17f48","Type":"ContainerDied","Data":"d922115110935e3fc45d55e65c99e52fc963452360da9b3d5d1547d0dc0fe3dc"} Feb 03 07:08:33 crc kubenswrapper[4998]: I0203 07:08:33.136136 4998 scope.go:117] "RemoveContainer" containerID="d83b423a6de677a68c565b09c55ffc1294f4a4cbf02653fe75a7044c689b074e" Feb 03 07:08:33 crc kubenswrapper[4998]: I0203 07:08:33.136313 4998 util.go:48] "No ready sandbox for pod can be found. 
Feb 03 07:08:33 crc kubenswrapper[4998]: I0203 07:08:33.178769 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Feb 03 07:08:33 crc kubenswrapper[4998]: I0203 07:08:33.184099 4998 scope.go:117] "RemoveContainer" containerID="8f8e875d562862d9a9107a664097fccc0dd492a1464e83ac0f4f306928f35903"
Feb 03 07:08:33 crc kubenswrapper[4998]: I0203 07:08:33.200378 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Feb 03 07:08:33 crc kubenswrapper[4998]: I0203 07:08:33.215050 4998 scope.go:117] "RemoveContainer" containerID="d83b423a6de677a68c565b09c55ffc1294f4a4cbf02653fe75a7044c689b074e"
Feb 03 07:08:33 crc kubenswrapper[4998]: I0203 07:08:33.228906 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Feb 03 07:08:33 crc kubenswrapper[4998]: E0203 07:08:33.229079 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d83b423a6de677a68c565b09c55ffc1294f4a4cbf02653fe75a7044c689b074e\": container with ID starting with d83b423a6de677a68c565b09c55ffc1294f4a4cbf02653fe75a7044c689b074e not found: ID does not exist" containerID="d83b423a6de677a68c565b09c55ffc1294f4a4cbf02653fe75a7044c689b074e"
Feb 03 07:08:33 crc kubenswrapper[4998]: I0203 07:08:33.229121 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d83b423a6de677a68c565b09c55ffc1294f4a4cbf02653fe75a7044c689b074e"} err="failed to get container status \"d83b423a6de677a68c565b09c55ffc1294f4a4cbf02653fe75a7044c689b074e\": rpc error: code = NotFound desc = could not find container \"d83b423a6de677a68c565b09c55ffc1294f4a4cbf02653fe75a7044c689b074e\": container with ID starting with d83b423a6de677a68c565b09c55ffc1294f4a4cbf02653fe75a7044c689b074e not found: ID does not exist"
Feb 03 07:08:33 crc kubenswrapper[4998]: I0203 07:08:33.229149 4998 scope.go:117] "RemoveContainer" containerID="8f8e875d562862d9a9107a664097fccc0dd492a1464e83ac0f4f306928f35903"
Feb 03 07:08:33 crc kubenswrapper[4998]: E0203 07:08:33.229327 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98d719ac-e7cd-4a10-bed5-5baec4c17f48" containerName="nova-metadata-log"
Feb 03 07:08:33 crc kubenswrapper[4998]: I0203 07:08:33.229339 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="98d719ac-e7cd-4a10-bed5-5baec4c17f48" containerName="nova-metadata-log"
Feb 03 07:08:33 crc kubenswrapper[4998]: E0203 07:08:33.229358 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98d719ac-e7cd-4a10-bed5-5baec4c17f48" containerName="nova-metadata-metadata"
Feb 03 07:08:33 crc kubenswrapper[4998]: I0203 07:08:33.229365 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="98d719ac-e7cd-4a10-bed5-5baec4c17f48" containerName="nova-metadata-metadata"
Feb 03 07:08:33 crc kubenswrapper[4998]: I0203 07:08:33.229582 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="98d719ac-e7cd-4a10-bed5-5baec4c17f48" containerName="nova-metadata-log"
Feb 03 07:08:33 crc kubenswrapper[4998]: I0203 07:08:33.229604 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="98d719ac-e7cd-4a10-bed5-5baec4c17f48" containerName="nova-metadata-metadata"
Feb 03 07:08:33 crc kubenswrapper[4998]: I0203 07:08:33.230507 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Feb 03 07:08:33 crc kubenswrapper[4998]: E0203 07:08:33.232448 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f8e875d562862d9a9107a664097fccc0dd492a1464e83ac0f4f306928f35903\": container with ID starting with 8f8e875d562862d9a9107a664097fccc0dd492a1464e83ac0f4f306928f35903 not found: ID does not exist" containerID="8f8e875d562862d9a9107a664097fccc0dd492a1464e83ac0f4f306928f35903"
Feb 03 07:08:33 crc kubenswrapper[4998]: I0203 07:08:33.232478 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f8e875d562862d9a9107a664097fccc0dd492a1464e83ac0f4f306928f35903"} err="failed to get container status \"8f8e875d562862d9a9107a664097fccc0dd492a1464e83ac0f4f306928f35903\": rpc error: code = NotFound desc = could not find container \"8f8e875d562862d9a9107a664097fccc0dd492a1464e83ac0f4f306928f35903\": container with ID starting with 8f8e875d562862d9a9107a664097fccc0dd492a1464e83ac0f4f306928f35903 not found: ID does not exist"
Feb 03 07:08:33 crc kubenswrapper[4998]: I0203 07:08:33.239524 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Feb 03 07:08:33 crc kubenswrapper[4998]: I0203 07:08:33.243332 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Feb 03 07:08:33 crc kubenswrapper[4998]: I0203 07:08:33.243574 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Feb 03 07:08:33 crc kubenswrapper[4998]: I0203 07:08:33.244845 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e51da52-0dd9-4394-bb81-c4a1e534ad17-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3e51da52-0dd9-4394-bb81-c4a1e534ad17\") " pod="openstack/nova-metadata-0"
Feb 03 07:08:33 crc kubenswrapper[4998]: I0203 07:08:33.244891 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62dc5\" (UniqueName: \"kubernetes.io/projected/3e51da52-0dd9-4394-bb81-c4a1e534ad17-kube-api-access-62dc5\") pod \"nova-metadata-0\" (UID: \"3e51da52-0dd9-4394-bb81-c4a1e534ad17\") " pod="openstack/nova-metadata-0"
Feb 03 07:08:33 crc kubenswrapper[4998]: I0203 07:08:33.244968 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e51da52-0dd9-4394-bb81-c4a1e534ad17-config-data\") pod \"nova-metadata-0\" (UID: \"3e51da52-0dd9-4394-bb81-c4a1e534ad17\") " pod="openstack/nova-metadata-0"
Feb 03 07:08:33 crc kubenswrapper[4998]: I0203 07:08:33.245017 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e51da52-0dd9-4394-bb81-c4a1e534ad17-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"3e51da52-0dd9-4394-bb81-c4a1e534ad17\") " pod="openstack/nova-metadata-0"
Feb 03 07:08:33 crc kubenswrapper[4998]: I0203 07:08:33.245052 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e51da52-0dd9-4394-bb81-c4a1e534ad17-logs\") pod \"nova-metadata-0\" (UID: \"3e51da52-0dd9-4394-bb81-c4a1e534ad17\") " pod="openstack/nova-metadata-0"
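
The ContainerStatus / DeleteContainer errors above are the benign side of container removal: by the time kubelet re-queried the runtime, CRI-O had already deleted the container, so the RPC comes back as gRPC NotFound and the delete can be treated as already done. A minimal sketch (not kubelet's actual code) of separating that case from a real runtime failure:

package main

import (
	"errors"
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// alreadyRemoved reports whether a CRI error is the "container already
// gone" case: a gRPC status with code NotFound, as in the log above.
func alreadyRemoved(err error) bool {
	s, ok := status.FromError(err)
	return ok && s.Code() == codes.NotFound
}

func main() {
	gone := status.Error(codes.NotFound, "could not find container")
	fmt.Println(alreadyRemoved(gone))               // true: safe to treat the delete as a no-op
	fmt.Println(alreadyRemoved(errors.New("boom"))) // false: surface the error
}
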
Feb 03 07:08:33 crc kubenswrapper[4998]: I0203 07:08:33.347144 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e51da52-0dd9-4394-bb81-c4a1e534ad17-logs\") pod \"nova-metadata-0\" (UID: \"3e51da52-0dd9-4394-bb81-c4a1e534ad17\") " pod="openstack/nova-metadata-0"
Feb 03 07:08:33 crc kubenswrapper[4998]: I0203 07:08:33.347263 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e51da52-0dd9-4394-bb81-c4a1e534ad17-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3e51da52-0dd9-4394-bb81-c4a1e534ad17\") " pod="openstack/nova-metadata-0"
Feb 03 07:08:33 crc kubenswrapper[4998]: I0203 07:08:33.347333 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62dc5\" (UniqueName: \"kubernetes.io/projected/3e51da52-0dd9-4394-bb81-c4a1e534ad17-kube-api-access-62dc5\") pod \"nova-metadata-0\" (UID: \"3e51da52-0dd9-4394-bb81-c4a1e534ad17\") " pod="openstack/nova-metadata-0"
Feb 03 07:08:33 crc kubenswrapper[4998]: I0203 07:08:33.347370 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e51da52-0dd9-4394-bb81-c4a1e534ad17-config-data\") pod \"nova-metadata-0\" (UID: \"3e51da52-0dd9-4394-bb81-c4a1e534ad17\") " pod="openstack/nova-metadata-0"
Feb 03 07:08:33 crc kubenswrapper[4998]: I0203 07:08:33.347428 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e51da52-0dd9-4394-bb81-c4a1e534ad17-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"3e51da52-0dd9-4394-bb81-c4a1e534ad17\") " pod="openstack/nova-metadata-0"
Feb 03 07:08:33 crc kubenswrapper[4998]: I0203 07:08:33.348926 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e51da52-0dd9-4394-bb81-c4a1e534ad17-logs\") pod \"nova-metadata-0\" (UID: \"3e51da52-0dd9-4394-bb81-c4a1e534ad17\") " pod="openstack/nova-metadata-0"
Feb 03 07:08:33 crc kubenswrapper[4998]: I0203 07:08:33.352464 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e51da52-0dd9-4394-bb81-c4a1e534ad17-config-data\") pod \"nova-metadata-0\" (UID: \"3e51da52-0dd9-4394-bb81-c4a1e534ad17\") " pod="openstack/nova-metadata-0"
Feb 03 07:08:33 crc kubenswrapper[4998]: I0203 07:08:33.354097 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e51da52-0dd9-4394-bb81-c4a1e534ad17-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"3e51da52-0dd9-4394-bb81-c4a1e534ad17\") " pod="openstack/nova-metadata-0"
Feb 03 07:08:33 crc kubenswrapper[4998]: I0203 07:08:33.354222 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e51da52-0dd9-4394-bb81-c4a1e534ad17-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3e51da52-0dd9-4394-bb81-c4a1e534ad17\") " pod="openstack/nova-metadata-0"
Feb 03 07:08:33 crc kubenswrapper[4998]: I0203 07:08:33.367620 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62dc5\" (UniqueName: \"kubernetes.io/projected/3e51da52-0dd9-4394-bb81-c4a1e534ad17-kube-api-access-62dc5\") pod \"nova-metadata-0\" (UID: \"3e51da52-0dd9-4394-bb81-c4a1e534ad17\") " pod="openstack/nova-metadata-0"
Feb 03 07:08:33 crc kubenswrapper[4998]: I0203 07:08:33.557430 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Feb 03 07:08:34 crc kubenswrapper[4998]: I0203 07:08:34.002648 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Feb 03 07:08:34 crc kubenswrapper[4998]: W0203 07:08:34.012273 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3e51da52_0dd9_4394_bb81_c4a1e534ad17.slice/crio-ea39da380c4f22920d284cc6f15e2b24322f06345bed439ea44f73f6349ff79c WatchSource:0}: Error finding container ea39da380c4f22920d284cc6f15e2b24322f06345bed439ea44f73f6349ff79c: Status 404 returned error can't find the container with id ea39da380c4f22920d284cc6f15e2b24322f06345bed439ea44f73f6349ff79c
Feb 03 07:08:34 crc kubenswrapper[4998]: I0203 07:08:34.150084 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3e51da52-0dd9-4394-bb81-c4a1e534ad17","Type":"ContainerStarted","Data":"ea39da380c4f22920d284cc6f15e2b24322f06345bed439ea44f73f6349ff79c"}
Feb 03 07:08:34 crc kubenswrapper[4998]: I0203 07:08:34.442822 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="98d719ac-e7cd-4a10-bed5-5baec4c17f48" path="/var/lib/kubelet/pods/98d719ac-e7cd-4a10-bed5-5baec4c17f48/volumes"
Feb 03 07:08:34 crc kubenswrapper[4998]: I0203 07:08:34.994340 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Feb 03 07:08:35 crc kubenswrapper[4998]: I0203 07:08:35.163522 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3e51da52-0dd9-4394-bb81-c4a1e534ad17","Type":"ContainerStarted","Data":"df2f93789843ee0c929625754c662eef472a6b4dd88ccbe2cb42cb58b6f17304"}
Feb 03 07:08:35 crc kubenswrapper[4998]: I0203 07:08:35.163601 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3e51da52-0dd9-4394-bb81-c4a1e534ad17","Type":"ContainerStarted","Data":"e5a74b71aeac6fcb20830db0c86ae6a7644b150ccbef95f2ae2b39cce49f5353"}
Feb 03 07:08:35 crc kubenswrapper[4998]: I0203 07:08:35.197097 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.197076782 podStartE2EDuration="2.197076782s" podCreationTimestamp="2026-02-03 07:08:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:08:35.187374225 +0000 UTC m=+1353.474068041" watchObservedRunningTime="2026-02-03 07:08:35.197076782 +0000 UTC m=+1353.483770598"
Feb 03 07:08:38 crc kubenswrapper[4998]: I0203 07:08:38.558066 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Feb 03 07:08:38 crc kubenswrapper[4998]: I0203 07:08:38.558609 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Feb 03 07:08:38 crc kubenswrapper[4998]: I0203 07:08:38.733388 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Feb 03 07:08:38 crc kubenswrapper[4998]: I0203 07:08:38.733461 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Feb 03 07:08:39 crc kubenswrapper[4998]: I0203 07:08:39.750044 4998 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b1280d3d-d626-4af9-b262-93fea6a5bbc9" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.203:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
pod="openstack/nova-api-0" podUID="b1280d3d-d626-4af9-b262-93fea6a5bbc9" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.203:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 03 07:08:39 crc kubenswrapper[4998]: I0203 07:08:39.750069 4998 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b1280d3d-d626-4af9-b262-93fea6a5bbc9" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.203:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 03 07:08:39 crc kubenswrapper[4998]: I0203 07:08:39.993964 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Feb 03 07:08:40 crc kubenswrapper[4998]: I0203 07:08:40.028071 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Feb 03 07:08:40 crc kubenswrapper[4998]: I0203 07:08:40.254959 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Feb 03 07:08:42 crc kubenswrapper[4998]: I0203 07:08:42.754092 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:08:42 crc kubenswrapper[4998]: I0203 07:08:42.754423 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:08:43 crc kubenswrapper[4998]: I0203 07:08:43.558581 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 03 07:08:43 crc kubenswrapper[4998]: I0203 07:08:43.558900 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 03 07:08:44 crc kubenswrapper[4998]: I0203 07:08:44.573008 4998 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="3e51da52-0dd9-4394-bb81-c4a1e534ad17" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.205:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 03 07:08:44 crc kubenswrapper[4998]: I0203 07:08:44.573008 4998 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="3e51da52-0dd9-4394-bb81-c4a1e534ad17" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.205:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 03 07:08:47 crc kubenswrapper[4998]: I0203 07:08:47.285649 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Feb 03 07:08:48 crc kubenswrapper[4998]: I0203 07:08:48.744657 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Feb 03 07:08:48 crc kubenswrapper[4998]: I0203 07:08:48.745187 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Feb 03 07:08:48 crc kubenswrapper[4998]: I0203 07:08:48.748653 4998 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="started" pod="openstack/nova-api-0" Feb 03 07:08:48 crc kubenswrapper[4998]: I0203 07:08:48.755003 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Feb 03 07:08:49 crc kubenswrapper[4998]: I0203 07:08:49.328853 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Feb 03 07:08:49 crc kubenswrapper[4998]: I0203 07:08:49.338389 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Feb 03 07:08:53 crc kubenswrapper[4998]: I0203 07:08:53.562649 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Feb 03 07:08:53 crc kubenswrapper[4998]: I0203 07:08:53.563416 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Feb 03 07:08:53 crc kubenswrapper[4998]: I0203 07:08:53.568405 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Feb 03 07:08:54 crc kubenswrapper[4998]: I0203 07:08:54.373968 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Feb 03 07:09:03 crc kubenswrapper[4998]: I0203 07:09:03.803871 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-b7g8v"] Feb 03 07:09:03 crc kubenswrapper[4998]: I0203 07:09:03.806371 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b7g8v" Feb 03 07:09:03 crc kubenswrapper[4998]: I0203 07:09:03.814902 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b7g8v"] Feb 03 07:09:03 crc kubenswrapper[4998]: I0203 07:09:03.939179 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03de7e09-127d-4746-a01c-e5b6a3618014-catalog-content\") pod \"redhat-operators-b7g8v\" (UID: \"03de7e09-127d-4746-a01c-e5b6a3618014\") " pod="openshift-marketplace/redhat-operators-b7g8v" Feb 03 07:09:03 crc kubenswrapper[4998]: I0203 07:09:03.939555 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03de7e09-127d-4746-a01c-e5b6a3618014-utilities\") pod \"redhat-operators-b7g8v\" (UID: \"03de7e09-127d-4746-a01c-e5b6a3618014\") " pod="openshift-marketplace/redhat-operators-b7g8v" Feb 03 07:09:03 crc kubenswrapper[4998]: I0203 07:09:03.939643 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-plltc\" (UniqueName: \"kubernetes.io/projected/03de7e09-127d-4746-a01c-e5b6a3618014-kube-api-access-plltc\") pod \"redhat-operators-b7g8v\" (UID: \"03de7e09-127d-4746-a01c-e5b6a3618014\") " pod="openshift-marketplace/redhat-operators-b7g8v" Feb 03 07:09:04 crc kubenswrapper[4998]: I0203 07:09:04.041546 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03de7e09-127d-4746-a01c-e5b6a3618014-utilities\") pod \"redhat-operators-b7g8v\" (UID: \"03de7e09-127d-4746-a01c-e5b6a3618014\") " pod="openshift-marketplace/redhat-operators-b7g8v" Feb 03 07:09:04 crc kubenswrapper[4998]: I0203 07:09:04.041610 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-plltc\" (UniqueName: 
\"kubernetes.io/projected/03de7e09-127d-4746-a01c-e5b6a3618014-kube-api-access-plltc\") pod \"redhat-operators-b7g8v\" (UID: \"03de7e09-127d-4746-a01c-e5b6a3618014\") " pod="openshift-marketplace/redhat-operators-b7g8v" Feb 03 07:09:04 crc kubenswrapper[4998]: I0203 07:09:04.041674 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03de7e09-127d-4746-a01c-e5b6a3618014-catalog-content\") pod \"redhat-operators-b7g8v\" (UID: \"03de7e09-127d-4746-a01c-e5b6a3618014\") " pod="openshift-marketplace/redhat-operators-b7g8v" Feb 03 07:09:04 crc kubenswrapper[4998]: I0203 07:09:04.042084 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03de7e09-127d-4746-a01c-e5b6a3618014-utilities\") pod \"redhat-operators-b7g8v\" (UID: \"03de7e09-127d-4746-a01c-e5b6a3618014\") " pod="openshift-marketplace/redhat-operators-b7g8v" Feb 03 07:09:04 crc kubenswrapper[4998]: I0203 07:09:04.042142 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03de7e09-127d-4746-a01c-e5b6a3618014-catalog-content\") pod \"redhat-operators-b7g8v\" (UID: \"03de7e09-127d-4746-a01c-e5b6a3618014\") " pod="openshift-marketplace/redhat-operators-b7g8v" Feb 03 07:09:04 crc kubenswrapper[4998]: I0203 07:09:04.068174 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-plltc\" (UniqueName: \"kubernetes.io/projected/03de7e09-127d-4746-a01c-e5b6a3618014-kube-api-access-plltc\") pod \"redhat-operators-b7g8v\" (UID: \"03de7e09-127d-4746-a01c-e5b6a3618014\") " pod="openshift-marketplace/redhat-operators-b7g8v" Feb 03 07:09:04 crc kubenswrapper[4998]: I0203 07:09:04.140586 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-b7g8v" Feb 03 07:09:05 crc kubenswrapper[4998]: W0203 07:09:05.227189 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod03de7e09_127d_4746_a01c_e5b6a3618014.slice/crio-938b29719134e5a4cde384d1ddf89bc5794bc61b4cc828cc7ec934f6d8fd0b48 WatchSource:0}: Error finding container 938b29719134e5a4cde384d1ddf89bc5794bc61b4cc828cc7ec934f6d8fd0b48: Status 404 returned error can't find the container with id 938b29719134e5a4cde384d1ddf89bc5794bc61b4cc828cc7ec934f6d8fd0b48 Feb 03 07:09:05 crc kubenswrapper[4998]: I0203 07:09:05.232845 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b7g8v"] Feb 03 07:09:05 crc kubenswrapper[4998]: I0203 07:09:05.473607 4998 generic.go:334] "Generic (PLEG): container finished" podID="03de7e09-127d-4746-a01c-e5b6a3618014" containerID="62e265be30824ef3b0cb94882808022017ca92cecf1d6af12ed9164d90e2ec4c" exitCode=0 Feb 03 07:09:05 crc kubenswrapper[4998]: I0203 07:09:05.473659 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b7g8v" event={"ID":"03de7e09-127d-4746-a01c-e5b6a3618014","Type":"ContainerDied","Data":"62e265be30824ef3b0cb94882808022017ca92cecf1d6af12ed9164d90e2ec4c"} Feb 03 07:09:05 crc kubenswrapper[4998]: I0203 07:09:05.473685 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b7g8v" event={"ID":"03de7e09-127d-4746-a01c-e5b6a3618014","Type":"ContainerStarted","Data":"938b29719134e5a4cde384d1ddf89bc5794bc61b4cc828cc7ec934f6d8fd0b48"} Feb 03 07:09:05 crc kubenswrapper[4998]: I0203 07:09:05.475484 4998 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 03 07:09:06 crc kubenswrapper[4998]: I0203 07:09:06.483953 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b7g8v" event={"ID":"03de7e09-127d-4746-a01c-e5b6a3618014","Type":"ContainerStarted","Data":"c97a1ae836dd764480c9bc340d5d428deeb248e8d300cd3497c09df46ecda043"} Feb 03 07:09:08 crc kubenswrapper[4998]: I0203 07:09:08.504680 4998 generic.go:334] "Generic (PLEG): container finished" podID="03de7e09-127d-4746-a01c-e5b6a3618014" containerID="c97a1ae836dd764480c9bc340d5d428deeb248e8d300cd3497c09df46ecda043" exitCode=0 Feb 03 07:09:08 crc kubenswrapper[4998]: I0203 07:09:08.504756 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b7g8v" event={"ID":"03de7e09-127d-4746-a01c-e5b6a3618014","Type":"ContainerDied","Data":"c97a1ae836dd764480c9bc340d5d428deeb248e8d300cd3497c09df46ecda043"} Feb 03 07:09:10 crc kubenswrapper[4998]: I0203 07:09:10.559798 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b7g8v" event={"ID":"03de7e09-127d-4746-a01c-e5b6a3618014","Type":"ContainerStarted","Data":"06b3778b3efbeaf309b9437aea0022bdc6ac9da6c81e8a3d61e80702481f6678"} Feb 03 07:09:10 crc kubenswrapper[4998]: I0203 07:09:10.619668 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-b7g8v" podStartSLOduration=3.639206565 podStartE2EDuration="7.619651566s" podCreationTimestamp="2026-02-03 07:09:03 +0000 UTC" firstStartedPulling="2026-02-03 07:09:05.475209554 +0000 UTC m=+1383.761903360" lastFinishedPulling="2026-02-03 07:09:09.455654555 +0000 UTC m=+1387.742348361" 
observedRunningTime="2026-02-03 07:09:10.619178442 +0000 UTC m=+1388.905872248" watchObservedRunningTime="2026-02-03 07:09:10.619651566 +0000 UTC m=+1388.906345362" Feb 03 07:09:12 crc kubenswrapper[4998]: I0203 07:09:12.754555 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:09:12 crc kubenswrapper[4998]: I0203 07:09:12.754865 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:09:12 crc kubenswrapper[4998]: I0203 07:09:12.854839 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-9147-account-create-update-75s8j"] Feb 03 07:09:12 crc kubenswrapper[4998]: I0203 07:09:12.895860 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Feb 03 07:09:12 crc kubenswrapper[4998]: I0203 07:09:12.896142 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="58c44471-d442-4736-a649-c762a1c893fa" containerName="openstackclient" containerID="cri-o://254b248ad815a3de6f3228e0bf342af87d6cff65d3356b30e77a551b768ab805" gracePeriod=2 Feb 03 07:09:12 crc kubenswrapper[4998]: I0203 07:09:12.914183 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Feb 03 07:09:12 crc kubenswrapper[4998]: I0203 07:09:12.924386 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-d2c0-account-create-update-d7mgx"] Feb 03 07:09:12 crc kubenswrapper[4998]: I0203 07:09:12.937496 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-9147-account-create-update-75s8j"] Feb 03 07:09:12 crc kubenswrapper[4998]: I0203 07:09:12.949759 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-d2c0-account-create-update-688tk"] Feb 03 07:09:12 crc kubenswrapper[4998]: E0203 07:09:12.950179 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58c44471-d442-4736-a649-c762a1c893fa" containerName="openstackclient" Feb 03 07:09:12 crc kubenswrapper[4998]: I0203 07:09:12.950194 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="58c44471-d442-4736-a649-c762a1c893fa" containerName="openstackclient" Feb 03 07:09:12 crc kubenswrapper[4998]: I0203 07:09:12.950360 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="58c44471-d442-4736-a649-c762a1c893fa" containerName="openstackclient" Feb 03 07:09:12 crc kubenswrapper[4998]: I0203 07:09:12.950962 4998 util.go:30] "No sandbox for pod can be found. 
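
The PLEG entries above carry the whole container lifecycle for redhat-operators-b7g8v: two short-lived containers run and exit 0 before the final ContainerStarted. A minimal sketch for pulling those events out of a log like this one; the regexp is an assumption fitted to the `SyncLoop (PLEG): event for pod` lines shown above, not a stable format:

package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

// Matches the pod name, pod UID, event type, and container/sandbox ID in
// the PLEG entries above.
var pleg = regexp.MustCompile(`"SyncLoop \(PLEG\): event for pod" pod="([^"]+)" event={"ID":"([^"]+)","Type":"([^"]+)","Data":"([^"]+)"}`)

func main() {
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // log lines can be long
	for sc.Scan() {
		if m := pleg.FindStringSubmatch(sc.Text()); m != nil {
			fmt.Printf("%-45s %-16s %s\n", m[1], m[3], m[4])
		}
	}
}

Fed this log on stdin (e.g. zcat kubelet.log.gz | go run the sketch), it prints one ContainerStarted/ContainerDied row per event, which makes sequences like the one above easy to follow per pod.
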
Feb 03 07:09:12 crc kubenswrapper[4998]: I0203 07:09:12.959215 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret"
Feb 03 07:09:12 crc kubenswrapper[4998]: I0203 07:09:12.966001 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-d2c0-account-create-update-d7mgx"]
Feb 03 07:09:12 crc kubenswrapper[4998]: I0203 07:09:12.978339 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-9147-account-create-update-5gj8v"]
Feb 03 07:09:12 crc kubenswrapper[4998]: I0203 07:09:12.980113 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-9147-account-create-update-5gj8v"
Feb 03 07:09:12 crc kubenswrapper[4998]: I0203 07:09:12.993851 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-d2c0-account-create-update-688tk"]
Feb 03 07:09:12 crc kubenswrapper[4998]: I0203 07:09:12.994857 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.004071 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-9147-account-create-update-5gj8v"]
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.023979 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-e892-account-create-update-2vvtl"]
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.047203 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-e892-account-create-update-2vvtl"]
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.067983 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/daa99d07-07ba-49aa-82c3-8bcfca0c3564-operator-scripts\") pod \"nova-api-d2c0-account-create-update-688tk\" (UID: \"daa99d07-07ba-49aa-82c3-8bcfca0c3564\") " pod="openstack/nova-api-d2c0-account-create-update-688tk"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.068194 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-grrlc\" (UniqueName: \"kubernetes.io/projected/daa99d07-07ba-49aa-82c3-8bcfca0c3564-kube-api-access-grrlc\") pod \"nova-api-d2c0-account-create-update-688tk\" (UID: \"daa99d07-07ba-49aa-82c3-8bcfca0c3564\") " pod="openstack/nova-api-d2c0-account-create-update-688tk"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.122915 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-e892-account-create-update-khr4s"]
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.124303 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-e892-account-create-update-khr4s"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.131072 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.147998 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-65da-account-create-update-mnzrq"]
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.149357 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-65da-account-create-update-mnzrq"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.153146 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.169110 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-e892-account-create-update-khr4s"]
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.170467 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-grrlc\" (UniqueName: \"kubernetes.io/projected/daa99d07-07ba-49aa-82c3-8bcfca0c3564-kube-api-access-grrlc\") pod \"nova-api-d2c0-account-create-update-688tk\" (UID: \"daa99d07-07ba-49aa-82c3-8bcfca0c3564\") " pod="openstack/nova-api-d2c0-account-create-update-688tk"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.170737 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/daa99d07-07ba-49aa-82c3-8bcfca0c3564-operator-scripts\") pod \"nova-api-d2c0-account-create-update-688tk\" (UID: \"daa99d07-07ba-49aa-82c3-8bcfca0c3564\") " pod="openstack/nova-api-d2c0-account-create-update-688tk"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.170959 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3e82a048-30ed-4db8-8977-220db05eff1a-operator-scripts\") pod \"nova-cell1-9147-account-create-update-5gj8v\" (UID: \"3e82a048-30ed-4db8-8977-220db05eff1a\") " pod="openstack/nova-cell1-9147-account-create-update-5gj8v"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.171069 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n4dwl\" (UniqueName: \"kubernetes.io/projected/3e82a048-30ed-4db8-8977-220db05eff1a-kube-api-access-n4dwl\") pod \"nova-cell1-9147-account-create-update-5gj8v\" (UID: \"3e82a048-30ed-4db8-8977-220db05eff1a\") " pod="openstack/nova-cell1-9147-account-create-update-5gj8v"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.171738 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/daa99d07-07ba-49aa-82c3-8bcfca0c3564-operator-scripts\") pod \"nova-api-d2c0-account-create-update-688tk\" (UID: \"daa99d07-07ba-49aa-82c3-8bcfca0c3564\") " pod="openstack/nova-api-d2c0-account-create-update-688tk"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.217111 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-65da-account-create-update-mnzrq"]
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.251748 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-grrlc\" (UniqueName: \"kubernetes.io/projected/daa99d07-07ba-49aa-82c3-8bcfca0c3564-kube-api-access-grrlc\") pod \"nova-api-d2c0-account-create-update-688tk\" (UID: \"daa99d07-07ba-49aa-82c3-8bcfca0c3564\") " pod="openstack/nova-api-d2c0-account-create-update-688tk"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.283414 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-65da-account-create-update-6hsv7"]
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.283898 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0bdb467a-423f-4374-8f68-0be7157d1402-operator-scripts\") pod \"barbican-e892-account-create-update-khr4s\" (UID: \"0bdb467a-423f-4374-8f68-0be7157d1402\") " pod="openstack/barbican-e892-account-create-update-khr4s"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.283974 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5qfgd\" (UniqueName: \"kubernetes.io/projected/df47c2af-c1ee-4ab7-9691-ea6e28fa1831-kube-api-access-5qfgd\") pod \"nova-cell0-65da-account-create-update-mnzrq\" (UID: \"df47c2af-c1ee-4ab7-9691-ea6e28fa1831\") " pod="openstack/nova-cell0-65da-account-create-update-mnzrq"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.284011 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3e82a048-30ed-4db8-8977-220db05eff1a-operator-scripts\") pod \"nova-cell1-9147-account-create-update-5gj8v\" (UID: \"3e82a048-30ed-4db8-8977-220db05eff1a\") " pod="openstack/nova-cell1-9147-account-create-update-5gj8v"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.284035 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6nzcb\" (UniqueName: \"kubernetes.io/projected/0bdb467a-423f-4374-8f68-0be7157d1402-kube-api-access-6nzcb\") pod \"barbican-e892-account-create-update-khr4s\" (UID: \"0bdb467a-423f-4374-8f68-0be7157d1402\") " pod="openstack/barbican-e892-account-create-update-khr4s"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.284087 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n4dwl\" (UniqueName: \"kubernetes.io/projected/3e82a048-30ed-4db8-8977-220db05eff1a-kube-api-access-n4dwl\") pod \"nova-cell1-9147-account-create-update-5gj8v\" (UID: \"3e82a048-30ed-4db8-8977-220db05eff1a\") " pod="openstack/nova-cell1-9147-account-create-update-5gj8v"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.284140 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/df47c2af-c1ee-4ab7-9691-ea6e28fa1831-operator-scripts\") pod \"nova-cell0-65da-account-create-update-mnzrq\" (UID: \"df47c2af-c1ee-4ab7-9691-ea6e28fa1831\") " pod="openstack/nova-cell0-65da-account-create-update-mnzrq"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.285427 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-d2c0-account-create-update-688tk"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.286233 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3e82a048-30ed-4db8-8977-220db05eff1a-operator-scripts\") pod \"nova-cell1-9147-account-create-update-5gj8v\" (UID: \"3e82a048-30ed-4db8-8977-220db05eff1a\") " pod="openstack/nova-cell1-9147-account-create-update-5gj8v"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.360576 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n4dwl\" (UniqueName: \"kubernetes.io/projected/3e82a048-30ed-4db8-8977-220db05eff1a-kube-api-access-n4dwl\") pod \"nova-cell1-9147-account-create-update-5gj8v\" (UID: \"3e82a048-30ed-4db8-8977-220db05eff1a\") " pod="openstack/nova-cell1-9147-account-create-update-5gj8v"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.365963 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.373230 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-65da-account-create-update-6hsv7"]
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.384843 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-rv82q"]
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.386347 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-rv82q"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.389595 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.394750 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c" containerName="openstack-network-exporter" containerID="cri-o://7404cf26ae91b0be81d59a320980e944b738ce1db633c16325d9a0711dd3bad5" gracePeriod=300
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.396324 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6nzcb\" (UniqueName: \"kubernetes.io/projected/0bdb467a-423f-4374-8f68-0be7157d1402-kube-api-access-6nzcb\") pod \"barbican-e892-account-create-update-khr4s\" (UID: \"0bdb467a-423f-4374-8f68-0be7157d1402\") " pod="openstack/barbican-e892-account-create-update-khr4s"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.396423 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/df47c2af-c1ee-4ab7-9691-ea6e28fa1831-operator-scripts\") pod \"nova-cell0-65da-account-create-update-mnzrq\" (UID: \"df47c2af-c1ee-4ab7-9691-ea6e28fa1831\") " pod="openstack/nova-cell0-65da-account-create-update-mnzrq"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.396490 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0bdb467a-423f-4374-8f68-0be7157d1402-operator-scripts\") pod \"barbican-e892-account-create-update-khr4s\" (UID: \"0bdb467a-423f-4374-8f68-0be7157d1402\") " pod="openstack/barbican-e892-account-create-update-khr4s"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.396545 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5qfgd\" (UniqueName: \"kubernetes.io/projected/df47c2af-c1ee-4ab7-9691-ea6e28fa1831-kube-api-access-5qfgd\") pod \"nova-cell0-65da-account-create-update-mnzrq\" (UID: \"df47c2af-c1ee-4ab7-9691-ea6e28fa1831\") " pod="openstack/nova-cell0-65da-account-create-update-mnzrq"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.398284 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/df47c2af-c1ee-4ab7-9691-ea6e28fa1831-operator-scripts\") pod \"nova-cell0-65da-account-create-update-mnzrq\" (UID: \"df47c2af-c1ee-4ab7-9691-ea6e28fa1831\") " pod="openstack/nova-cell0-65da-account-create-update-mnzrq"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.406484 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-rv82q"]
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.408606 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-mariadb-root-db-secret"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.414585 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-47p8b"]
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.420029 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0bdb467a-423f-4374-8f68-0be7157d1402-operator-scripts\") pod \"barbican-e892-account-create-update-khr4s\" (UID: \"0bdb467a-423f-4374-8f68-0be7157d1402\") " pod="openstack/barbican-e892-account-create-update-khr4s"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.427199 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5qfgd\" (UniqueName: \"kubernetes.io/projected/df47c2af-c1ee-4ab7-9691-ea6e28fa1831-kube-api-access-5qfgd\") pod \"nova-cell0-65da-account-create-update-mnzrq\" (UID: \"df47c2af-c1ee-4ab7-9691-ea6e28fa1831\") " pod="openstack/nova-cell0-65da-account-create-update-mnzrq"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.430639 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-47p8b"]
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.441402 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6nzcb\" (UniqueName: \"kubernetes.io/projected/0bdb467a-423f-4374-8f68-0be7157d1402-kube-api-access-6nzcb\") pod \"barbican-e892-account-create-update-khr4s\" (UID: \"0bdb467a-423f-4374-8f68-0be7157d1402\") " pod="openstack/barbican-e892-account-create-update-khr4s"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.451475 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-e892-account-create-update-khr4s"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.498496 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/28b1541e-d0f2-46fa-8518-554fb699ab4e-operator-scripts\") pod \"root-account-create-update-rv82q\" (UID: \"28b1541e-d0f2-46fa-8518-554fb699ab4e\") " pod="openstack/root-account-create-update-rv82q"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.498537 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8bh4z\" (UniqueName: \"kubernetes.io/projected/28b1541e-d0f2-46fa-8518-554fb699ab4e-kube-api-access-8bh4z\") pod \"root-account-create-update-rv82q\" (UID: \"28b1541e-d0f2-46fa-8518-554fb699ab4e\") " pod="openstack/root-account-create-update-rv82q"
Feb 03 07:09:13 crc kubenswrapper[4998]: E0203 07:09:13.499768 4998 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found
Feb 03 07:09:13 crc kubenswrapper[4998]: E0203 07:09:13.499842 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/cc9d5160-2c51-474c-aca1-1af693753ee8-config-data podName:cc9d5160-2c51-474c-aca1-1af693753ee8 nodeName:}" failed. No retries permitted until 2026-02-03 07:09:13.999823439 +0000 UTC m=+1392.286517315 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/cc9d5160-2c51-474c-aca1-1af693753ee8-config-data") pod "rabbitmq-cell1-server-0" (UID: "cc9d5160-2c51-474c-aca1-1af693753ee8") : configmap "rabbitmq-cell1-config-data" not found
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.501652 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-65da-account-create-update-mnzrq"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.524329 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-kvz5p"]
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.655571 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-9147-account-create-update-5gj8v"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.670944 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/28b1541e-d0f2-46fa-8518-554fb699ab4e-operator-scripts\") pod \"root-account-create-update-rv82q\" (UID: \"28b1541e-d0f2-46fa-8518-554fb699ab4e\") " pod="openstack/root-account-create-update-rv82q"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.671004 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8bh4z\" (UniqueName: \"kubernetes.io/projected/28b1541e-d0f2-46fa-8518-554fb699ab4e-kube-api-access-8bh4z\") pod \"root-account-create-update-rv82q\" (UID: \"28b1541e-d0f2-46fa-8518-554fb699ab4e\") " pod="openstack/root-account-create-update-rv82q"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.760880 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/28b1541e-d0f2-46fa-8518-554fb699ab4e-operator-scripts\") pod \"root-account-create-update-rv82q\" (UID: \"28b1541e-d0f2-46fa-8518-554fb699ab4e\") " pod="openstack/root-account-create-update-rv82q"
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.782653 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-kvz5p"]
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.871197 4998 generic.go:334] "Generic (PLEG): container finished" podID="e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c" containerID="7404cf26ae91b0be81d59a320980e944b738ce1db633c16325d9a0711dd3bad5" exitCode=2
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.871253 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c","Type":"ContainerDied","Data":"7404cf26ae91b0be81d59a320980e944b738ce1db633c16325d9a0711dd3bad5"}
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.889040 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-f8d7-account-create-update-gctw4"]
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.912457 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-f8d7-account-create-update-gctw4"]
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.916596 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c" containerName="ovsdbserver-nb" containerID="cri-o://a8e05668b7efcd571ea3d4d5d93de7fe8edf60bb34bd662cf3ee577c3923dcdb" gracePeriod=300
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.939142 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-b3a0-account-create-update-2r7bm"]
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.955573 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-b3a0-account-create-update-2r7bm"]
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.986470 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"]
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.987136 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="07088226-5029-4477-a6e1-85fd28c08f4b" containerName="ovn-northd" containerID="cri-o://dd33cc14982f5158ae849d70849339c78aeeeb743bea375fe57e0cde15366ab8" gracePeriod=30
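
The teardown entries here kill containers with widely different grace periods (gracePeriod=2 for openstackclient, 300 for the ovsdbservers, 30 for ovn-northd, 10 for dnsmasq further down). The pattern behind that knob is term-then-kill: signal, wait out the grace period, then force. A minimal sketch of that pattern against a local process; illustrative only, since the real signalling happens inside the container runtime, not in kubelet's own process table:

package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

// killWithGrace sends SIGTERM, waits up to the grace period for a clean
// exit, and falls back to SIGKILL, mirroring the gracePeriod values above.
func killWithGrace(cmd *exec.Cmd, grace time.Duration) error {
	if err := cmd.Process.Signal(syscall.SIGTERM); err != nil {
		return err
	}
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()
	select {
	case err := <-done:
		return err // exited within the grace period
	case <-time.After(grace):
		return cmd.Process.Kill() // grace elapsed: force
	}
}

func main() {
	cmd := exec.Command("sleep", "300")
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	fmt.Println(killWithGrace(cmd, 10*time.Second))
}
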
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.987240 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="07088226-5029-4477-a6e1-85fd28c08f4b" containerName="openstack-network-exporter" containerID="cri-o://a172aacd50c8df3a6ef1e772e9a7a1499cc74d9310858af6283ac53328c157db" gracePeriod=30
Feb 03 07:09:13 crc kubenswrapper[4998]: I0203 07:09:13.988326 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8bh4z\" (UniqueName: \"kubernetes.io/projected/28b1541e-d0f2-46fa-8518-554fb699ab4e-kube-api-access-8bh4z\") pod \"root-account-create-update-rv82q\" (UID: \"28b1541e-d0f2-46fa-8518-554fb699ab4e\") " pod="openstack/root-account-create-update-rv82q"
Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.026604 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-ml2n9"]
Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.045402 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-ml2n9"]
Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.055962 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-6pdsl"]
Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.068918 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-6pdsl"]
Feb 03 07:09:14 crc kubenswrapper[4998]: E0203 07:09:14.069948 4998 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found
Feb 03 07:09:14 crc kubenswrapper[4998]: E0203 07:09:14.070010 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/cc9d5160-2c51-474c-aca1-1af693753ee8-config-data podName:cc9d5160-2c51-474c-aca1-1af693753ee8 nodeName:}" failed. No retries permitted until 2026-02-03 07:09:15.069992279 +0000 UTC m=+1393.356686085 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/cc9d5160-2c51-474c-aca1-1af693753ee8-config-data") pod "rabbitmq-cell1-server-0" (UID: "cc9d5160-2c51-474c-aca1-1af693753ee8") : configmap "rabbitmq-cell1-config-data" not found
Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.081033 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-jgnc7"]
Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.090424 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-jgnc7"]
Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.111473 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-zhwfb"]
Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.121325 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-zhwfb"]
Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.141321 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-b7g8v"
Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.150258 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-b7g8v"
Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.190904 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-sm6db"]
Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.256395 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-rv82q"
Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.259371 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-qtllb"]
Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.259577 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-metrics-qtllb" podUID="9a85f317-a6ed-4d19-8222-136fda8b4517" containerName="openstack-network-exporter" containerID="cri-o://2647dc3bcf763bab29c7e6b082b05a86b066ea5c8e34ab43d3543cc7fce88f34" gracePeriod=30
Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.309268 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-t4p58"]
Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.350599 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-5rpgx"]
Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.367482 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-5rpgx"]
Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.402082 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.425564 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-sstns"]
Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.553253 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0c5507ca-3689-4044-8e7c-37627a2f2759" path="/var/lib/kubelet/pods/0c5507ca-3689-4044-8e7c-37627a2f2759/volumes"
Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.557814 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e86027e-f5e6-40ba-af5a-275b9087dcfd" path="/var/lib/kubelet/pods/0e86027e-f5e6-40ba-af5a-275b9087dcfd/volumes"
Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.558659 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f02cdb5-1d56-4077-9630-7fc6eaa53bc9" path="/var/lib/kubelet/pods/0f02cdb5-1d56-4077-9630-7fc6eaa53bc9/volumes"
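
The two nestedpendingoperations errors above show the retry policy for the failed rabbitmq-cell1 config-data mount: the first retry is scheduled 500ms out, the next 1s out, i.e. the delay doubles per attempt while the configmap stays missing. A minimal sketch of that doubling; the initial delay comes from the log, the cap is an assumption:

package main

import (
	"fmt"
	"time"
)

// backoff returns a closure yielding the next retry delay, doubling each
// call (500ms, 1s, 2s, ...) up to a cap, matching the durationBeforeRetry
// progression in the errors above.
func backoff(initial, max time.Duration) func() time.Duration {
	d := initial
	return func() time.Duration {
		cur := d
		if d < max {
			d *= 2
		}
		return cur
	}
}

func main() {
	next := backoff(500*time.Millisecond, 2*time.Minute) // cap is illustrative
	for i := 0; i < 4; i++ {
		fmt.Println(next()) // 500ms, 1s, 2s, 4s
	}
}
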
"Cleaned up orphaned pod volumes dir" podUID="0f02cdb5-1d56-4077-9630-7fc6eaa53bc9" path="/var/lib/kubelet/pods/0f02cdb5-1d56-4077-9630-7fc6eaa53bc9/volumes" Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.561059 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="437ab265-6a33-4c00-8afc-707827fcf7d3" path="/var/lib/kubelet/pods/437ab265-6a33-4c00-8afc-707827fcf7d3/volumes" Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.561754 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="537b4446-c59e-4c79-9f65-2221ddb6783c" path="/var/lib/kubelet/pods/537b4446-c59e-4c79-9f65-2221ddb6783c/volumes" Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.562373 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5888222d-3a6a-4e9c-8dd2-543d2029826e" path="/var/lib/kubelet/pods/5888222d-3a6a-4e9c-8dd2-543d2029826e/volumes" Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.563933 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5919579f-48cb-4d2f-94b6-6277af39d739" path="/var/lib/kubelet/pods/5919579f-48cb-4d2f-94b6-6277af39d739/volumes" Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.565390 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c35d79b-cda7-4d32-aa7e-208fb8a12e15" path="/var/lib/kubelet/pods/8c35d79b-cda7-4d32-aa7e-208fb8a12e15/volumes" Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.566467 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1e61909-9629-4f0e-8393-508ce99aff63" path="/var/lib/kubelet/pods/a1e61909-9629-4f0e-8393-508ce99aff63/volumes" Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.571929 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c79d6b94-62d5-41a1-ae40-acec75234d16" path="/var/lib/kubelet/pods/c79d6b94-62d5-41a1-ae40-acec75234d16/volumes" Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.574124 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dac702ac-33bf-46f4-966d-41573aaf1c28" path="/var/lib/kubelet/pods/dac702ac-33bf-46f4-966d-41573aaf1c28/volumes" Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.575485 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e4c4e482-8eb9-4fa6-8074-7fd0bea98618" path="/var/lib/kubelet/pods/e4c4e482-8eb9-4fa6-8074-7fd0bea98618/volumes" Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.576322 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa049756-1151-4b26-8f4a-669b4bf01ac3" path="/var/lib/kubelet/pods/fa049756-1151-4b26-8f4a-669b4bf01ac3/volumes" Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.578985 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-sstns"] Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.579035 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.579053 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.579065 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74b99f9475-2cfcp"] Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.579088 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.579102 4998 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack/ovsdbserver-sb-0"] Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.595970 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="94b790fb-3209-436d-b48d-f3978a82a557" containerName="openstack-network-exporter" containerID="cri-o://4f8ef037c2d0e4e053ae59278fde31f26ecb573b9d514deb328a8f1afc372f98" gracePeriod=300 Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.596371 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="4767a1f8-263b-4c49-8cb7-7a7f9b8271dc" containerName="glance-log" containerID="cri-o://8cde06b960a90ef44310da758f839bc01b1bda0dd3d7ba611dc66ebe32a3eaba" gracePeriod=30 Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.596670 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="59162297-8dd9-4ddd-a18b-8045d2f6c610" containerName="cinder-api-log" containerID="cri-o://7d86f1f77c5869d58d533147a1dd56150d79e3f61aa23ff73cbef100c39012e2" gracePeriod=30 Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.596857 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="4767a1f8-263b-4c49-8cb7-7a7f9b8271dc" containerName="glance-httpd" containerID="cri-o://0f64922061bc19d3ba8a787c55577aa51eb3d22b40d2c5ba7c8080728dee564e" gracePeriod=30 Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.597000 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="59162297-8dd9-4ddd-a18b-8045d2f6c610" containerName="cinder-api" containerID="cri-o://32b39b9d482c5108ab853ad62904b980cd74eaab84e9164c4243b13c2f37c3c5" gracePeriod=30 Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.597026 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="ecd25f56-731a-4b58-837d-7d81dc9f595e" containerName="cinder-scheduler" containerID="cri-o://b9cf13c79eeb6224fa7a06ecf85b9c0950a6c413aa4a0ee378c2547496f98817" gracePeriod=30 Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.597196 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-74b99f9475-2cfcp" podUID="f61ec1cb-5d51-4c91-89a6-cbdb2d290838" containerName="dnsmasq-dns" containerID="cri-o://0678f937a920be133969dc75e38cd8c33172cd7959d53982eb1d4b695fa391ae" gracePeriod=10 Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.597256 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="ecd25f56-731a-4b58-837d-7d81dc9f595e" containerName="probe" containerID="cri-o://232bbb0f6faa06716e49e70604c13f8b54cf7576fa7c9e4b6b3cd621731e2ac5" gracePeriod=30 Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.617985 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-f31a-account-create-update-g2z99"] Feb 03 07:09:14 crc kubenswrapper[4998]: E0203 07:09:14.639256 4998 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Feb 03 07:09:14 crc kubenswrapper[4998]: E0203 07:09:14.639348 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-config-data podName:59f5a5d7-787a-4941-a2d3-2fe8db65cb31 nodeName:}" failed. 
No retries permitted until 2026-02-03 07:09:15.139325236 +0000 UTC m=+1393.426019042 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-config-data") pod "rabbitmq-server-0" (UID: "59f5a5d7-787a-4941-a2d3-2fe8db65cb31") : configmap "rabbitmq-config-data" not found Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.718239 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-f31a-account-create-update-g2z99"] Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.727753 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-mq5f7"] Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.745612 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-mq5f7"] Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.793532 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.793769 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="18701d06-8e80-4822-9128-dd9ba0e5bf1c" containerName="glance-log" containerID="cri-o://f45874e7a3396bb25e17a358e507a1b8a9db053a662b6cd39150143abd3c21f5" gracePeriod=30 Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.794161 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="18701d06-8e80-4822-9128-dd9ba0e5bf1c" containerName="glance-httpd" containerID="cri-o://9da387018f31a72ba944e277abd1e671ca26b317532692abef949c8149cd34b5" gracePeriod=30 Feb 03 07:09:14 crc kubenswrapper[4998]: E0203 07:09:14.801503 4998 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err="command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: " execCommand=["/usr/share/ovn/scripts/ovn-ctl","stop_controller"] containerName="ovn-controller" pod="openstack/ovn-controller-sm6db" message=< Feb 03 07:09:14 crc kubenswrapper[4998]: Exiting ovn-controller (1) [ OK ] Feb 03 07:09:14 crc kubenswrapper[4998]: > Feb 03 07:09:14 crc kubenswrapper[4998]: E0203 07:09:14.801538 4998 kuberuntime_container.go:691] "PreStop hook failed" err="command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: " pod="openstack/ovn-controller-sm6db" podUID="5a08e2bf-f0a7-4812-8137-c305d886f174" containerName="ovn-controller" containerID="cri-o://a058d234acce401bbcee892b1ab5a2ecf48b4ab883b5706dc40ddcd103e12351" Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.801575 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-sm6db" podUID="5a08e2bf-f0a7-4812-8137-c305d886f174" containerName="ovn-controller" containerID="cri-o://a058d234acce401bbcee892b1ab5a2ecf48b4ab883b5706dc40ddcd103e12351" gracePeriod=30 Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.821469 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-nkj98"] Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.834179 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-nkj98"] Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.855825 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.856104 4998 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openstack/nova-api-0" podUID="b1280d3d-d626-4af9-b262-93fea6a5bbc9" containerName="nova-api-log" containerID="cri-o://6111c8827a1b20f6b8e01ed8beb9ba58ab11e1ba6f80bd56b10904401d415425" gracePeriod=30 Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.856152 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="b1280d3d-d626-4af9-b262-93fea6a5bbc9" containerName="nova-api-api" containerID="cri-o://f4738971fa2556954577c66479ee76038051cc7ed9700481aa4de2594ba0b983" gracePeriod=30 Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.874866 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-594c6c97c7-9bqhd"] Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.875115 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-594c6c97c7-9bqhd" podUID="e24d47fe-485b-4ceb-bdab-25b10ac92fa2" containerName="neutron-api" containerID="cri-o://f6359e2d805c5da84deb6d70b3a2cf1b151546db9cb67ce6157717737983ed9d" gracePeriod=30 Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.875843 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-594c6c97c7-9bqhd" podUID="e24d47fe-485b-4ceb-bdab-25b10ac92fa2" containerName="neutron-httpd" containerID="cri-o://ac85fb57d382b091b0ec3df49b55d69c4fe4553fabf64a2bf4ffa916b408f777" gracePeriod=30 Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.930671 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-2g7ft"] Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.942998 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.943032 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-2g7ft"] Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.945916 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="account-server" containerID="cri-o://0f784b826f4672ca3856308b4f90c8ceacf08cf471c4dba8d28ef78a91d7985f" gracePeriod=30 Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.946153 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="container-updater" containerID="cri-o://a561394a411f9e9905f3eb76a7bc935495a1e0f71ad59bb7e60e389e4ccf2656" gracePeriod=30 Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.946277 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="swift-recon-cron" containerID="cri-o://8d491cb6f76bdb5ad3981bba0e2ccc67efd84f5bc5778d32aad45ae237cb920e" gracePeriod=30 Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.946340 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="rsync" containerID="cri-o://91d35084ff382e39d07e6d23a955488bd8e1fc108bc54d221ff9c73527f12831" gracePeriod=30 Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.946386 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" 
containerName="object-expirer" containerID="cri-o://ede2cd76a85c73b54f858f8d327a0cbaf17ad5297a50c2492295ebff09d11252" gracePeriod=30 Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.946427 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="object-updater" containerID="cri-o://81dac1e002ac3841ac510ce705948252966c00472415ebc65c67b828d80bdd5b" gracePeriod=30 Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.946469 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="object-auditor" containerID="cri-o://d71043657e67f511c84e1b80c92f0b68c1da1077f14a94c55b36f366b7a2f322" gracePeriod=30 Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.946510 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="object-replicator" containerID="cri-o://10dab8bfcc6645bc889616f4f108cf19114ce18a2cadd19540c5dd170f5b6fcb" gracePeriod=30 Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.946558 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="object-server" containerID="cri-o://39817cf90fdb88cf824e056a9ffea636e16485fac2c9389858ca4ba9f02b85f5" gracePeriod=30 Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.946625 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="account-reaper" containerID="cri-o://1f16d4c637461caae324eec51a6c3d5da587838e38267ba1cef416d450d84f75" gracePeriod=30 Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.946673 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="container-auditor" containerID="cri-o://5c9ec6bd23284c2b390c4776da0627623c5642e7460b4cf117dc28bd400f8c57" gracePeriod=30 Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.946715 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="container-replicator" containerID="cri-o://e6e7540edfe4023348abde594012e510b3a4f31157f2972b831ea5fad3893831" gracePeriod=30 Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.946758 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="container-server" containerID="cri-o://ddd991a0dcb30ff0ae66164089cfc01b4cf70c05ee6132bec706f3086cbf8a29" gracePeriod=30 Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.946839 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="account-replicator" containerID="cri-o://3a7581f975cd3425a6842b0b3e0cc3c6d6eb2078e3a460284ad126263371957f" gracePeriod=30 Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.946889 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="account-auditor" 
containerID="cri-o://3eb1016c4bf7e03e9ac5f399eb54ed4c74b192d6e526a912f85055ec2a7696de" gracePeriod=30 Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.975970 4998 generic.go:334] "Generic (PLEG): container finished" podID="07088226-5029-4477-a6e1-85fd28c08f4b" containerID="a172aacd50c8df3a6ef1e772e9a7a1499cc74d9310858af6283ac53328c157db" exitCode=2 Feb 03 07:09:14 crc kubenswrapper[4998]: I0203 07:09:14.976054 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"07088226-5029-4477-a6e1-85fd28c08f4b","Type":"ContainerDied","Data":"a172aacd50c8df3a6ef1e772e9a7a1499cc74d9310858af6283ac53328c157db"} Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.036522 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.061078 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="94b790fb-3209-436d-b48d-f3978a82a557" containerName="ovsdbserver-sb" containerID="cri-o://f40f4be544d33826b95826c17cd8d7866898d03ff404be03363a0a348e155097" gracePeriod=300 Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.088969 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.089305 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="3e51da52-0dd9-4394-bb81-c4a1e534ad17" containerName="nova-metadata-log" containerID="cri-o://e5a74b71aeac6fcb20830db0c86ae6a7644b150ccbef95f2ae2b39cce49f5353" gracePeriod=30 Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.089855 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="3e51da52-0dd9-4394-bb81-c4a1e534ad17" containerName="nova-metadata-metadata" containerID="cri-o://df2f93789843ee0c929625754c662eef472a6b4dd88ccbe2cb42cb58b6f17304" gracePeriod=30 Feb 03 07:09:15 crc kubenswrapper[4998]: E0203 07:09:15.101911 4998 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Feb 03 07:09:15 crc kubenswrapper[4998]: E0203 07:09:15.101993 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/cc9d5160-2c51-474c-aca1-1af693753ee8-config-data podName:cc9d5160-2c51-474c-aca1-1af693753ee8 nodeName:}" failed. No retries permitted until 2026-02-03 07:09:17.101973972 +0000 UTC m=+1395.388667778 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/cc9d5160-2c51-474c-aca1-1af693753ee8-config-data") pod "rabbitmq-cell1-server-0" (UID: "cc9d5160-2c51-474c-aca1-1af693753ee8") : configmap "rabbitmq-cell1-config-data" not found Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.105409 4998 generic.go:334] "Generic (PLEG): container finished" podID="4767a1f8-263b-4c49-8cb7-7a7f9b8271dc" containerID="8cde06b960a90ef44310da758f839bc01b1bda0dd3d7ba611dc66ebe32a3eaba" exitCode=143 Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.105480 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc","Type":"ContainerDied","Data":"8cde06b960a90ef44310da758f839bc01b1bda0dd3d7ba611dc66ebe32a3eaba"} Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.107479 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-11d4-account-create-update-s4js8"] Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.139085 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-11d4-account-create-update-s4js8"] Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.159090 4998 generic.go:334] "Generic (PLEG): container finished" podID="f61ec1cb-5d51-4c91-89a6-cbdb2d290838" containerID="0678f937a920be133969dc75e38cd8c33172cd7959d53982eb1d4b695fa391ae" exitCode=0 Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.159200 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74b99f9475-2cfcp" event={"ID":"f61ec1cb-5d51-4c91-89a6-cbdb2d290838","Type":"ContainerDied","Data":"0678f937a920be133969dc75e38cd8c33172cd7959d53982eb1d4b695fa391ae"} Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.182054 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c/ovsdbserver-nb/0.log" Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.182104 4998 generic.go:334] "Generic (PLEG): container finished" podID="e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c" containerID="a8e05668b7efcd571ea3d4d5d93de7fe8edf60bb34bd662cf3ee577c3923dcdb" exitCode=143 Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.182220 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-55ccbc8794-9m7vc"] Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.182253 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c","Type":"ContainerDied","Data":"a8e05668b7efcd571ea3d4d5d93de7fe8edf60bb34bd662cf3ee577c3923dcdb"} Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.182511 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-55ccbc8794-9m7vc" podUID="c122d5d6-c472-46c4-9baf-195893bff38a" containerName="placement-log" containerID="cri-o://dd8e69804bea83a06ecc142f3c827d453c26167985e7a893d560d1c6cc6142bb" gracePeriod=30 Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.182949 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-55ccbc8794-9m7vc" podUID="c122d5d6-c472-46c4-9baf-195893bff38a" containerName="placement-api" containerID="cri-o://115a3825a27cab7fdfb490f904a7aeed06539b9f2cfa910218ec08fed15d9afa" gracePeriod=30 Feb 03 07:09:15 crc kubenswrapper[4998]: E0203 07:09:15.209705 4998 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: 
configmap "rabbitmq-config-data" not found Feb 03 07:09:15 crc kubenswrapper[4998]: E0203 07:09:15.209767 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-config-data podName:59f5a5d7-787a-4941-a2d3-2fe8db65cb31 nodeName:}" failed. No retries permitted until 2026-02-03 07:09:16.209749805 +0000 UTC m=+1394.496443611 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-config-data") pod "rabbitmq-server-0" (UID: "59f5a5d7-787a-4941-a2d3-2fe8db65cb31") : configmap "rabbitmq-config-data" not found Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.218113 4998 generic.go:334] "Generic (PLEG): container finished" podID="59162297-8dd9-4ddd-a18b-8045d2f6c610" containerID="7d86f1f77c5869d58d533147a1dd56150d79e3f61aa23ff73cbef100c39012e2" exitCode=143 Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.218216 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"59162297-8dd9-4ddd-a18b-8045d2f6c610","Type":"ContainerDied","Data":"7d86f1f77c5869d58d533147a1dd56150d79e3f61aa23ff73cbef100c39012e2"} Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.255695 4998 generic.go:334] "Generic (PLEG): container finished" podID="5a08e2bf-f0a7-4812-8137-c305d886f174" containerID="a058d234acce401bbcee892b1ab5a2ecf48b4ab883b5706dc40ddcd103e12351" exitCode=0 Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.255966 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-sm6db" event={"ID":"5a08e2bf-f0a7-4812-8137-c305d886f174","Type":"ContainerDied","Data":"a058d234acce401bbcee892b1ab5a2ecf48b4ab883b5706dc40ddcd103e12351"} Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.276121 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-qtllb_9a85f317-a6ed-4d19-8222-136fda8b4517/openstack-network-exporter/0.log" Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.276170 4998 generic.go:334] "Generic (PLEG): container finished" podID="9a85f317-a6ed-4d19-8222-136fda8b4517" containerID="2647dc3bcf763bab29c7e6b082b05a86b066ea5c8e34ab43d3543cc7fce88f34" exitCode=2 Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.276262 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-qtllb" event={"ID":"9a85f317-a6ed-4d19-8222-136fda8b4517","Type":"ContainerDied","Data":"2647dc3bcf763bab29c7e6b082b05a86b066ea5c8e34ab43d3543cc7fce88f34"} Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.310899 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-qkqjf"] Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.319291 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-qkqjf"] Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.327090 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-5hsmh"] Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.333153 4998 generic.go:334] "Generic (PLEG): container finished" podID="94b790fb-3209-436d-b48d-f3978a82a557" containerID="4f8ef037c2d0e4e053ae59278fde31f26ecb573b9d514deb328a8f1afc372f98" exitCode=2 Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.333212 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" 
event={"ID":"94b790fb-3209-436d-b48d-f3978a82a557","Type":"ContainerDied","Data":"4f8ef037c2d0e4e053ae59278fde31f26ecb573b9d514deb328a8f1afc372f98"} Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.352389 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-5hsmh"] Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.370299 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.370362 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-9147-account-create-update-5gj8v"] Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.370562 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="eaf9e314-f762-45c1-bab5-3cec9661a787" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://daec52a53c154164ed7086c02f290ab1ac36c8f53a2d65c9b1098d2717904264" gracePeriod=30 Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.377360 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-6799k"] Feb 03 07:09:15 crc kubenswrapper[4998]: E0203 07:09:15.390088 4998 kuberuntime_manager.go:1274] "Unhandled Error" err=< Feb 03 07:09:15 crc kubenswrapper[4998]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:0f7943e02fbdd3daec1d3db72fa9396bf37ad3fdd6b0f3119c90e29629e095ed,Command:[/bin/sh -c #!/bin/bash Feb 03 07:09:15 crc kubenswrapper[4998]: Feb 03 07:09:15 crc kubenswrapper[4998]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Feb 03 07:09:15 crc kubenswrapper[4998]: Feb 03 07:09:15 crc kubenswrapper[4998]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Feb 03 07:09:15 crc kubenswrapper[4998]: Feb 03 07:09:15 crc kubenswrapper[4998]: MYSQL_CMD="mysql -h -u root -P 3306" Feb 03 07:09:15 crc kubenswrapper[4998]: Feb 03 07:09:15 crc kubenswrapper[4998]: if [ -n "nova_api" ]; then Feb 03 07:09:15 crc kubenswrapper[4998]: GRANT_DATABASE="nova_api" Feb 03 07:09:15 crc kubenswrapper[4998]: else Feb 03 07:09:15 crc kubenswrapper[4998]: GRANT_DATABASE="*" Feb 03 07:09:15 crc kubenswrapper[4998]: fi Feb 03 07:09:15 crc kubenswrapper[4998]: Feb 03 07:09:15 crc kubenswrapper[4998]: # going for maximum compatibility here: Feb 03 07:09:15 crc kubenswrapper[4998]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Feb 03 07:09:15 crc kubenswrapper[4998]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Feb 03 07:09:15 crc kubenswrapper[4998]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Feb 03 07:09:15 crc kubenswrapper[4998]: # support updates Feb 03 07:09:15 crc kubenswrapper[4998]: Feb 03 07:09:15 crc kubenswrapper[4998]: $MYSQL_CMD < logger="UnhandledError" Feb 03 07:09:15 crc kubenswrapper[4998]: E0203 07:09:15.391204 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-api-db-secret\\\" not found\"" pod="openstack/nova-api-d2c0-account-create-update-688tk" podUID="daa99d07-07ba-49aa-82c3-8bcfca0c3564" Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.398654 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-6799k"] Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.409404 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-78755df597-h9t98"] Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.409673 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-78755df597-h9t98" podUID="0df5b57a-e165-41ef-8e19-30b87b9566f3" containerName="barbican-worker-log" containerID="cri-o://e0270c6ae00fb3fd677409d836f450c36d50d559a6439b219567647b04b70ce8" gracePeriod=30 Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.409836 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-78755df597-h9t98" podUID="0df5b57a-e165-41ef-8e19-30b87b9566f3" containerName="barbican-worker" containerID="cri-o://5b99f558d74ddfe530b75fa587287403c988c189202261dc57a486311499d5c9" gracePeriod=30 Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.420531 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-d2c0-account-create-update-688tk"] Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.429411 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-e892-account-create-update-khr4s"] Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.451140 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-vf44v"] Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.520620 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-87bbz"] Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.547879 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-cell1-galera-0" podUID="4485520f-24df-4521-b5e9-6076ffa00bd6" containerName="galera" containerID="cri-o://5b7b33ab0c2f178c9cbaf0012a4edcaa73280c5ee8e2f4c6a4faf102c9592514" gracePeriod=30 Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.575222 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-88lhc"] Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.580149 4998 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-b7g8v" podUID="03de7e09-127d-4746-a01c-e5b6a3618014" containerName="registry-server" probeResult="failure" output=< Feb 03 07:09:15 crc kubenswrapper[4998]: timeout: failed to connect service ":50051" within 1s Feb 03 07:09:15 crc kubenswrapper[4998]: > Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.590879 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-87bbz"] Feb 03 07:09:15 crc kubenswrapper[4998]: E0203 07:09:15.618196 4998 kuberuntime_manager.go:1274] 
"Unhandled Error" err=< Feb 03 07:09:15 crc kubenswrapper[4998]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:0f7943e02fbdd3daec1d3db72fa9396bf37ad3fdd6b0f3119c90e29629e095ed,Command:[/bin/sh -c #!/bin/bash Feb 03 07:09:15 crc kubenswrapper[4998]: Feb 03 07:09:15 crc kubenswrapper[4998]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Feb 03 07:09:15 crc kubenswrapper[4998]: Feb 03 07:09:15 crc kubenswrapper[4998]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Feb 03 07:09:15 crc kubenswrapper[4998]: Feb 03 07:09:15 crc kubenswrapper[4998]: MYSQL_CMD="mysql -h -u root -P 3306" Feb 03 07:09:15 crc kubenswrapper[4998]: Feb 03 07:09:15 crc kubenswrapper[4998]: if [ -n "barbican" ]; then Feb 03 07:09:15 crc kubenswrapper[4998]: GRANT_DATABASE="barbican" Feb 03 07:09:15 crc kubenswrapper[4998]: else Feb 03 07:09:15 crc kubenswrapper[4998]: GRANT_DATABASE="*" Feb 03 07:09:15 crc kubenswrapper[4998]: fi Feb 03 07:09:15 crc kubenswrapper[4998]: Feb 03 07:09:15 crc kubenswrapper[4998]: # going for maximum compatibility here: Feb 03 07:09:15 crc kubenswrapper[4998]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Feb 03 07:09:15 crc kubenswrapper[4998]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Feb 03 07:09:15 crc kubenswrapper[4998]: # 3. create user with CREATE but then do all password and TLS with ALTER to Feb 03 07:09:15 crc kubenswrapper[4998]: # support updates Feb 03 07:09:15 crc kubenswrapper[4998]: Feb 03 07:09:15 crc kubenswrapper[4998]: $MYSQL_CMD < logger="UnhandledError" Feb 03 07:09:15 crc kubenswrapper[4998]: E0203 07:09:15.618580 4998 kuberuntime_manager.go:1274] "Unhandled Error" err=< Feb 03 07:09:15 crc kubenswrapper[4998]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:0f7943e02fbdd3daec1d3db72fa9396bf37ad3fdd6b0f3119c90e29629e095ed,Command:[/bin/sh -c #!/bin/bash Feb 03 07:09:15 crc kubenswrapper[4998]: Feb 03 07:09:15 crc kubenswrapper[4998]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Feb 03 07:09:15 crc kubenswrapper[4998]: Feb 03 07:09:15 crc kubenswrapper[4998]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Feb 03 07:09:15 crc kubenswrapper[4998]: Feb 03 07:09:15 crc kubenswrapper[4998]: MYSQL_CMD="mysql -h -u root -P 3306" Feb 03 07:09:15 crc kubenswrapper[4998]: Feb 03 07:09:15 crc kubenswrapper[4998]: if [ -n "nova_cell0" ]; then Feb 03 07:09:15 crc kubenswrapper[4998]: GRANT_DATABASE="nova_cell0" Feb 03 07:09:15 crc kubenswrapper[4998]: else Feb 03 07:09:15 crc kubenswrapper[4998]: GRANT_DATABASE="*" Feb 03 07:09:15 crc kubenswrapper[4998]: fi Feb 03 07:09:15 crc kubenswrapper[4998]: Feb 03 07:09:15 crc kubenswrapper[4998]: # going for maximum compatibility here: Feb 03 07:09:15 crc kubenswrapper[4998]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Feb 03 07:09:15 crc kubenswrapper[4998]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Feb 03 07:09:15 crc kubenswrapper[4998]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Feb 03 07:09:15 crc kubenswrapper[4998]: # support updates Feb 03 07:09:15 crc kubenswrapper[4998]: Feb 03 07:09:15 crc kubenswrapper[4998]: $MYSQL_CMD < logger="UnhandledError" Feb 03 07:09:15 crc kubenswrapper[4998]: E0203 07:09:15.619313 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"barbican-db-secret\\\" not found\"" pod="openstack/barbican-e892-account-create-update-khr4s" podUID="0bdb467a-423f-4374-8f68-0be7157d1402" Feb 03 07:09:15 crc kubenswrapper[4998]: E0203 07:09:15.619654 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-cell0-db-secret\\\" not found\"" pod="openstack/nova-cell0-65da-account-create-update-mnzrq" podUID="df47c2af-c1ee-4ab7-9691-ea6e28fa1831" Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.652356 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-vf44v"] Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.659981 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-d9fff78f-v2ccx"] Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.660246 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-d9fff78f-v2ccx" podUID="88a17ed1-2cc7-488d-a325-67f99d3a12d8" containerName="proxy-httpd" containerID="cri-o://8783d1fdb78c71bed87bdaf085a3d90306855381ef2f20915e65a6d24c97055f" gracePeriod=30 Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.660718 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-d9fff78f-v2ccx" podUID="88a17ed1-2cc7-488d-a325-67f99d3a12d8" containerName="proxy-server" containerID="cri-o://7ac91621be4ad73c5875bb4f2e805c6f9619e9a22ac99cac808fd03dd2ffd425" gracePeriod=30 Feb 03 07:09:15 crc kubenswrapper[4998]: E0203 07:09:15.692203 4998 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Feb 03 07:09:15 crc kubenswrapper[4998]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Feb 03 07:09:15 crc kubenswrapper[4998]: + source /usr/local/bin/container-scripts/functions Feb 03 07:09:15 crc kubenswrapper[4998]: ++ OVNBridge=br-int Feb 03 07:09:15 crc kubenswrapper[4998]: ++ OVNRemote=tcp:localhost:6642 Feb 03 07:09:15 crc kubenswrapper[4998]: ++ OVNEncapType=geneve Feb 03 07:09:15 crc kubenswrapper[4998]: ++ OVNAvailabilityZones= Feb 03 07:09:15 crc kubenswrapper[4998]: ++ EnableChassisAsGateway=true Feb 03 07:09:15 crc kubenswrapper[4998]: ++ PhysicalNetworks= Feb 03 07:09:15 crc kubenswrapper[4998]: ++ OVNHostName= Feb 03 07:09:15 crc kubenswrapper[4998]: ++ DB_FILE=/etc/openvswitch/conf.db Feb 03 07:09:15 crc kubenswrapper[4998]: ++ ovs_dir=/var/lib/openvswitch Feb 03 07:09:15 crc kubenswrapper[4998]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Feb 03 07:09:15 crc kubenswrapper[4998]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Feb 03 07:09:15 crc kubenswrapper[4998]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Feb 03 07:09:15 crc kubenswrapper[4998]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Feb 03 07:09:15 crc kubenswrapper[4998]: + sleep 0.5 Feb 03 07:09:15 crc kubenswrapper[4998]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Feb 03 07:09:15 crc kubenswrapper[4998]: + sleep 0.5 Feb 03 07:09:15 crc kubenswrapper[4998]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Feb 03 07:09:15 crc kubenswrapper[4998]: + cleanup_ovsdb_server_semaphore Feb 03 07:09:15 crc kubenswrapper[4998]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Feb 03 07:09:15 crc kubenswrapper[4998]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Feb 03 07:09:15 crc kubenswrapper[4998]: > execCommand=["/usr/local/bin/container-scripts/stop-ovsdb-server.sh"] containerName="ovsdb-server" pod="openstack/ovn-controller-ovs-t4p58" message=< Feb 03 07:09:15 crc kubenswrapper[4998]: Exiting ovsdb-server (5) [ OK ] Feb 03 07:09:15 crc kubenswrapper[4998]: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Feb 03 07:09:15 crc kubenswrapper[4998]: + source /usr/local/bin/container-scripts/functions Feb 03 07:09:15 crc kubenswrapper[4998]: ++ OVNBridge=br-int Feb 03 07:09:15 crc kubenswrapper[4998]: ++ OVNRemote=tcp:localhost:6642 Feb 03 07:09:15 crc kubenswrapper[4998]: ++ OVNEncapType=geneve Feb 03 07:09:15 crc kubenswrapper[4998]: ++ OVNAvailabilityZones= Feb 03 07:09:15 crc kubenswrapper[4998]: ++ EnableChassisAsGateway=true Feb 03 07:09:15 crc kubenswrapper[4998]: ++ PhysicalNetworks= Feb 03 07:09:15 crc kubenswrapper[4998]: ++ OVNHostName= Feb 03 07:09:15 crc kubenswrapper[4998]: ++ DB_FILE=/etc/openvswitch/conf.db Feb 03 07:09:15 crc kubenswrapper[4998]: ++ ovs_dir=/var/lib/openvswitch Feb 03 07:09:15 crc kubenswrapper[4998]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Feb 03 07:09:15 crc kubenswrapper[4998]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Feb 03 07:09:15 crc kubenswrapper[4998]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Feb 03 07:09:15 crc kubenswrapper[4998]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Feb 03 07:09:15 crc kubenswrapper[4998]: + sleep 0.5 Feb 03 07:09:15 crc kubenswrapper[4998]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Feb 03 07:09:15 crc kubenswrapper[4998]: + sleep 0.5 Feb 03 07:09:15 crc kubenswrapper[4998]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Feb 03 07:09:15 crc kubenswrapper[4998]: + cleanup_ovsdb_server_semaphore Feb 03 07:09:15 crc kubenswrapper[4998]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Feb 03 07:09:15 crc kubenswrapper[4998]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Feb 03 07:09:15 crc kubenswrapper[4998]: > Feb 03 07:09:15 crc kubenswrapper[4998]: E0203 07:09:15.692249 4998 kuberuntime_container.go:691] "PreStop hook failed" err=< Feb 03 07:09:15 crc kubenswrapper[4998]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Feb 03 07:09:15 crc kubenswrapper[4998]: + source /usr/local/bin/container-scripts/functions Feb 03 07:09:15 crc kubenswrapper[4998]: ++ OVNBridge=br-int Feb 03 07:09:15 crc kubenswrapper[4998]: ++ OVNRemote=tcp:localhost:6642 Feb 03 07:09:15 crc kubenswrapper[4998]: ++ OVNEncapType=geneve Feb 03 07:09:15 crc kubenswrapper[4998]: ++ OVNAvailabilityZones= Feb 03 07:09:15 crc kubenswrapper[4998]: ++ EnableChassisAsGateway=true Feb 03 07:09:15 crc kubenswrapper[4998]: ++ PhysicalNetworks= Feb 03 07:09:15 crc kubenswrapper[4998]: ++ OVNHostName= Feb 03 07:09:15 crc kubenswrapper[4998]: ++ DB_FILE=/etc/openvswitch/conf.db Feb 03 07:09:15 crc kubenswrapper[4998]: ++ ovs_dir=/var/lib/openvswitch Feb 03 07:09:15 crc kubenswrapper[4998]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Feb 03 07:09:15 crc kubenswrapper[4998]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Feb 03 07:09:15 crc kubenswrapper[4998]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Feb 03 07:09:15 crc kubenswrapper[4998]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Feb 03 07:09:15 crc kubenswrapper[4998]: + sleep 0.5 Feb 03 07:09:15 crc kubenswrapper[4998]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Feb 03 07:09:15 crc kubenswrapper[4998]: + sleep 0.5 Feb 03 07:09:15 crc kubenswrapper[4998]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Feb 03 07:09:15 crc kubenswrapper[4998]: + cleanup_ovsdb_server_semaphore Feb 03 07:09:15 crc kubenswrapper[4998]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Feb 03 07:09:15 crc kubenswrapper[4998]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Feb 03 07:09:15 crc kubenswrapper[4998]: > pod="openstack/ovn-controller-ovs-t4p58" podUID="5e71558d-268c-4680-b43c-9fb48f34b38f" containerName="ovsdb-server" containerID="cri-o://f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba" Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.692283 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-t4p58" podUID="5e71558d-268c-4680-b43c-9fb48f34b38f" containerName="ovsdb-server" containerID="cri-o://f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba" gracePeriod=29 Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.697909 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-88lhc"] Feb 03 07:09:15 crc kubenswrapper[4998]: E0203 07:09:15.705326 4998 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod09cd9158_f279_4ac0_b8fe_0121e85a1b20.slice/crio-conmon-3a7581f975cd3425a6842b0b3e0cc3c6d6eb2078e3a460284ad126263371957f.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf61ec1cb_5d51_4c91_89a6_cbdb2d290838.slice/crio-conmon-0678f937a920be133969dc75e38cd8c33172cd7959d53982eb1d4b695fa391ae.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod09cd9158_f279_4ac0_b8fe_0121e85a1b20.slice/crio-conmon-10dab8bfcc6645bc889616f4f108cf19114ce18a2cadd19540c5dd170f5b6fcb.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4767a1f8_263b_4c49_8cb7_7a7f9b8271dc.slice/crio-8cde06b960a90ef44310da758f839bc01b1bda0dd3d7ba611dc66ebe32a3eaba.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode603cc71_c5b7_4f97_9ed9_3c6d114ddb8c.slice/crio-7404cf26ae91b0be81d59a320980e944b738ce1db633c16325d9a0711dd3bad5.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode603cc71_c5b7_4f97_9ed9_3c6d114ddb8c.slice/crio-conmon-a8e05668b7efcd571ea3d4d5d93de7fe8edf60bb34bd662cf3ee577c3923dcdb.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod09cd9158_f279_4ac0_b8fe_0121e85a1b20.slice/crio-conmon-3eb1016c4bf7e03e9ac5f399eb54ed4c74b192d6e526a912f85055ec2a7696de.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod09cd9158_f279_4ac0_b8fe_0121e85a1b20.slice/crio-conmon-e6e7540edfe4023348abde594012e510b3a4f31157f2972b831ea5fad3893831.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod09cd9158_f279_4ac0_b8fe_0121e85a1b20.slice/crio-e6e7540edfe4023348abde594012e510b3a4f31157f2972b831ea5fad3893831.scope\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc122d5d6_c472_46c4_9baf_195893bff38a.slice/crio-conmon-dd8e69804bea83a06ecc142f3c827d453c26167985e7a893d560d1c6cc6142bb.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5a08e2bf_f0a7_4812_8137_c305d886f174.slice/crio-a058d234acce401bbcee892b1ab5a2ecf48b4ab883b5706dc40ddcd103e12351.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod59162297_8dd9_4ddd_a18b_8045d2f6c610.slice/crio-conmon-7d86f1f77c5869d58d533147a1dd56150d79e3f61aa23ff73cbef100c39012e2.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode603cc71_c5b7_4f97_9ed9_3c6d114ddb8c.slice/crio-conmon-7404cf26ae91b0be81d59a320980e944b738ce1db633c16325d9a0711dd3bad5.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod09cd9158_f279_4ac0_b8fe_0121e85a1b20.slice/crio-10dab8bfcc6645bc889616f4f108cf19114ce18a2cadd19540c5dd170f5b6fcb.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod18701d06_8e80_4822_9128_dd9ba0e5bf1c.slice/crio-conmon-f45874e7a3396bb25e17a358e507a1b8a9db053a662b6cd39150143abd3c21f5.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod58c44471_d442_4736_a649_c762a1c893fa.slice/crio-254b248ad815a3de6f3228e0bf342af87d6cff65d3356b30e77a551b768ab805.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod07088226_5029_4477_a6e1_85fd28c08f4b.slice/crio-conmon-a172aacd50c8df3a6ef1e772e9a7a1499cc74d9310858af6283ac53328c157db.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod09cd9158_f279_4ac0_b8fe_0121e85a1b20.slice/crio-conmon-d71043657e67f511c84e1b80c92f0b68c1da1077f14a94c55b36f366b7a2f322.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod09cd9158_f279_4ac0_b8fe_0121e85a1b20.slice/crio-91d35084ff382e39d07e6d23a955488bd8e1fc108bc54d221ff9c73527f12831.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod09cd9158_f279_4ac0_b8fe_0121e85a1b20.slice/crio-conmon-a561394a411f9e9905f3eb76a7bc935495a1e0f71ad59bb7e60e389e4ccf2656.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod09cd9158_f279_4ac0_b8fe_0121e85a1b20.slice/crio-ede2cd76a85c73b54f858f8d327a0cbaf17ad5297a50c2492295ebff09d11252.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod94b790fb_3209_436d_b48d_f3978a82a557.slice/crio-f40f4be544d33826b95826c17cd8d7866898d03ff404be03363a0a348e155097.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0df5b57a_e165_41ef_8e19_30b87b9566f3.slice/crio-e0270c6ae00fb3fd677409d836f450c36d50d559a6439b219567647b04b70ce8.scope\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4767a1f8_263b_4c49_8cb7_7a7f9b8271dc.slice/crio-conmon-8cde06b960a90ef44310da758f839bc01b1bda0dd3d7ba611dc66ebe32a3eaba.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod07088226_5029_4477_a6e1_85fd28c08f4b.slice/crio-a172aacd50c8df3a6ef1e772e9a7a1499cc74d9310858af6283ac53328c157db.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3e51da52_0dd9_4394_bb81_c4a1e534ad17.slice/crio-e5a74b71aeac6fcb20830db0c86ae6a7644b150ccbef95f2ae2b39cce49f5353.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod09cd9158_f279_4ac0_b8fe_0121e85a1b20.slice/crio-conmon-1f16d4c637461caae324eec51a6c3d5da587838e38267ba1cef416d450d84f75.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode603cc71_c5b7_4f97_9ed9_3c6d114ddb8c.slice/crio-a8e05668b7efcd571ea3d4d5d93de7fe8edf60bb34bd662cf3ee577c3923dcdb.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb1280d3d_d626_4af9_b262_93fea6a5bbc9.slice/crio-6111c8827a1b20f6b8e01ed8beb9ba58ab11e1ba6f80bd56b10904401d415425.scope\": RecentStats: unable to find data in memory cache]" Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.727648 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-65da-account-create-update-mnzrq"] Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.744897 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-5d94bbfff8-q2v5c"] Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.744962 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.746961 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-5d94bbfff8-q2v5c" podUID="13caad28-67cf-4251-9a98-e324e6f9722a" containerName="barbican-keystone-listener-log" containerID="cri-o://cabb5051e92ea728b58b2e5f6c017812f04640a06b66da9b92064814d7881f01" gracePeriod=30 Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.747158 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-5d94bbfff8-q2v5c" podUID="13caad28-67cf-4251-9a98-e324e6f9722a" containerName="barbican-keystone-listener" containerID="cri-o://59b03e0b0b59b7355fdc4f5e7fb6fc2a865970667616376aeba4c7993df8fea4" gracePeriod=30 Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.756554 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-rv82q"] Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.761195 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-t4p58" podUID="5e71558d-268c-4680-b43c-9fb48f34b38f" containerName="ovs-vswitchd" containerID="cri-o://5924834a1fe26ed73de03c1f7f47a03b21661cf375e099684ef40cf961a44b28" gracePeriod=29 Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.809692 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-747cb48568-vkq22"] Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.809992 4998 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openstack/barbican-api-747cb48568-vkq22" podUID="92b2a8f5-7091-4044-a057-3fc94b78439c" containerName="barbican-api-log" containerID="cri-o://2796cd74bb8475cbb6d691b8b975cd135e78b04d8ba8f94e7349bbd27134e585" gracePeriod=30 Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.810860 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-747cb48568-vkq22" podUID="92b2a8f5-7091-4044-a057-3fc94b78439c" containerName="barbican-api" containerID="cri-o://240cf161b75ce831815244c0f07efbf2f46a5553a467cc8c7c9190165768d96e" gracePeriod=30 Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.819142 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="cc9d5160-2c51-474c-aca1-1af693753ee8" containerName="rabbitmq" containerID="cri-o://0e9dcff5c5f1e34e7f9a045f44319200346178396df08e6468976e6cb812d2c2" gracePeriod=604800 Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.819399 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.819891 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="8a94b6e1-cdf7-4088-9f55-60457fa411f4" containerName="nova-scheduler-scheduler" containerID="cri-o://4941f0989a841b63f25282cf5a3988025476630db1ac4e5885254632064a0cc6" gracePeriod=30 Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.829105 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.829373 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="ec1012bb-b11f-4248-aa77-f9076a2a1fc9" containerName="nova-cell0-conductor-conductor" containerID="cri-o://659621b3871feec5ab21a36f8efb54aeb0a5418dddb477388b03fcd569919a1f" gracePeriod=30 Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.834632 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-sqllz"] Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.846140 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-sqllz"] Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.851219 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.851429 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="7d04a830-0b11-4766-b9bd-56a6f4b740ca" containerName="nova-cell1-conductor-conductor" containerID="cri-o://1c1e1c81810cf3242e2c7121522b6a0136f94d3920df202359c1972e68fa1635" gracePeriod=30 Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.857476 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-kmnkm"] Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.858407 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c/ovsdbserver-nb/0.log" Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.858463 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.865755 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-kmnkm"] Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.880288 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.903970 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-d2c0-account-create-update-688tk"] Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.922208 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-e892-account-create-update-khr4s"] Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.927881 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-65da-account-create-update-mnzrq"] Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.938953 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-9147-account-create-update-5gj8v"] Feb 03 07:09:15 crc kubenswrapper[4998]: E0203 07:09:15.970831 4998 kuberuntime_manager.go:1274] "Unhandled Error" err=< Feb 03 07:09:15 crc kubenswrapper[4998]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:0f7943e02fbdd3daec1d3db72fa9396bf37ad3fdd6b0f3119c90e29629e095ed,Command:[/bin/sh -c #!/bin/bash Feb 03 07:09:15 crc kubenswrapper[4998]: Feb 03 07:09:15 crc kubenswrapper[4998]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Feb 03 07:09:15 crc kubenswrapper[4998]: Feb 03 07:09:15 crc kubenswrapper[4998]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Feb 03 07:09:15 crc kubenswrapper[4998]: Feb 03 07:09:15 crc kubenswrapper[4998]: MYSQL_CMD="mysql -h -u root -P 3306" Feb 03 07:09:15 crc kubenswrapper[4998]: Feb 03 07:09:15 crc kubenswrapper[4998]: if [ -n "nova_cell1" ]; then Feb 03 07:09:15 crc kubenswrapper[4998]: GRANT_DATABASE="nova_cell1" Feb 03 07:09:15 crc kubenswrapper[4998]: else Feb 03 07:09:15 crc kubenswrapper[4998]: GRANT_DATABASE="*" Feb 03 07:09:15 crc kubenswrapper[4998]: fi Feb 03 07:09:15 crc kubenswrapper[4998]: Feb 03 07:09:15 crc kubenswrapper[4998]: # going for maximum compatibility here: Feb 03 07:09:15 crc kubenswrapper[4998]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Feb 03 07:09:15 crc kubenswrapper[4998]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Feb 03 07:09:15 crc kubenswrapper[4998]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Feb 03 07:09:15 crc kubenswrapper[4998]: # support updates Feb 03 07:09:15 crc kubenswrapper[4998]: Feb 03 07:09:15 crc kubenswrapper[4998]: $MYSQL_CMD < logger="UnhandledError" Feb 03 07:09:15 crc kubenswrapper[4998]: E0203 07:09:15.972217 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-cell1-db-secret\\\" not found\"" pod="openstack/nova-cell1-9147-account-create-update-5gj8v" podUID="3e82a048-30ed-4db8-8977-220db05eff1a" Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.973686 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-qtllb_9a85f317-a6ed-4d19-8222-136fda8b4517/openstack-network-exporter/0.log" Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.973734 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-qtllb" Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.980194 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="59f5a5d7-787a-4941-a2d3-2fe8db65cb31" containerName="rabbitmq" containerID="cri-o://098c0a6d87cc827e487059fcc544a85b758f02d99c658f66e5fbf5c96ee6f34c" gracePeriod=604800 Feb 03 07:09:15 crc kubenswrapper[4998]: I0203 07:09:15.984299 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74b99f9475-2cfcp" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.013724 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.044801 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-combined-ca-bundle\") pod \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\" (UID: \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.044932 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-metrics-certs-tls-certs\") pod \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\" (UID: \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.045003 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-ovsdbserver-nb-tls-certs\") pod \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\" (UID: \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.045069 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-chx9k\" (UniqueName: \"kubernetes.io/projected/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-kube-api-access-chx9k\") pod \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\" (UID: \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.045099 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\" (UID: 
\"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.045132 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-config\") pod \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\" (UID: \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.045148 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-scripts\") pod \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\" (UID: \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.045231 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-ovsdb-rundir\") pod \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\" (UID: \"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.047087 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c" (UID: "e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.047860 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-sm6db" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.054498 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-config" (OuterVolumeSpecName: "config") pod "e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c" (UID: "e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.058597 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-scripts" (OuterVolumeSpecName: "scripts") pod "e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c" (UID: "e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.065037 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c" (UID: "e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.071711 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-kube-api-access-chx9k" (OuterVolumeSpecName: "kube-api-access-chx9k") pod "e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c" (UID: "e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c"). InnerVolumeSpecName "kube-api-access-chx9k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.089496 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c" (UID: "e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.147605 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/9a85f317-a6ed-4d19-8222-136fda8b4517-ovs-rundir\") pod \"9a85f317-a6ed-4d19-8222-136fda8b4517\" (UID: \"9a85f317-a6ed-4d19-8222-136fda8b4517\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.147707 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9r2st\" (UniqueName: \"kubernetes.io/projected/58c44471-d442-4736-a649-c762a1c893fa-kube-api-access-9r2st\") pod \"58c44471-d442-4736-a649-c762a1c893fa\" (UID: \"58c44471-d442-4736-a649-c762a1c893fa\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.147702 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9a85f317-a6ed-4d19-8222-136fda8b4517-ovs-rundir" (OuterVolumeSpecName: "ovs-rundir") pod "9a85f317-a6ed-4d19-8222-136fda8b4517" (UID: "9a85f317-a6ed-4d19-8222-136fda8b4517"). InnerVolumeSpecName "ovs-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.147757 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a08e2bf-f0a7-4812-8137-c305d886f174-ovn-controller-tls-certs\") pod \"5a08e2bf-f0a7-4812-8137-c305d886f174\" (UID: \"5a08e2bf-f0a7-4812-8137-c305d886f174\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.147806 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5a08e2bf-f0a7-4812-8137-c305d886f174-scripts\") pod \"5a08e2bf-f0a7-4812-8137-c305d886f174\" (UID: \"5a08e2bf-f0a7-4812-8137-c305d886f174\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.147842 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a85f317-a6ed-4d19-8222-136fda8b4517-config\") pod \"9a85f317-a6ed-4d19-8222-136fda8b4517\" (UID: \"9a85f317-a6ed-4d19-8222-136fda8b4517\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.147872 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f61ec1cb-5d51-4c91-89a6-cbdb2d290838-ovsdbserver-nb\") pod \"f61ec1cb-5d51-4c91-89a6-cbdb2d290838\" (UID: \"f61ec1cb-5d51-4c91-89a6-cbdb2d290838\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.147891 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bkvsm\" (UniqueName: \"kubernetes.io/projected/9a85f317-a6ed-4d19-8222-136fda8b4517-kube-api-access-bkvsm\") pod \"9a85f317-a6ed-4d19-8222-136fda8b4517\" (UID: \"9a85f317-a6ed-4d19-8222-136fda8b4517\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.147919 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/58c44471-d442-4736-a649-c762a1c893fa-openstack-config\") pod \"58c44471-d442-4736-a649-c762a1c893fa\" (UID: \"58c44471-d442-4736-a649-c762a1c893fa\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.147952 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58c44471-d442-4736-a649-c762a1c893fa-combined-ca-bundle\") pod \"58c44471-d442-4736-a649-c762a1c893fa\" (UID: \"58c44471-d442-4736-a649-c762a1c893fa\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.147978 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rxsrh\" (UniqueName: \"kubernetes.io/projected/5a08e2bf-f0a7-4812-8137-c305d886f174-kube-api-access-rxsrh\") pod \"5a08e2bf-f0a7-4812-8137-c305d886f174\" (UID: \"5a08e2bf-f0a7-4812-8137-c305d886f174\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.147996 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/58c44471-d442-4736-a649-c762a1c893fa-openstack-config-secret\") pod \"58c44471-d442-4736-a649-c762a1c893fa\" (UID: \"58c44471-d442-4736-a649-c762a1c893fa\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.148026 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a08e2bf-f0a7-4812-8137-c305d886f174-combined-ca-bundle\") pod \"5a08e2bf-f0a7-4812-8137-c305d886f174\" (UID: \"5a08e2bf-f0a7-4812-8137-c305d886f174\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.148059 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5a08e2bf-f0a7-4812-8137-c305d886f174-var-log-ovn\") pod \"5a08e2bf-f0a7-4812-8137-c305d886f174\" (UID: \"5a08e2bf-f0a7-4812-8137-c305d886f174\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.148107 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f61ec1cb-5d51-4c91-89a6-cbdb2d290838-config\") pod \"f61ec1cb-5d51-4c91-89a6-cbdb2d290838\" (UID: \"f61ec1cb-5d51-4c91-89a6-cbdb2d290838\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.148183 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5a08e2bf-f0a7-4812-8137-c305d886f174-var-run\") pod \"5a08e2bf-f0a7-4812-8137-c305d886f174\" (UID: \"5a08e2bf-f0a7-4812-8137-c305d886f174\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.148207 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f61ec1cb-5d51-4c91-89a6-cbdb2d290838-dns-swift-storage-0\") pod \"f61ec1cb-5d51-4c91-89a6-cbdb2d290838\" (UID: \"f61ec1cb-5d51-4c91-89a6-cbdb2d290838\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.148221 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/9a85f317-a6ed-4d19-8222-136fda8b4517-ovn-rundir\") pod \"9a85f317-a6ed-4d19-8222-136fda8b4517\" (UID: \"9a85f317-a6ed-4d19-8222-136fda8b4517\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.148268 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5a08e2bf-f0a7-4812-8137-c305d886f174-var-run-ovn\") pod \"5a08e2bf-f0a7-4812-8137-c305d886f174\" (UID: \"5a08e2bf-f0a7-4812-8137-c305d886f174\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.148300 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9a85f317-a6ed-4d19-8222-136fda8b4517-metrics-certs-tls-certs\") pod \"9a85f317-a6ed-4d19-8222-136fda8b4517\" (UID: \"9a85f317-a6ed-4d19-8222-136fda8b4517\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.148316 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a85f317-a6ed-4d19-8222-136fda8b4517-combined-ca-bundle\") pod \"9a85f317-a6ed-4d19-8222-136fda8b4517\" (UID: \"9a85f317-a6ed-4d19-8222-136fda8b4517\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.148352 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f61ec1cb-5d51-4c91-89a6-cbdb2d290838-dns-svc\") pod \"f61ec1cb-5d51-4c91-89a6-cbdb2d290838\" (UID: \"f61ec1cb-5d51-4c91-89a6-cbdb2d290838\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.148370 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-km6ll\" (UniqueName: \"kubernetes.io/projected/f61ec1cb-5d51-4c91-89a6-cbdb2d290838-kube-api-access-km6ll\") pod \"f61ec1cb-5d51-4c91-89a6-cbdb2d290838\" (UID: \"f61ec1cb-5d51-4c91-89a6-cbdb2d290838\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.148390 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f61ec1cb-5d51-4c91-89a6-cbdb2d290838-ovsdbserver-sb\") pod \"f61ec1cb-5d51-4c91-89a6-cbdb2d290838\" (UID: \"f61ec1cb-5d51-4c91-89a6-cbdb2d290838\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.148907 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.148922 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-chx9k\" (UniqueName: \"kubernetes.io/projected/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-kube-api-access-chx9k\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.148940 4998 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.148949 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.148975 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.148983 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-ovsdb-rundir\") on node 
\"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.149094 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5a08e2bf-f0a7-4812-8137-c305d886f174-var-run" (OuterVolumeSpecName: "var-run") pod "5a08e2bf-f0a7-4812-8137-c305d886f174" (UID: "5a08e2bf-f0a7-4812-8137-c305d886f174"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.150352 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9a85f317-a6ed-4d19-8222-136fda8b4517-config" (OuterVolumeSpecName: "config") pod "9a85f317-a6ed-4d19-8222-136fda8b4517" (UID: "9a85f317-a6ed-4d19-8222-136fda8b4517"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.151051 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5a08e2bf-f0a7-4812-8137-c305d886f174-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "5a08e2bf-f0a7-4812-8137-c305d886f174" (UID: "5a08e2bf-f0a7-4812-8137-c305d886f174"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.151332 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a08e2bf-f0a7-4812-8137-c305d886f174-scripts" (OuterVolumeSpecName: "scripts") pod "5a08e2bf-f0a7-4812-8137-c305d886f174" (UID: "5a08e2bf-f0a7-4812-8137-c305d886f174"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.156177 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a85f317-a6ed-4d19-8222-136fda8b4517-kube-api-access-bkvsm" (OuterVolumeSpecName: "kube-api-access-bkvsm") pod "9a85f317-a6ed-4d19-8222-136fda8b4517" (UID: "9a85f317-a6ed-4d19-8222-136fda8b4517"). InnerVolumeSpecName "kube-api-access-bkvsm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.156860 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9a85f317-a6ed-4d19-8222-136fda8b4517-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "9a85f317-a6ed-4d19-8222-136fda8b4517" (UID: "9a85f317-a6ed-4d19-8222-136fda8b4517"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.156877 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5a08e2bf-f0a7-4812-8137-c305d886f174-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "5a08e2bf-f0a7-4812-8137-c305d886f174" (UID: "5a08e2bf-f0a7-4812-8137-c305d886f174"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.160482 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f61ec1cb-5d51-4c91-89a6-cbdb2d290838-kube-api-access-km6ll" (OuterVolumeSpecName: "kube-api-access-km6ll") pod "f61ec1cb-5d51-4c91-89a6-cbdb2d290838" (UID: "f61ec1cb-5d51-4c91-89a6-cbdb2d290838"). InnerVolumeSpecName "kube-api-access-km6ll". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.175812 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a08e2bf-f0a7-4812-8137-c305d886f174-kube-api-access-rxsrh" (OuterVolumeSpecName: "kube-api-access-rxsrh") pod "5a08e2bf-f0a7-4812-8137-c305d886f174" (UID: "5a08e2bf-f0a7-4812-8137-c305d886f174"). InnerVolumeSpecName "kube-api-access-rxsrh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.177691 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58c44471-d442-4736-a649-c762a1c893fa-kube-api-access-9r2st" (OuterVolumeSpecName: "kube-api-access-9r2st") pod "58c44471-d442-4736-a649-c762a1c893fa" (UID: "58c44471-d442-4736-a649-c762a1c893fa"). InnerVolumeSpecName "kube-api-access-9r2st". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: E0203 07:09:16.251545 4998 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.251565 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-km6ll\" (UniqueName: \"kubernetes.io/projected/f61ec1cb-5d51-4c91-89a6-cbdb2d290838-kube-api-access-km6ll\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.251599 4998 reconciler_common.go:293] "Volume detached for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/9a85f317-a6ed-4d19-8222-136fda8b4517-ovs-rundir\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: E0203 07:09:16.251622 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-config-data podName:59f5a5d7-787a-4941-a2d3-2fe8db65cb31 nodeName:}" failed. No retries permitted until 2026-02-03 07:09:18.251601547 +0000 UTC m=+1396.538295363 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-config-data") pod "rabbitmq-server-0" (UID: "59f5a5d7-787a-4941-a2d3-2fe8db65cb31") : configmap "rabbitmq-config-data" not found Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.251651 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9r2st\" (UniqueName: \"kubernetes.io/projected/58c44471-d442-4736-a649-c762a1c893fa-kube-api-access-9r2st\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.251664 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5a08e2bf-f0a7-4812-8137-c305d886f174-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.251676 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a85f317-a6ed-4d19-8222-136fda8b4517-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.251686 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bkvsm\" (UniqueName: \"kubernetes.io/projected/9a85f317-a6ed-4d19-8222-136fda8b4517-kube-api-access-bkvsm\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.251695 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rxsrh\" (UniqueName: \"kubernetes.io/projected/5a08e2bf-f0a7-4812-8137-c305d886f174-kube-api-access-rxsrh\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.251704 4998 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5a08e2bf-f0a7-4812-8137-c305d886f174-var-log-ovn\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.251715 4998 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5a08e2bf-f0a7-4812-8137-c305d886f174-var-run\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.251723 4998 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/9a85f317-a6ed-4d19-8222-136fda8b4517-ovn-rundir\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.251731 4998 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5a08e2bf-f0a7-4812-8137-c305d886f174-var-run-ovn\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.264642 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f61ec1cb-5d51-4c91-89a6-cbdb2d290838-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f61ec1cb-5d51-4c91-89a6-cbdb2d290838" (UID: "f61ec1cb-5d51-4c91-89a6-cbdb2d290838"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.323425 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58c44471-d442-4736-a649-c762a1c893fa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "58c44471-d442-4736-a649-c762a1c893fa" (UID: "58c44471-d442-4736-a649-c762a1c893fa"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.353610 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f61ec1cb-5d51-4c91-89a6-cbdb2d290838-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.353637 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58c44471-d442-4736-a649-c762a1c893fa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.376808 4998 generic.go:334] "Generic (PLEG): container finished" podID="ecd25f56-731a-4b58-837d-7d81dc9f595e" containerID="232bbb0f6faa06716e49e70604c13f8b54cf7576fa7c9e4b6b3cd621731e2ac5" exitCode=0 Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.376880 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ecd25f56-731a-4b58-837d-7d81dc9f595e","Type":"ContainerDied","Data":"232bbb0f6faa06716e49e70604c13f8b54cf7576fa7c9e4b6b3cd621731e2ac5"} Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.386846 4998 generic.go:334] "Generic (PLEG): container finished" podID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerID="91d35084ff382e39d07e6d23a955488bd8e1fc108bc54d221ff9c73527f12831" exitCode=0 Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.386886 4998 generic.go:334] "Generic (PLEG): container finished" podID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerID="ede2cd76a85c73b54f858f8d327a0cbaf17ad5297a50c2492295ebff09d11252" exitCode=0 Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.386897 4998 generic.go:334] "Generic (PLEG): container finished" podID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerID="81dac1e002ac3841ac510ce705948252966c00472415ebc65c67b828d80bdd5b" exitCode=0 Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.386905 4998 generic.go:334] "Generic (PLEG): container finished" podID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerID="d71043657e67f511c84e1b80c92f0b68c1da1077f14a94c55b36f366b7a2f322" exitCode=0 Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.386916 4998 generic.go:334] "Generic (PLEG): container finished" podID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerID="10dab8bfcc6645bc889616f4f108cf19114ce18a2cadd19540c5dd170f5b6fcb" exitCode=0 Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.386923 4998 generic.go:334] "Generic (PLEG): container finished" podID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerID="39817cf90fdb88cf824e056a9ffea636e16485fac2c9389858ca4ba9f02b85f5" exitCode=0 Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.386933 4998 generic.go:334] "Generic (PLEG): container finished" podID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerID="a561394a411f9e9905f3eb76a7bc935495a1e0f71ad59bb7e60e389e4ccf2656" exitCode=0 Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.386941 4998 generic.go:334] "Generic (PLEG): container finished" podID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerID="5c9ec6bd23284c2b390c4776da0627623c5642e7460b4cf117dc28bd400f8c57" exitCode=0 Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.386949 4998 generic.go:334] "Generic (PLEG): container finished" podID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerID="e6e7540edfe4023348abde594012e510b3a4f31157f2972b831ea5fad3893831" exitCode=0 Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.386972 4998 generic.go:334] "Generic 
(PLEG): container finished" podID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerID="ddd991a0dcb30ff0ae66164089cfc01b4cf70c05ee6132bec706f3086cbf8a29" exitCode=0 Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.386980 4998 generic.go:334] "Generic (PLEG): container finished" podID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerID="1f16d4c637461caae324eec51a6c3d5da587838e38267ba1cef416d450d84f75" exitCode=0 Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.386987 4998 generic.go:334] "Generic (PLEG): container finished" podID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerID="3eb1016c4bf7e03e9ac5f399eb54ed4c74b192d6e526a912f85055ec2a7696de" exitCode=0 Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.386997 4998 generic.go:334] "Generic (PLEG): container finished" podID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerID="3a7581f975cd3425a6842b0b3e0cc3c6d6eb2078e3a460284ad126263371957f" exitCode=0 Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.387005 4998 generic.go:334] "Generic (PLEG): container finished" podID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerID="0f784b826f4672ca3856308b4f90c8ceacf08cf471c4dba8d28ef78a91d7985f" exitCode=0 Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.387015 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"09cd9158-f279-4ac0-b8fe-0121e85a1b20","Type":"ContainerDied","Data":"91d35084ff382e39d07e6d23a955488bd8e1fc108bc54d221ff9c73527f12831"} Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.387056 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"09cd9158-f279-4ac0-b8fe-0121e85a1b20","Type":"ContainerDied","Data":"ede2cd76a85c73b54f858f8d327a0cbaf17ad5297a50c2492295ebff09d11252"} Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.387070 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"09cd9158-f279-4ac0-b8fe-0121e85a1b20","Type":"ContainerDied","Data":"81dac1e002ac3841ac510ce705948252966c00472415ebc65c67b828d80bdd5b"} Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.387082 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"09cd9158-f279-4ac0-b8fe-0121e85a1b20","Type":"ContainerDied","Data":"d71043657e67f511c84e1b80c92f0b68c1da1077f14a94c55b36f366b7a2f322"} Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.387093 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"09cd9158-f279-4ac0-b8fe-0121e85a1b20","Type":"ContainerDied","Data":"10dab8bfcc6645bc889616f4f108cf19114ce18a2cadd19540c5dd170f5b6fcb"} Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.387103 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"09cd9158-f279-4ac0-b8fe-0121e85a1b20","Type":"ContainerDied","Data":"39817cf90fdb88cf824e056a9ffea636e16485fac2c9389858ca4ba9f02b85f5"} Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.387115 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"09cd9158-f279-4ac0-b8fe-0121e85a1b20","Type":"ContainerDied","Data":"a561394a411f9e9905f3eb76a7bc935495a1e0f71ad59bb7e60e389e4ccf2656"} Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.387125 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"09cd9158-f279-4ac0-b8fe-0121e85a1b20","Type":"ContainerDied","Data":"5c9ec6bd23284c2b390c4776da0627623c5642e7460b4cf117dc28bd400f8c57"} Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.387136 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"09cd9158-f279-4ac0-b8fe-0121e85a1b20","Type":"ContainerDied","Data":"e6e7540edfe4023348abde594012e510b3a4f31157f2972b831ea5fad3893831"} Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.387145 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"09cd9158-f279-4ac0-b8fe-0121e85a1b20","Type":"ContainerDied","Data":"ddd991a0dcb30ff0ae66164089cfc01b4cf70c05ee6132bec706f3086cbf8a29"} Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.387155 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"09cd9158-f279-4ac0-b8fe-0121e85a1b20","Type":"ContainerDied","Data":"1f16d4c637461caae324eec51a6c3d5da587838e38267ba1cef416d450d84f75"} Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.387165 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"09cd9158-f279-4ac0-b8fe-0121e85a1b20","Type":"ContainerDied","Data":"3eb1016c4bf7e03e9ac5f399eb54ed4c74b192d6e526a912f85055ec2a7696de"} Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.387176 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"09cd9158-f279-4ac0-b8fe-0121e85a1b20","Type":"ContainerDied","Data":"3a7581f975cd3425a6842b0b3e0cc3c6d6eb2078e3a460284ad126263371957f"} Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.387188 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"09cd9158-f279-4ac0-b8fe-0121e85a1b20","Type":"ContainerDied","Data":"0f784b826f4672ca3856308b4f90c8ceacf08cf471c4dba8d28ef78a91d7985f"} Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.389839 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74b99f9475-2cfcp" event={"ID":"f61ec1cb-5d51-4c91-89a6-cbdb2d290838","Type":"ContainerDied","Data":"4dec98b26780c5870d9d64aac3ce278761abf24556a69dbde6d69b67dcf57158"} Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.389891 4998 scope.go:117] "RemoveContainer" containerID="0678f937a920be133969dc75e38cd8c33172cd7959d53982eb1d4b695fa391ae" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.389935 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74b99f9475-2cfcp" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.396415 4998 generic.go:334] "Generic (PLEG): container finished" podID="88a17ed1-2cc7-488d-a325-67f99d3a12d8" containerID="7ac91621be4ad73c5875bb4f2e805c6f9619e9a22ac99cac808fd03dd2ffd425" exitCode=0 Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.396443 4998 generic.go:334] "Generic (PLEG): container finished" podID="88a17ed1-2cc7-488d-a325-67f99d3a12d8" containerID="8783d1fdb78c71bed87bdaf085a3d90306855381ef2f20915e65a6d24c97055f" exitCode=0 Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.396475 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-d9fff78f-v2ccx" event={"ID":"88a17ed1-2cc7-488d-a325-67f99d3a12d8","Type":"ContainerDied","Data":"7ac91621be4ad73c5875bb4f2e805c6f9619e9a22ac99cac808fd03dd2ffd425"} Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.396497 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-d9fff78f-v2ccx" event={"ID":"88a17ed1-2cc7-488d-a325-67f99d3a12d8","Type":"ContainerDied","Data":"8783d1fdb78c71bed87bdaf085a3d90306855381ef2f20915e65a6d24c97055f"} Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.399151 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c/ovsdbserver-nb/0.log" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.399208 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c","Type":"ContainerDied","Data":"aa92e997914c418cfb3dbeff957f565f5f74dc6c47ec4274b19102fc0ba144d2"} Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.399303 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.402836 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-qtllb_9a85f317-a6ed-4d19-8222-136fda8b4517/openstack-network-exporter/0.log" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.402899 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-qtllb" event={"ID":"9a85f317-a6ed-4d19-8222-136fda8b4517","Type":"ContainerDied","Data":"a1b68346aa52a9066bce516d34daec8fbec9a185b7e9ce9e45b4a1ff1cdade6b"} Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.402956 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-qtllb" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.407533 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_94b790fb-3209-436d-b48d-f3978a82a557/ovsdbserver-sb/0.log" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.407585 4998 generic.go:334] "Generic (PLEG): container finished" podID="94b790fb-3209-436d-b48d-f3978a82a557" containerID="f40f4be544d33826b95826c17cd8d7866898d03ff404be03363a0a348e155097" exitCode=143 Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.407709 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"94b790fb-3209-436d-b48d-f3978a82a557","Type":"ContainerDied","Data":"f40f4be544d33826b95826c17cd8d7866898d03ff404be03363a0a348e155097"} Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.410331 4998 generic.go:334] "Generic (PLEG): container finished" podID="92b2a8f5-7091-4044-a057-3fc94b78439c" containerID="2796cd74bb8475cbb6d691b8b975cd135e78b04d8ba8f94e7349bbd27134e585" exitCode=143 Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.410404 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-747cb48568-vkq22" event={"ID":"92b2a8f5-7091-4044-a057-3fc94b78439c","Type":"ContainerDied","Data":"2796cd74bb8475cbb6d691b8b975cd135e78b04d8ba8f94e7349bbd27134e585"} Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.413925 4998 generic.go:334] "Generic (PLEG): container finished" podID="18701d06-8e80-4822-9128-dd9ba0e5bf1c" containerID="f45874e7a3396bb25e17a358e507a1b8a9db053a662b6cd39150143abd3c21f5" exitCode=143 Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.414013 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"18701d06-8e80-4822-9128-dd9ba0e5bf1c","Type":"ContainerDied","Data":"f45874e7a3396bb25e17a358e507a1b8a9db053a662b6cd39150143abd3c21f5"} Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.417716 4998 generic.go:334] "Generic (PLEG): container finished" podID="5e71558d-268c-4680-b43c-9fb48f34b38f" containerID="f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba" exitCode=0 Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.417827 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-t4p58" event={"ID":"5e71558d-268c-4680-b43c-9fb48f34b38f","Type":"ContainerDied","Data":"f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba"} Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.420097 4998 generic.go:334] "Generic (PLEG): container finished" podID="3e51da52-0dd9-4394-bb81-c4a1e534ad17" containerID="e5a74b71aeac6fcb20830db0c86ae6a7644b150ccbef95f2ae2b39cce49f5353" exitCode=143 Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.420151 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3e51da52-0dd9-4394-bb81-c4a1e534ad17","Type":"ContainerDied","Data":"e5a74b71aeac6fcb20830db0c86ae6a7644b150ccbef95f2ae2b39cce49f5353"} Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.420220 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c" (UID: "e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.430934 4998 scope.go:117] "RemoveContainer" containerID="107d045d204e2e40e70cddd937947abf2fe6811cfd97b27cdf2c86d591e10b28" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.431275 4998 generic.go:334] "Generic (PLEG): container finished" podID="0df5b57a-e165-41ef-8e19-30b87b9566f3" containerID="e0270c6ae00fb3fd677409d836f450c36d50d559a6439b219567647b04b70ce8" exitCode=143 Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.432136 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f61ec1cb-5d51-4c91-89a6-cbdb2d290838-config" (OuterVolumeSpecName: "config") pod "f61ec1cb-5d51-4c91-89a6-cbdb2d290838" (UID: "f61ec1cb-5d51-4c91-89a6-cbdb2d290838"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.436298 4998 generic.go:334] "Generic (PLEG): container finished" podID="eaf9e314-f762-45c1-bab5-3cec9661a787" containerID="daec52a53c154164ed7086c02f290ab1ac36c8f53a2d65c9b1098d2717904264" exitCode=0 Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.441692 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d737e69-6547-48c5-8bbf-7ca34468e8f6" path="/var/lib/kubelet/pods/0d737e69-6547-48c5-8bbf-7ca34468e8f6/volumes" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.442909 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21b5727f-2631-4151-acf1-9b77772d57ca" path="/var/lib/kubelet/pods/21b5727f-2631-4151-acf1-9b77772d57ca/volumes" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.444542 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a31c817-df72-4207-976b-1caee9f68a7c" path="/var/lib/kubelet/pods/2a31c817-df72-4207-976b-1caee9f68a7c/volumes" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.445229 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d815447-a2cd-470c-84d9-431e9971596a" path="/var/lib/kubelet/pods/2d815447-a2cd-470c-84d9-431e9971596a/volumes" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.446972 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="520a4ac6-b880-4576-8772-da70e0d8f99d" path="/var/lib/kubelet/pods/520a4ac6-b880-4576-8772-da70e0d8f99d/volumes" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.447829 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a33e3d6c-b78a-4335-bf80-e0f7171c5678" path="/var/lib/kubelet/pods/a33e3d6c-b78a-4335-bf80-e0f7171c5678/volumes" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.457988 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f61ec1cb-5d51-4c91-89a6-cbdb2d290838-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.458021 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.460074 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="baf70899-28c3-49dd-93b9-5645b847eb91" path="/var/lib/kubelet/pods/baf70899-28c3-49dd-93b9-5645b847eb91/volumes" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.467254 4998 kubelet_volumes.go:163] "Cleaned up 
orphaned pod volumes dir" podUID="bb875d5f-c265-49c5-a25d-76999819149b" path="/var/lib/kubelet/pods/bb875d5f-c265-49c5-a25d-76999819149b/volumes" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.468561 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c3621768-9f07-459e-9d47-afd14d36900f" path="/var/lib/kubelet/pods/c3621768-9f07-459e-9d47-afd14d36900f/volumes" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.469232 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5284078-0b06-4c22-bf9b-87b31d8f5e0f" path="/var/lib/kubelet/pods/c5284078-0b06-4c22-bf9b-87b31d8f5e0f/volumes" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.469819 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e485e612-9e0b-4d19-a326-523613449a06" path="/var/lib/kubelet/pods/e485e612-9e0b-4d19-a326-523613449a06/volumes" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.470007 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/58c44471-d442-4736-a649-c762a1c893fa-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "58c44471-d442-4736-a649-c762a1c893fa" (UID: "58c44471-d442-4736-a649-c762a1c893fa"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.474135 4998 scope.go:117] "RemoveContainer" containerID="7404cf26ae91b0be81d59a320980e944b738ce1db633c16325d9a0711dd3bad5" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.474198 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1ff745e-64b7-4f3d-8cf0-69dd338f1996" path="/var/lib/kubelet/pods/f1ff745e-64b7-4f3d-8cf0-69dd338f1996/volumes" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.475625 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f61ec1cb-5d51-4c91-89a6-cbdb2d290838-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "f61ec1cb-5d51-4c91-89a6-cbdb2d290838" (UID: "f61ec1cb-5d51-4c91-89a6-cbdb2d290838"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.475688 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f25504d8-c4cd-467e-989f-cefa0f7f6607" path="/var/lib/kubelet/pods/f25504d8-c4cd-467e-989f-cefa0f7f6607/volumes" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.476294 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd95241e-3a56-48e9-9570-ac7eef566dd4" path="/var/lib/kubelet/pods/fd95241e-3a56-48e9-9570-ac7eef566dd4/volumes" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.477820 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/openstack-cell1-galera-0" podUID="4485520f-24df-4521-b5e9-6076ffa00bd6" containerName="galera" probeResult="failure" output="" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.503042 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-78755df597-h9t98" event={"ID":"0df5b57a-e165-41ef-8e19-30b87b9566f3","Type":"ContainerDied","Data":"e0270c6ae00fb3fd677409d836f450c36d50d559a6439b219567647b04b70ce8"} Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.503089 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"eaf9e314-f762-45c1-bab5-3cec9661a787","Type":"ContainerDied","Data":"daec52a53c154164ed7086c02f290ab1ac36c8f53a2d65c9b1098d2717904264"} Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.504165 4998 generic.go:334] "Generic (PLEG): container finished" podID="c122d5d6-c472-46c4-9baf-195893bff38a" containerID="dd8e69804bea83a06ecc142f3c827d453c26167985e7a893d560d1c6cc6142bb" exitCode=143 Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.504228 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-55ccbc8794-9m7vc" event={"ID":"c122d5d6-c472-46c4-9baf-195893bff38a","Type":"ContainerDied","Data":"dd8e69804bea83a06ecc142f3c827d453c26167985e7a893d560d1c6cc6142bb"} Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.508237 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f61ec1cb-5d51-4c91-89a6-cbdb2d290838-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f61ec1cb-5d51-4c91-89a6-cbdb2d290838" (UID: "f61ec1cb-5d51-4c91-89a6-cbdb2d290838"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.528684 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-e892-account-create-update-khr4s" event={"ID":"0bdb467a-423f-4374-8f68-0be7157d1402","Type":"ContainerStarted","Data":"da37dd3ddf4fbc484cac61e3cb0e9e298b2b4002c82db0e39ab3d6ab9ff1c9b2"} Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.534450 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_94b790fb-3209-436d-b48d-f3978a82a557/ovsdbserver-sb/0.log" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.534514 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.534872 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a08e2bf-f0a7-4812-8137-c305d886f174-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5a08e2bf-f0a7-4812-8137-c305d886f174" (UID: "5a08e2bf-f0a7-4812-8137-c305d886f174"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.551850 4998 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.558873 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58c44471-d442-4736-a649-c762a1c893fa-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "58c44471-d442-4736-a649-c762a1c893fa" (UID: "58c44471-d442-4736-a649-c762a1c893fa"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: E0203 07:09:16.558989 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5924834a1fe26ed73de03c1f7f47a03b21661cf375e099684ef40cf961a44b28" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Feb 03 07:09:16 crc kubenswrapper[4998]: E0203 07:09:16.559076 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba is running failed: container process not found" containerID="f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Feb 03 07:09:16 crc kubenswrapper[4998]: E0203 07:09:16.564974 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba is running failed: container process not found" containerID="f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Feb 03 07:09:16 crc kubenswrapper[4998]: E0203 07:09:16.565143 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5924834a1fe26ed73de03c1f7f47a03b21661cf375e099684ef40cf961a44b28" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.565954 4998 generic.go:334] "Generic (PLEG): container finished" podID="58c44471-d442-4736-a649-c762a1c893fa" containerID="254b248ad815a3de6f3228e0bf342af87d6cff65d3356b30e77a551b768ab805" exitCode=137 Feb 03 07:09:16 crc kubenswrapper[4998]: E0203 07:09:16.566043 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba is running failed: container process not found" containerID="f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Feb 03 07:09:16 crc kubenswrapper[4998]: E0203 07:09:16.566075 4998 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba is running failed: container process not found" 
probeType="Readiness" pod="openstack/ovn-controller-ovs-t4p58" podUID="5e71558d-268c-4680-b43c-9fb48f34b38f" containerName="ovsdb-server" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.566638 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.572812 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f61ec1cb-5d51-4c91-89a6-cbdb2d290838-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f61ec1cb-5d51-4c91-89a6-cbdb2d290838" (UID: "f61ec1cb-5d51-4c91-89a6-cbdb2d290838"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.573395 4998 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.573426 4998 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/58c44471-d442-4736-a649-c762a1c893fa-openstack-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.573437 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a08e2bf-f0a7-4812-8137-c305d886f174-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.573448 4998 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/58c44471-d442-4736-a649-c762a1c893fa-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.573457 4998 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f61ec1cb-5d51-4c91-89a6-cbdb2d290838-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.573465 4998 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f61ec1cb-5d51-4c91-89a6-cbdb2d290838-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.573473 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f61ec1cb-5d51-4c91-89a6-cbdb2d290838-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.575064 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c" (UID: "e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.603081 4998 generic.go:334] "Generic (PLEG): container finished" podID="13caad28-67cf-4251-9a98-e324e6f9722a" containerID="cabb5051e92ea728b58b2e5f6c017812f04640a06b66da9b92064814d7881f01" exitCode=143 Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.603202 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5d94bbfff8-q2v5c" event={"ID":"13caad28-67cf-4251-9a98-e324e6f9722a","Type":"ContainerDied","Data":"cabb5051e92ea728b58b2e5f6c017812f04640a06b66da9b92064814d7881f01"} Feb 03 07:09:16 crc kubenswrapper[4998]: E0203 07:09:16.604519 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5924834a1fe26ed73de03c1f7f47a03b21661cf375e099684ef40cf961a44b28" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Feb 03 07:09:16 crc kubenswrapper[4998]: E0203 07:09:16.604591 4998 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-t4p58" podUID="5e71558d-268c-4680-b43c-9fb48f34b38f" containerName="ovs-vswitchd" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.611940 4998 scope.go:117] "RemoveContainer" containerID="a8e05668b7efcd571ea3d4d5d93de7fe8edf60bb34bd662cf3ee577c3923dcdb" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.615905 4998 generic.go:334] "Generic (PLEG): container finished" podID="b1280d3d-d626-4af9-b262-93fea6a5bbc9" containerID="6111c8827a1b20f6b8e01ed8beb9ba58ab11e1ba6f80bd56b10904401d415425" exitCode=143 Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.616022 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b1280d3d-d626-4af9-b262-93fea6a5bbc9","Type":"ContainerDied","Data":"6111c8827a1b20f6b8e01ed8beb9ba58ab11e1ba6f80bd56b10904401d415425"} Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.632624 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a85f317-a6ed-4d19-8222-136fda8b4517-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9a85f317-a6ed-4d19-8222-136fda8b4517" (UID: "9a85f317-a6ed-4d19-8222-136fda8b4517"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.633179 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-9147-account-create-update-5gj8v" event={"ID":"3e82a048-30ed-4db8-8977-220db05eff1a","Type":"ContainerStarted","Data":"2f66e4e2b018e3f2d66f0752fd4b7a0ca5a750cc39325ca54c9db2ead533fec8"} Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.652536 4998 generic.go:334] "Generic (PLEG): container finished" podID="e24d47fe-485b-4ceb-bdab-25b10ac92fa2" containerID="ac85fb57d382b091b0ec3df49b55d69c4fe4553fabf64a2bf4ffa916b408f777" exitCode=0 Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.652597 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-594c6c97c7-9bqhd" event={"ID":"e24d47fe-485b-4ceb-bdab-25b10ac92fa2","Type":"ContainerDied","Data":"ac85fb57d382b091b0ec3df49b55d69c4fe4553fabf64a2bf4ffa916b408f777"} Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.660980 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a85f317-a6ed-4d19-8222-136fda8b4517-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "9a85f317-a6ed-4d19-8222-136fda8b4517" (UID: "9a85f317-a6ed-4d19-8222-136fda8b4517"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.678494 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94b790fb-3209-436d-b48d-f3978a82a557-combined-ca-bundle\") pod \"94b790fb-3209-436d-b48d-f3978a82a557\" (UID: \"94b790fb-3209-436d-b48d-f3978a82a557\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.678639 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"94b790fb-3209-436d-b48d-f3978a82a557\" (UID: \"94b790fb-3209-436d-b48d-f3978a82a557\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.678731 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/94b790fb-3209-436d-b48d-f3978a82a557-scripts\") pod \"94b790fb-3209-436d-b48d-f3978a82a557\" (UID: \"94b790fb-3209-436d-b48d-f3978a82a557\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.678825 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/94b790fb-3209-436d-b48d-f3978a82a557-ovsdb-rundir\") pod \"94b790fb-3209-436d-b48d-f3978a82a557\" (UID: \"94b790fb-3209-436d-b48d-f3978a82a557\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.678916 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/94b790fb-3209-436d-b48d-f3978a82a557-metrics-certs-tls-certs\") pod \"94b790fb-3209-436d-b48d-f3978a82a557\" (UID: \"94b790fb-3209-436d-b48d-f3978a82a557\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.679004 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lqb2r\" (UniqueName: \"kubernetes.io/projected/94b790fb-3209-436d-b48d-f3978a82a557-kube-api-access-lqb2r\") pod \"94b790fb-3209-436d-b48d-f3978a82a557\" (UID: \"94b790fb-3209-436d-b48d-f3978a82a557\") " Feb 03 07:09:16 crc 
kubenswrapper[4998]: I0203 07:09:16.679075 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94b790fb-3209-436d-b48d-f3978a82a557-config\") pod \"94b790fb-3209-436d-b48d-f3978a82a557\" (UID: \"94b790fb-3209-436d-b48d-f3978a82a557\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.679160 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/94b790fb-3209-436d-b48d-f3978a82a557-ovsdbserver-sb-tls-certs\") pod \"94b790fb-3209-436d-b48d-f3978a82a557\" (UID: \"94b790fb-3209-436d-b48d-f3978a82a557\") " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.680820 4998 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.680951 4998 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9a85f317-a6ed-4d19-8222-136fda8b4517-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.680983 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a85f317-a6ed-4d19-8222-136fda8b4517-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.682509 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/94b790fb-3209-436d-b48d-f3978a82a557-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "94b790fb-3209-436d-b48d-f3978a82a557" (UID: "94b790fb-3209-436d-b48d-f3978a82a557"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.689808 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94b790fb-3209-436d-b48d-f3978a82a557-config" (OuterVolumeSpecName: "config") pod "94b790fb-3209-436d-b48d-f3978a82a557" (UID: "94b790fb-3209-436d-b48d-f3978a82a557"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.689907 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94b790fb-3209-436d-b48d-f3978a82a557-scripts" (OuterVolumeSpecName: "scripts") pod "94b790fb-3209-436d-b48d-f3978a82a557" (UID: "94b790fb-3209-436d-b48d-f3978a82a557"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.691341 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-sm6db" event={"ID":"5a08e2bf-f0a7-4812-8137-c305d886f174","Type":"ContainerDied","Data":"f39f961bafcf52c55a3146606cc5171ad1de7de8b921f0c81a04745c8749b887"} Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.691448 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-sm6db" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.699596 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-65da-account-create-update-mnzrq" event={"ID":"df47c2af-c1ee-4ab7-9691-ea6e28fa1831","Type":"ContainerStarted","Data":"77b7d546b82831e3b3aa93b63870138790207ad655c4881ba84dc5413744d2fb"} Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.699728 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "94b790fb-3209-436d-b48d-f3978a82a557" (UID: "94b790fb-3209-436d-b48d-f3978a82a557"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.710637 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-d2c0-account-create-update-688tk" event={"ID":"daa99d07-07ba-49aa-82c3-8bcfca0c3564","Type":"ContainerStarted","Data":"f90d044887a61c80acf492df536cfbf05ce80fcd042d7767a21567c3dd1cc9a8"} Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.711855 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94b790fb-3209-436d-b48d-f3978a82a557-kube-api-access-lqb2r" (OuterVolumeSpecName: "kube-api-access-lqb2r") pod "94b790fb-3209-436d-b48d-f3978a82a557" (UID: "94b790fb-3209-436d-b48d-f3978a82a557"). InnerVolumeSpecName "kube-api-access-lqb2r". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: E0203 07:09:16.712394 4998 kuberuntime_manager.go:1274] "Unhandled Error" err=< Feb 03 07:09:16 crc kubenswrapper[4998]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:0f7943e02fbdd3daec1d3db72fa9396bf37ad3fdd6b0f3119c90e29629e095ed,Command:[/bin/sh -c #!/bin/bash Feb 03 07:09:16 crc kubenswrapper[4998]: Feb 03 07:09:16 crc kubenswrapper[4998]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Feb 03 07:09:16 crc kubenswrapper[4998]: Feb 03 07:09:16 crc kubenswrapper[4998]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Feb 03 07:09:16 crc kubenswrapper[4998]: Feb 03 07:09:16 crc kubenswrapper[4998]: MYSQL_CMD="mysql -h -u root -P 3306" Feb 03 07:09:16 crc kubenswrapper[4998]: Feb 03 07:09:16 crc kubenswrapper[4998]: if [ -n "nova_cell0" ]; then Feb 03 07:09:16 crc kubenswrapper[4998]: GRANT_DATABASE="nova_cell0" Feb 03 07:09:16 crc kubenswrapper[4998]: else Feb 03 07:09:16 crc kubenswrapper[4998]: GRANT_DATABASE="*" Feb 03 07:09:16 crc kubenswrapper[4998]: fi Feb 03 07:09:16 crc kubenswrapper[4998]: Feb 03 07:09:16 crc kubenswrapper[4998]: # going for maximum compatibility here: Feb 03 07:09:16 crc kubenswrapper[4998]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Feb 03 07:09:16 crc kubenswrapper[4998]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Feb 03 07:09:16 crc kubenswrapper[4998]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Feb 03 07:09:16 crc kubenswrapper[4998]: # support updates Feb 03 07:09:16 crc kubenswrapper[4998]: Feb 03 07:09:16 crc kubenswrapper[4998]: $MYSQL_CMD < logger="UnhandledError" Feb 03 07:09:16 crc kubenswrapper[4998]: E0203 07:09:16.714654 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-cell0-db-secret\\\" not found\"" pod="openstack/nova-cell0-65da-account-create-update-mnzrq" podUID="df47c2af-c1ee-4ab7-9691-ea6e28fa1831" Feb 03 07:09:16 crc kubenswrapper[4998]: E0203 07:09:16.740056 4998 kuberuntime_manager.go:1274] "Unhandled Error" err=< Feb 03 07:09:16 crc kubenswrapper[4998]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:0f7943e02fbdd3daec1d3db72fa9396bf37ad3fdd6b0f3119c90e29629e095ed,Command:[/bin/sh -c #!/bin/bash Feb 03 07:09:16 crc kubenswrapper[4998]: Feb 03 07:09:16 crc kubenswrapper[4998]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Feb 03 07:09:16 crc kubenswrapper[4998]: Feb 03 07:09:16 crc kubenswrapper[4998]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Feb 03 07:09:16 crc kubenswrapper[4998]: Feb 03 07:09:16 crc kubenswrapper[4998]: MYSQL_CMD="mysql -h -u root -P 3306" Feb 03 07:09:16 crc kubenswrapper[4998]: Feb 03 07:09:16 crc kubenswrapper[4998]: if [ -n "nova_api" ]; then Feb 03 07:09:16 crc kubenswrapper[4998]: GRANT_DATABASE="nova_api" Feb 03 07:09:16 crc kubenswrapper[4998]: else Feb 03 07:09:16 crc kubenswrapper[4998]: GRANT_DATABASE="*" Feb 03 07:09:16 crc kubenswrapper[4998]: fi Feb 03 07:09:16 crc kubenswrapper[4998]: Feb 03 07:09:16 crc kubenswrapper[4998]: # going for maximum compatibility here: Feb 03 07:09:16 crc kubenswrapper[4998]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Feb 03 07:09:16 crc kubenswrapper[4998]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Feb 03 07:09:16 crc kubenswrapper[4998]: # 3. create user with CREATE but then do all password and TLS with ALTER to Feb 03 07:09:16 crc kubenswrapper[4998]: # support updates Feb 03 07:09:16 crc kubenswrapper[4998]: Feb 03 07:09:16 crc kubenswrapper[4998]: $MYSQL_CMD < logger="UnhandledError" Feb 03 07:09:16 crc kubenswrapper[4998]: E0203 07:09:16.743029 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-api-db-secret\\\" not found\"" pod="openstack/nova-api-d2c0-account-create-update-688tk" podUID="daa99d07-07ba-49aa-82c3-8bcfca0c3564" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.789761 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94b790fb-3209-436d-b48d-f3978a82a557-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "94b790fb-3209-436d-b48d-f3978a82a557" (UID: "94b790fb-3209-436d-b48d-f3978a82a557"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.790187 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a08e2bf-f0a7-4812-8137-c305d886f174-ovn-controller-tls-certs" (OuterVolumeSpecName: "ovn-controller-tls-certs") pod "5a08e2bf-f0a7-4812-8137-c305d886f174" (UID: "5a08e2bf-f0a7-4812-8137-c305d886f174"). InnerVolumeSpecName "ovn-controller-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.793114 4998 reconciler_common.go:293] "Volume detached for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a08e2bf-f0a7-4812-8137-c305d886f174-ovn-controller-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.793158 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94b790fb-3209-436d-b48d-f3978a82a557-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.793192 4998 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.793207 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/94b790fb-3209-436d-b48d-f3978a82a557-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.793219 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/94b790fb-3209-436d-b48d-f3978a82a557-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.793231 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lqb2r\" (UniqueName: \"kubernetes.io/projected/94b790fb-3209-436d-b48d-f3978a82a557-kube-api-access-lqb2r\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.793241 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94b790fb-3209-436d-b48d-f3978a82a557-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.793795 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-rv82q"] Feb 03 07:09:16 crc kubenswrapper[4998]: E0203 07:09:16.818590 4998 kuberuntime_manager.go:1274] "Unhandled Error" err=< Feb 03 07:09:16 crc kubenswrapper[4998]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:0f7943e02fbdd3daec1d3db72fa9396bf37ad3fdd6b0f3119c90e29629e095ed,Command:[/bin/sh -c #!/bin/bash Feb 03 07:09:16 crc kubenswrapper[4998]: Feb 03 07:09:16 crc kubenswrapper[4998]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Feb 03 07:09:16 crc kubenswrapper[4998]: Feb 03 07:09:16 crc kubenswrapper[4998]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Feb 03 07:09:16 crc kubenswrapper[4998]: Feb 03 07:09:16 crc kubenswrapper[4998]: MYSQL_CMD="mysql -h -u root -P 3306" Feb 03 07:09:16 crc kubenswrapper[4998]: Feb 03 07:09:16 crc kubenswrapper[4998]: if [ -n "" ]; then Feb 03 07:09:16 crc kubenswrapper[4998]: GRANT_DATABASE="" Feb 03 07:09:16 
crc kubenswrapper[4998]: else Feb 03 07:09:16 crc kubenswrapper[4998]: GRANT_DATABASE="*" Feb 03 07:09:16 crc kubenswrapper[4998]: fi Feb 03 07:09:16 crc kubenswrapper[4998]: Feb 03 07:09:16 crc kubenswrapper[4998]: # going for maximum compatibility here: Feb 03 07:09:16 crc kubenswrapper[4998]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Feb 03 07:09:16 crc kubenswrapper[4998]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Feb 03 07:09:16 crc kubenswrapper[4998]: # 3. create user with CREATE but then do all password and TLS with ALTER to Feb 03 07:09:16 crc kubenswrapper[4998]: # support updates Feb 03 07:09:16 crc kubenswrapper[4998]: Feb 03 07:09:16 crc kubenswrapper[4998]: $MYSQL_CMD < logger="UnhandledError" Feb 03 07:09:16 crc kubenswrapper[4998]: E0203 07:09:16.820141 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"openstack-cell1-mariadb-root-db-secret\\\" not found\"" pod="openstack/root-account-create-update-rv82q" podUID="28b1541e-d0f2-46fa-8518-554fb699ab4e" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.882418 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94b790fb-3209-436d-b48d-f3978a82a557-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "94b790fb-3209-436d-b48d-f3978a82a557" (UID: "94b790fb-3209-436d-b48d-f3978a82a557"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.892430 4998 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.902003 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94b790fb-3209-436d-b48d-f3978a82a557-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "94b790fb-3209-436d-b48d-f3978a82a557" (UID: "94b790fb-3209-436d-b48d-f3978a82a557"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.903119 4998 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/94b790fb-3209-436d-b48d-f3978a82a557-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.903136 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/94b790fb-3209-436d-b48d-f3978a82a557-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.903145 4998 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.963643 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.994934 4998 scope.go:117] "RemoveContainer" containerID="2647dc3bcf763bab29c7e6b082b05a86b066ea5c8e34ab43d3543cc7fce88f34" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.998689 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-d9fff78f-v2ccx" Feb 03 07:09:16 crc kubenswrapper[4998]: I0203 07:09:16.999023 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74b99f9475-2cfcp"] Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.014808 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-74b99f9475-2cfcp"] Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.053404 4998 scope.go:117] "RemoveContainer" containerID="254b248ad815a3de6f3228e0bf342af87d6cff65d3356b30e77a551b768ab805" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.060937 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.086929 4998 scope.go:117] "RemoveContainer" containerID="254b248ad815a3de6f3228e0bf342af87d6cff65d3356b30e77a551b768ab805" Feb 03 07:09:17 crc kubenswrapper[4998]: E0203 07:09:17.089946 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"254b248ad815a3de6f3228e0bf342af87d6cff65d3356b30e77a551b768ab805\": container with ID starting with 254b248ad815a3de6f3228e0bf342af87d6cff65d3356b30e77a551b768ab805 not found: ID does not exist" containerID="254b248ad815a3de6f3228e0bf342af87d6cff65d3356b30e77a551b768ab805" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.089997 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"254b248ad815a3de6f3228e0bf342af87d6cff65d3356b30e77a551b768ab805"} err="failed to get container status \"254b248ad815a3de6f3228e0bf342af87d6cff65d3356b30e77a551b768ab805\": rpc error: code = NotFound desc = could not find container \"254b248ad815a3de6f3228e0bf342af87d6cff65d3356b30e77a551b768ab805\": container with ID starting with 254b248ad815a3de6f3228e0bf342af87d6cff65d3356b30e77a551b768ab805 not found: ID does not exist" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.090033 4998 scope.go:117] "RemoveContainer" containerID="a058d234acce401bbcee892b1ab5a2ecf48b4ab883b5706dc40ddcd103e12351" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.093647 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-nb-0"] Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.107044 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88a17ed1-2cc7-488d-a325-67f99d3a12d8-combined-ca-bundle\") pod \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\" (UID: \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\") " Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.107089 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eaf9e314-f762-45c1-bab5-3cec9661a787-config-data\") pod \"eaf9e314-f762-45c1-bab5-3cec9661a787\" (UID: \"eaf9e314-f762-45c1-bab5-3cec9661a787\") " Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.107124 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5tjkz\" (UniqueName: 
\"kubernetes.io/projected/88a17ed1-2cc7-488d-a325-67f99d3a12d8-kube-api-access-5tjkz\") pod \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\" (UID: \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\") " Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.107219 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/88a17ed1-2cc7-488d-a325-67f99d3a12d8-etc-swift\") pod \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\" (UID: \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\") " Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.107318 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/88a17ed1-2cc7-488d-a325-67f99d3a12d8-internal-tls-certs\") pod \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\" (UID: \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\") " Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.107380 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88a17ed1-2cc7-488d-a325-67f99d3a12d8-config-data\") pod \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\" (UID: \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\") " Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.107496 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/88a17ed1-2cc7-488d-a325-67f99d3a12d8-run-httpd\") pod \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\" (UID: \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\") " Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.107568 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/eaf9e314-f762-45c1-bab5-3cec9661a787-nova-novncproxy-tls-certs\") pod \"eaf9e314-f762-45c1-bab5-3cec9661a787\" (UID: \"eaf9e314-f762-45c1-bab5-3cec9661a787\") " Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.107748 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/88a17ed1-2cc7-488d-a325-67f99d3a12d8-public-tls-certs\") pod \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\" (UID: \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\") " Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.107854 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/eaf9e314-f762-45c1-bab5-3cec9661a787-vencrypt-tls-certs\") pod \"eaf9e314-f762-45c1-bab5-3cec9661a787\" (UID: \"eaf9e314-f762-45c1-bab5-3cec9661a787\") " Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.108014 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bgkgf\" (UniqueName: \"kubernetes.io/projected/eaf9e314-f762-45c1-bab5-3cec9661a787-kube-api-access-bgkgf\") pod \"eaf9e314-f762-45c1-bab5-3cec9661a787\" (UID: \"eaf9e314-f762-45c1-bab5-3cec9661a787\") " Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.108086 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eaf9e314-f762-45c1-bab5-3cec9661a787-combined-ca-bundle\") pod \"eaf9e314-f762-45c1-bab5-3cec9661a787\" (UID: \"eaf9e314-f762-45c1-bab5-3cec9661a787\") " Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.108145 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" 
(UniqueName: \"kubernetes.io/empty-dir/88a17ed1-2cc7-488d-a325-67f99d3a12d8-log-httpd\") pod \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\" (UID: \"88a17ed1-2cc7-488d-a325-67f99d3a12d8\") " Feb 03 07:09:17 crc kubenswrapper[4998]: E0203 07:09:17.109168 4998 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Feb 03 07:09:17 crc kubenswrapper[4998]: E0203 07:09:17.109264 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/cc9d5160-2c51-474c-aca1-1af693753ee8-config-data podName:cc9d5160-2c51-474c-aca1-1af693753ee8 nodeName:}" failed. No retries permitted until 2026-02-03 07:09:21.109244741 +0000 UTC m=+1399.395938547 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/cc9d5160-2c51-474c-aca1-1af693753ee8-config-data") pod "rabbitmq-cell1-server-0" (UID: "cc9d5160-2c51-474c-aca1-1af693753ee8") : configmap "rabbitmq-cell1-config-data" not found Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.129049 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/88a17ed1-2cc7-488d-a325-67f99d3a12d8-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "88a17ed1-2cc7-488d-a325-67f99d3a12d8" (UID: "88a17ed1-2cc7-488d-a325-67f99d3a12d8"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.132506 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/88a17ed1-2cc7-488d-a325-67f99d3a12d8-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "88a17ed1-2cc7-488d-a325-67f99d3a12d8" (UID: "88a17ed1-2cc7-488d-a325-67f99d3a12d8"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.134642 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88a17ed1-2cc7-488d-a325-67f99d3a12d8-kube-api-access-5tjkz" (OuterVolumeSpecName: "kube-api-access-5tjkz") pod "88a17ed1-2cc7-488d-a325-67f99d3a12d8" (UID: "88a17ed1-2cc7-488d-a325-67f99d3a12d8"). InnerVolumeSpecName "kube-api-access-5tjkz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.137653 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eaf9e314-f762-45c1-bab5-3cec9661a787-kube-api-access-bgkgf" (OuterVolumeSpecName: "kube-api-access-bgkgf") pod "eaf9e314-f762-45c1-bab5-3cec9661a787" (UID: "eaf9e314-f762-45c1-bab5-3cec9661a787"). InnerVolumeSpecName "kube-api-access-bgkgf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.147801 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88a17ed1-2cc7-488d-a325-67f99d3a12d8-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "88a17ed1-2cc7-488d-a325-67f99d3a12d8" (UID: "88a17ed1-2cc7-488d-a325-67f99d3a12d8"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.174323 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-qtllb"] Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.184353 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-e892-account-create-update-khr4s" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.190597 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-metrics-qtllb"] Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.199124 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-9147-account-create-update-5gj8v" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.212917 4998 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/88a17ed1-2cc7-488d-a325-67f99d3a12d8-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.213217 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bgkgf\" (UniqueName: \"kubernetes.io/projected/eaf9e314-f762-45c1-bab5-3cec9661a787-kube-api-access-bgkgf\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.213319 4998 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/88a17ed1-2cc7-488d-a325-67f99d3a12d8-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.213596 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5tjkz\" (UniqueName: \"kubernetes.io/projected/88a17ed1-2cc7-488d-a325-67f99d3a12d8-kube-api-access-5tjkz\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.213728 4998 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/88a17ed1-2cc7-488d-a325-67f99d3a12d8-etc-swift\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.229456 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-sm6db"] Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.232362 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eaf9e314-f762-45c1-bab5-3cec9661a787-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "eaf9e314-f762-45c1-bab5-3cec9661a787" (UID: "eaf9e314-f762-45c1-bab5-3cec9661a787"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.235669 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88a17ed1-2cc7-488d-a325-67f99d3a12d8-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "88a17ed1-2cc7-488d-a325-67f99d3a12d8" (UID: "88a17ed1-2cc7-488d-a325-67f99d3a12d8"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.237133 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88a17ed1-2cc7-488d-a325-67f99d3a12d8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "88a17ed1-2cc7-488d-a325-67f99d3a12d8" (UID: "88a17ed1-2cc7-488d-a325-67f99d3a12d8"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.238107 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-sm6db"] Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.239556 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eaf9e314-f762-45c1-bab5-3cec9661a787-config-data" (OuterVolumeSpecName: "config-data") pod "eaf9e314-f762-45c1-bab5-3cec9661a787" (UID: "eaf9e314-f762-45c1-bab5-3cec9661a787"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.275077 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eaf9e314-f762-45c1-bab5-3cec9661a787-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "eaf9e314-f762-45c1-bab5-3cec9661a787" (UID: "eaf9e314-f762-45c1-bab5-3cec9661a787"). InnerVolumeSpecName "nova-novncproxy-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.281642 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eaf9e314-f762-45c1-bab5-3cec9661a787-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "eaf9e314-f762-45c1-bab5-3cec9661a787" (UID: "eaf9e314-f762-45c1-bab5-3cec9661a787"). InnerVolumeSpecName "vencrypt-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.284004 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88a17ed1-2cc7-488d-a325-67f99d3a12d8-config-data" (OuterVolumeSpecName: "config-data") pod "88a17ed1-2cc7-488d-a325-67f99d3a12d8" (UID: "88a17ed1-2cc7-488d-a325-67f99d3a12d8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.328204 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n4dwl\" (UniqueName: \"kubernetes.io/projected/3e82a048-30ed-4db8-8977-220db05eff1a-kube-api-access-n4dwl\") pod \"3e82a048-30ed-4db8-8977-220db05eff1a\" (UID: \"3e82a048-30ed-4db8-8977-220db05eff1a\") " Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.328255 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0bdb467a-423f-4374-8f68-0be7157d1402-operator-scripts\") pod \"0bdb467a-423f-4374-8f68-0be7157d1402\" (UID: \"0bdb467a-423f-4374-8f68-0be7157d1402\") " Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.328386 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3e82a048-30ed-4db8-8977-220db05eff1a-operator-scripts\") pod \"3e82a048-30ed-4db8-8977-220db05eff1a\" (UID: \"3e82a048-30ed-4db8-8977-220db05eff1a\") " Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.328443 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6nzcb\" (UniqueName: \"kubernetes.io/projected/0bdb467a-423f-4374-8f68-0be7157d1402-kube-api-access-6nzcb\") pod \"0bdb467a-423f-4374-8f68-0be7157d1402\" (UID: \"0bdb467a-423f-4374-8f68-0be7157d1402\") " Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.329032 4998 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/88a17ed1-2cc7-488d-a325-67f99d3a12d8-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.329056 4998 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/eaf9e314-f762-45c1-bab5-3cec9661a787-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.329069 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eaf9e314-f762-45c1-bab5-3cec9661a787-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.329081 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88a17ed1-2cc7-488d-a325-67f99d3a12d8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.329091 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eaf9e314-f762-45c1-bab5-3cec9661a787-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.329101 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88a17ed1-2cc7-488d-a325-67f99d3a12d8-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.329113 4998 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/eaf9e314-f762-45c1-bab5-3cec9661a787-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.329023 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/0bdb467a-423f-4374-8f68-0be7157d1402-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0bdb467a-423f-4374-8f68-0be7157d1402" (UID: "0bdb467a-423f-4374-8f68-0be7157d1402"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.331547 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e82a048-30ed-4db8-8977-220db05eff1a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3e82a048-30ed-4db8-8977-220db05eff1a" (UID: "3e82a048-30ed-4db8-8977-220db05eff1a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.332773 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0bdb467a-423f-4374-8f68-0be7157d1402-kube-api-access-6nzcb" (OuterVolumeSpecName: "kube-api-access-6nzcb") pod "0bdb467a-423f-4374-8f68-0be7157d1402" (UID: "0bdb467a-423f-4374-8f68-0be7157d1402"). InnerVolumeSpecName "kube-api-access-6nzcb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.336517 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88a17ed1-2cc7-488d-a325-67f99d3a12d8-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "88a17ed1-2cc7-488d-a325-67f99d3a12d8" (UID: "88a17ed1-2cc7-488d-a325-67f99d3a12d8"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.339301 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e82a048-30ed-4db8-8977-220db05eff1a-kube-api-access-n4dwl" (OuterVolumeSpecName: "kube-api-access-n4dwl") pod "3e82a048-30ed-4db8-8977-220db05eff1a" (UID: "3e82a048-30ed-4db8-8977-220db05eff1a"). InnerVolumeSpecName "kube-api-access-n4dwl". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.376296 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.437062 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3e82a048-30ed-4db8-8977-220db05eff1a-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.437104 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6nzcb\" (UniqueName: \"kubernetes.io/projected/0bdb467a-423f-4374-8f68-0be7157d1402-kube-api-access-6nzcb\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.437113 4998 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/88a17ed1-2cc7-488d-a325-67f99d3a12d8-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.437122 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n4dwl\" (UniqueName: \"kubernetes.io/projected/3e82a048-30ed-4db8-8977-220db05eff1a-kube-api-access-n4dwl\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.437132 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0bdb467a-423f-4374-8f68-0be7157d1402-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.541285 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4485520f-24df-4521-b5e9-6076ffa00bd6-combined-ca-bundle\") pod \"4485520f-24df-4521-b5e9-6076ffa00bd6\" (UID: \"4485520f-24df-4521-b5e9-6076ffa00bd6\") " Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.541369 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4485520f-24df-4521-b5e9-6076ffa00bd6-kolla-config\") pod \"4485520f-24df-4521-b5e9-6076ffa00bd6\" (UID: \"4485520f-24df-4521-b5e9-6076ffa00bd6\") " Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.541447 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4485520f-24df-4521-b5e9-6076ffa00bd6-config-data-default\") pod \"4485520f-24df-4521-b5e9-6076ffa00bd6\" (UID: \"4485520f-24df-4521-b5e9-6076ffa00bd6\") " Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.542933 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"4485520f-24df-4521-b5e9-6076ffa00bd6\" (UID: \"4485520f-24df-4521-b5e9-6076ffa00bd6\") " Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.543013 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4485520f-24df-4521-b5e9-6076ffa00bd6-galera-tls-certs\") pod \"4485520f-24df-4521-b5e9-6076ffa00bd6\" (UID: \"4485520f-24df-4521-b5e9-6076ffa00bd6\") " Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.543049 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4485520f-24df-4521-b5e9-6076ffa00bd6-operator-scripts\") pod \"4485520f-24df-4521-b5e9-6076ffa00bd6\" (UID: \"4485520f-24df-4521-b5e9-6076ffa00bd6\") " 
Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.543077 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-khxtl\" (UniqueName: \"kubernetes.io/projected/4485520f-24df-4521-b5e9-6076ffa00bd6-kube-api-access-khxtl\") pod \"4485520f-24df-4521-b5e9-6076ffa00bd6\" (UID: \"4485520f-24df-4521-b5e9-6076ffa00bd6\") " Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.543154 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4485520f-24df-4521-b5e9-6076ffa00bd6-config-data-generated\") pod \"4485520f-24df-4521-b5e9-6076ffa00bd6\" (UID: \"4485520f-24df-4521-b5e9-6076ffa00bd6\") " Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.543178 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4485520f-24df-4521-b5e9-6076ffa00bd6-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "4485520f-24df-4521-b5e9-6076ffa00bd6" (UID: "4485520f-24df-4521-b5e9-6076ffa00bd6"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.543867 4998 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4485520f-24df-4521-b5e9-6076ffa00bd6-config-data-default\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.544250 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4485520f-24df-4521-b5e9-6076ffa00bd6-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "4485520f-24df-4521-b5e9-6076ffa00bd6" (UID: "4485520f-24df-4521-b5e9-6076ffa00bd6"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.544633 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4485520f-24df-4521-b5e9-6076ffa00bd6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4485520f-24df-4521-b5e9-6076ffa00bd6" (UID: "4485520f-24df-4521-b5e9-6076ffa00bd6"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.544738 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4485520f-24df-4521-b5e9-6076ffa00bd6-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "4485520f-24df-4521-b5e9-6076ffa00bd6" (UID: "4485520f-24df-4521-b5e9-6076ffa00bd6"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.558029 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4485520f-24df-4521-b5e9-6076ffa00bd6-kube-api-access-khxtl" (OuterVolumeSpecName: "kube-api-access-khxtl") pod "4485520f-24df-4521-b5e9-6076ffa00bd6" (UID: "4485520f-24df-4521-b5e9-6076ffa00bd6"). InnerVolumeSpecName "kube-api-access-khxtl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.607852 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4485520f-24df-4521-b5e9-6076ffa00bd6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4485520f-24df-4521-b5e9-6076ffa00bd6" (UID: "4485520f-24df-4521-b5e9-6076ffa00bd6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.638211 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "mysql-db") pod "4485520f-24df-4521-b5e9-6076ffa00bd6" (UID: "4485520f-24df-4521-b5e9-6076ffa00bd6"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.648554 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4485520f-24df-4521-b5e9-6076ffa00bd6-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.648622 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-khxtl\" (UniqueName: \"kubernetes.io/projected/4485520f-24df-4521-b5e9-6076ffa00bd6-kube-api-access-khxtl\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.648640 4998 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4485520f-24df-4521-b5e9-6076ffa00bd6-config-data-generated\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.648652 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4485520f-24df-4521-b5e9-6076ffa00bd6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.648666 4998 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4485520f-24df-4521-b5e9-6076ffa00bd6-kolla-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.648712 4998 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.652077 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4485520f-24df-4521-b5e9-6076ffa00bd6-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "4485520f-24df-4521-b5e9-6076ffa00bd6" (UID: "4485520f-24df-4521-b5e9-6076ffa00bd6"). InnerVolumeSpecName "galera-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.718100 4998 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.734581 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"eaf9e314-f762-45c1-bab5-3cec9661a787","Type":"ContainerDied","Data":"440427a42ba083aadfb2585dd39aed178b32b5152e139574aa5e99ccfe105d71"} Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.734674 4998 scope.go:117] "RemoveContainer" containerID="daec52a53c154164ed7086c02f290ab1ac36c8f53a2d65c9b1098d2717904264" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.734873 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.738228 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-d9fff78f-v2ccx" event={"ID":"88a17ed1-2cc7-488d-a325-67f99d3a12d8","Type":"ContainerDied","Data":"bf548f38c34190d5c5768d28a8c422c39447b2fdc2bef928978e7995fbe7ceec"} Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.738325 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-d9fff78f-v2ccx" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.752701 4998 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.752733 4998 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4485520f-24df-4521-b5e9-6076ffa00bd6-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.763176 4998 generic.go:334] "Generic (PLEG): container finished" podID="7d04a830-0b11-4766-b9bd-56a6f4b740ca" containerID="1c1e1c81810cf3242e2c7121522b6a0136f94d3920df202359c1972e68fa1635" exitCode=0 Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.763250 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"7d04a830-0b11-4766-b9bd-56a6f4b740ca","Type":"ContainerDied","Data":"1c1e1c81810cf3242e2c7121522b6a0136f94d3920df202359c1972e68fa1635"} Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.765863 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-e892-account-create-update-khr4s" event={"ID":"0bdb467a-423f-4374-8f68-0be7157d1402","Type":"ContainerDied","Data":"da37dd3ddf4fbc484cac61e3cb0e9e298b2b4002c82db0e39ab3d6ab9ff1c9b2"} Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.765942 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-e892-account-create-update-khr4s" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.781572 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_94b790fb-3209-436d-b48d-f3978a82a557/ovsdbserver-sb/0.log" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.781656 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"94b790fb-3209-436d-b48d-f3978a82a557","Type":"ContainerDied","Data":"c0b1024ca651de5b8b9c86ca0714addfa49640f30d386a5747bc7b906237762e"} Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.781764 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Feb 03 07:09:17 crc kubenswrapper[4998]: E0203 07:09:17.783887 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1c1e1c81810cf3242e2c7121522b6a0136f94d3920df202359c1972e68fa1635 is running failed: container process not found" containerID="1c1e1c81810cf3242e2c7121522b6a0136f94d3920df202359c1972e68fa1635" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Feb 03 07:09:17 crc kubenswrapper[4998]: E0203 07:09:17.790419 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1c1e1c81810cf3242e2c7121522b6a0136f94d3920df202359c1972e68fa1635 is running failed: container process not found" containerID="1c1e1c81810cf3242e2c7121522b6a0136f94d3920df202359c1972e68fa1635" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Feb 03 07:09:17 crc kubenswrapper[4998]: E0203 07:09:17.790963 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1c1e1c81810cf3242e2c7121522b6a0136f94d3920df202359c1972e68fa1635 is running failed: container process not found" containerID="1c1e1c81810cf3242e2c7121522b6a0136f94d3920df202359c1972e68fa1635" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Feb 03 07:09:17 crc kubenswrapper[4998]: E0203 07:09:17.791001 4998 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1c1e1c81810cf3242e2c7121522b6a0136f94d3920df202359c1972e68fa1635 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="7d04a830-0b11-4766-b9bd-56a6f4b740ca" containerName="nova-cell1-conductor-conductor" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.791389 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-rv82q" event={"ID":"28b1541e-d0f2-46fa-8518-554fb699ab4e","Type":"ContainerStarted","Data":"45cb6bddb50a3ecf5f763e83fe808826c43c64c90d775ae3039daf4b31cf165c"} Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.807871 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-9147-account-create-update-5gj8v" event={"ID":"3e82a048-30ed-4db8-8977-220db05eff1a","Type":"ContainerDied","Data":"2f66e4e2b018e3f2d66f0752fd4b7a0ca5a750cc39325ca54c9db2ead533fec8"} Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.807991 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-9147-account-create-update-5gj8v" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.813380 4998 scope.go:117] "RemoveContainer" containerID="7ac91621be4ad73c5875bb4f2e805c6f9619e9a22ac99cac808fd03dd2ffd425" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.830177 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.845481 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.850919 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-d9fff78f-v2ccx"] Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.858632 4998 generic.go:334] "Generic (PLEG): container finished" podID="4485520f-24df-4521-b5e9-6076ffa00bd6" containerID="5b7b33ab0c2f178c9cbaf0012a4edcaa73280c5ee8e2f4c6a4faf102c9592514" exitCode=0 Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.858881 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.860105 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"4485520f-24df-4521-b5e9-6076ffa00bd6","Type":"ContainerDied","Data":"5b7b33ab0c2f178c9cbaf0012a4edcaa73280c5ee8e2f4c6a4faf102c9592514"} Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.860202 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"4485520f-24df-4521-b5e9-6076ffa00bd6","Type":"ContainerDied","Data":"02914a77ca230a2af0025a1f9a22910193e3f32f7883122f185a9fcf0b7f2d07"} Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.860815 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-proxy-d9fff78f-v2ccx"] Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.883922 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-e892-account-create-update-khr4s"] Feb 03 07:09:17 crc kubenswrapper[4998]: I0203 07:09:17.892984 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-e892-account-create-update-khr4s"] Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.002982 4998 scope.go:117] "RemoveContainer" containerID="8783d1fdb78c71bed87bdaf085a3d90306855381ef2f20915e65a6d24c97055f" Feb 03 07:09:18 crc kubenswrapper[4998]: E0203 07:09:18.003031 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dd33cc14982f5158ae849d70849339c78aeeeb743bea375fe57e0cde15366ab8" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Feb 03 07:09:18 crc kubenswrapper[4998]: E0203 07:09:18.041836 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dd33cc14982f5158ae849d70849339c78aeeeb743bea375fe57e0cde15366ab8" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Feb 03 07:09:18 crc kubenswrapper[4998]: E0203 07:09:18.052097 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="dd33cc14982f5158ae849d70849339c78aeeeb743bea375fe57e0cde15366ab8" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Feb 03 07:09:18 crc kubenswrapper[4998]: E0203 07:09:18.052183 4998 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="07088226-5029-4477-a6e1-85fd28c08f4b" containerName="ovn-northd" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.077566 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-9147-account-create-update-5gj8v"] Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.091431 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-9147-account-create-update-5gj8v"] Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.112924 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.118213 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.161707 4998 scope.go:117] "RemoveContainer" containerID="4f8ef037c2d0e4e053ae59278fde31f26ecb573b9d514deb328a8f1afc372f98" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.176926 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-sb-0"] Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.211051 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gvmf5\" (UniqueName: \"kubernetes.io/projected/7d04a830-0b11-4766-b9bd-56a6f4b740ca-kube-api-access-gvmf5\") pod \"7d04a830-0b11-4766-b9bd-56a6f4b740ca\" (UID: \"7d04a830-0b11-4766-b9bd-56a6f4b740ca\") " Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.211141 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d04a830-0b11-4766-b9bd-56a6f4b740ca-combined-ca-bundle\") pod \"7d04a830-0b11-4766-b9bd-56a6f4b740ca\" (UID: \"7d04a830-0b11-4766-b9bd-56a6f4b740ca\") " Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.211247 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d04a830-0b11-4766-b9bd-56a6f4b740ca-config-data\") pod \"7d04a830-0b11-4766-b9bd-56a6f4b740ca\" (UID: \"7d04a830-0b11-4766-b9bd-56a6f4b740ca\") " Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.211373 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.217245 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-cell1-galera-0"] Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.224469 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d04a830-0b11-4766-b9bd-56a6f4b740ca-kube-api-access-gvmf5" (OuterVolumeSpecName: "kube-api-access-gvmf5") pod "7d04a830-0b11-4766-b9bd-56a6f4b740ca" (UID: "7d04a830-0b11-4766-b9bd-56a6f4b740ca"). InnerVolumeSpecName "kube-api-access-gvmf5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.239996 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-qdfsj"] Feb 03 07:09:18 crc kubenswrapper[4998]: E0203 07:09:18.240497 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88a17ed1-2cc7-488d-a325-67f99d3a12d8" containerName="proxy-server" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.240515 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="88a17ed1-2cc7-488d-a325-67f99d3a12d8" containerName="proxy-server" Feb 03 07:09:18 crc kubenswrapper[4998]: E0203 07:09:18.240526 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94b790fb-3209-436d-b48d-f3978a82a557" containerName="openstack-network-exporter" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.240534 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="94b790fb-3209-436d-b48d-f3978a82a557" containerName="openstack-network-exporter" Feb 03 07:09:18 crc kubenswrapper[4998]: E0203 07:09:18.240547 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f61ec1cb-5d51-4c91-89a6-cbdb2d290838" containerName="init" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.240554 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f61ec1cb-5d51-4c91-89a6-cbdb2d290838" containerName="init" Feb 03 07:09:18 crc kubenswrapper[4998]: E0203 07:09:18.240582 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c" containerName="ovsdbserver-nb" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.240590 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c" containerName="ovsdbserver-nb" Feb 03 07:09:18 crc kubenswrapper[4998]: E0203 07:09:18.240602 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eaf9e314-f762-45c1-bab5-3cec9661a787" containerName="nova-cell1-novncproxy-novncproxy" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.240610 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="eaf9e314-f762-45c1-bab5-3cec9661a787" containerName="nova-cell1-novncproxy-novncproxy" Feb 03 07:09:18 crc kubenswrapper[4998]: E0203 07:09:18.240629 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d04a830-0b11-4766-b9bd-56a6f4b740ca" containerName="nova-cell1-conductor-conductor" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.240637 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d04a830-0b11-4766-b9bd-56a6f4b740ca" containerName="nova-cell1-conductor-conductor" Feb 03 07:09:18 crc kubenswrapper[4998]: E0203 07:09:18.240647 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4485520f-24df-4521-b5e9-6076ffa00bd6" containerName="mysql-bootstrap" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.240657 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="4485520f-24df-4521-b5e9-6076ffa00bd6" containerName="mysql-bootstrap" Feb 03 07:09:18 crc kubenswrapper[4998]: E0203 07:09:18.240667 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94b790fb-3209-436d-b48d-f3978a82a557" containerName="ovsdbserver-sb" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.240676 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="94b790fb-3209-436d-b48d-f3978a82a557" containerName="ovsdbserver-sb" Feb 03 07:09:18 crc kubenswrapper[4998]: E0203 07:09:18.240688 4998 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="88a17ed1-2cc7-488d-a325-67f99d3a12d8" containerName="proxy-httpd" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.240696 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="88a17ed1-2cc7-488d-a325-67f99d3a12d8" containerName="proxy-httpd" Feb 03 07:09:18 crc kubenswrapper[4998]: E0203 07:09:18.240712 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c" containerName="openstack-network-exporter" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.240719 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c" containerName="openstack-network-exporter" Feb 03 07:09:18 crc kubenswrapper[4998]: E0203 07:09:18.240727 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a08e2bf-f0a7-4812-8137-c305d886f174" containerName="ovn-controller" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.240736 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a08e2bf-f0a7-4812-8137-c305d886f174" containerName="ovn-controller" Feb 03 07:09:18 crc kubenswrapper[4998]: E0203 07:09:18.240745 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a85f317-a6ed-4d19-8222-136fda8b4517" containerName="openstack-network-exporter" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.240753 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a85f317-a6ed-4d19-8222-136fda8b4517" containerName="openstack-network-exporter" Feb 03 07:09:18 crc kubenswrapper[4998]: E0203 07:09:18.240771 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f61ec1cb-5d51-4c91-89a6-cbdb2d290838" containerName="dnsmasq-dns" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.240834 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f61ec1cb-5d51-4c91-89a6-cbdb2d290838" containerName="dnsmasq-dns" Feb 03 07:09:18 crc kubenswrapper[4998]: E0203 07:09:18.240849 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4485520f-24df-4521-b5e9-6076ffa00bd6" containerName="galera" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.240858 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="4485520f-24df-4521-b5e9-6076ffa00bd6" containerName="galera" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.241086 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a85f317-a6ed-4d19-8222-136fda8b4517" containerName="openstack-network-exporter" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.241103 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="eaf9e314-f762-45c1-bab5-3cec9661a787" containerName="nova-cell1-novncproxy-novncproxy" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.241117 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d04a830-0b11-4766-b9bd-56a6f4b740ca" containerName="nova-cell1-conductor-conductor" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.241132 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c" containerName="ovsdbserver-nb" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.241144 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="94b790fb-3209-436d-b48d-f3978a82a557" containerName="ovsdbserver-sb" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.241158 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c" 
containerName="openstack-network-exporter" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.241170 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="94b790fb-3209-436d-b48d-f3978a82a557" containerName="openstack-network-exporter" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.241183 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="88a17ed1-2cc7-488d-a325-67f99d3a12d8" containerName="proxy-server" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.241197 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a08e2bf-f0a7-4812-8137-c305d886f174" containerName="ovn-controller" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.241207 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="88a17ed1-2cc7-488d-a325-67f99d3a12d8" containerName="proxy-httpd" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.241222 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="4485520f-24df-4521-b5e9-6076ffa00bd6" containerName="galera" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.241235 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="f61ec1cb-5d51-4c91-89a6-cbdb2d290838" containerName="dnsmasq-dns" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.248824 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-qdfsj" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.252668 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-mariadb-root-db-secret" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.266725 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d04a830-0b11-4766-b9bd-56a6f4b740ca-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7d04a830-0b11-4766-b9bd-56a6f4b740ca" (UID: "7d04a830-0b11-4766-b9bd-56a6f4b740ca"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.273744 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d04a830-0b11-4766-b9bd-56a6f4b740ca-config-data" (OuterVolumeSpecName: "config-data") pod "7d04a830-0b11-4766-b9bd-56a6f4b740ca" (UID: "7d04a830-0b11-4766-b9bd-56a6f4b740ca"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.297726 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.298223 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="20fec019-d2d1-4625-960c-c16004cfa5aa" containerName="ceilometer-central-agent" containerID="cri-o://735a947af0313563fd152251e4f01a7d98bb038f3738b13aa0956d4060411491" gracePeriod=30 Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.299209 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="20fec019-d2d1-4625-960c-c16004cfa5aa" containerName="proxy-httpd" containerID="cri-o://bd760878032362b5e2dbbae0a17767478623ce84fedeea219d3003f8e309bbec" gracePeriod=30 Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.299364 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="20fec019-d2d1-4625-960c-c16004cfa5aa" containerName="sg-core" containerID="cri-o://da3d6f3489ff3ea9baaa57c3a51e435fe24ab651749286ef5d6fb275fe2c2a08" gracePeriod=30 Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.299457 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="20fec019-d2d1-4625-960c-c16004cfa5aa" containerName="ceilometer-notification-agent" containerID="cri-o://a7442e55da5b397ed68925bc02a81c7967c0ba70c8e805f561094c6b1def8d26" gracePeriod=30 Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.318006 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-qdfsj"] Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.320958 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wg485\" (UniqueName: \"kubernetes.io/projected/5f513eeb-f447-48ba-a53a-096d4aa0275a-kube-api-access-wg485\") pod \"root-account-create-update-qdfsj\" (UID: \"5f513eeb-f447-48ba-a53a-096d4aa0275a\") " pod="openstack/root-account-create-update-qdfsj" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.321001 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f513eeb-f447-48ba-a53a-096d4aa0275a-operator-scripts\") pod \"root-account-create-update-qdfsj\" (UID: \"5f513eeb-f447-48ba-a53a-096d4aa0275a\") " pod="openstack/root-account-create-update-qdfsj" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.321064 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gvmf5\" (UniqueName: \"kubernetes.io/projected/7d04a830-0b11-4766-b9bd-56a6f4b740ca-kube-api-access-gvmf5\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.321075 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d04a830-0b11-4766-b9bd-56a6f4b740ca-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.321084 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d04a830-0b11-4766-b9bd-56a6f4b740ca-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:18 crc kubenswrapper[4998]: E0203 07:09:18.321149 4998 configmap.go:193] Couldn't get configMap 
openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Feb 03 07:09:18 crc kubenswrapper[4998]: E0203 07:09:18.321192 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-config-data podName:59f5a5d7-787a-4941-a2d3-2fe8db65cb31 nodeName:}" failed. No retries permitted until 2026-02-03 07:09:22.32117814 +0000 UTC m=+1400.607871946 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-config-data") pod "rabbitmq-server-0" (UID: "59f5a5d7-787a-4941-a2d3-2fe8db65cb31") : configmap "rabbitmq-config-data" not found Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.336960 4998 scope.go:117] "RemoveContainer" containerID="f40f4be544d33826b95826c17cd8d7866898d03ff404be03363a0a348e155097" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.345979 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.346268 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="11bbac6a-fd7e-447b-af99-d0ebada848df" containerName="kube-state-metrics" containerID="cri-o://aa6b3f085720a2a5d6f33b6f4c5735de460cf2e60593dc25d3f9bedcbf5d7741" gracePeriod=30 Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.422643 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wg485\" (UniqueName: \"kubernetes.io/projected/5f513eeb-f447-48ba-a53a-096d4aa0275a-kube-api-access-wg485\") pod \"root-account-create-update-qdfsj\" (UID: \"5f513eeb-f447-48ba-a53a-096d4aa0275a\") " pod="openstack/root-account-create-update-qdfsj" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.422692 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f513eeb-f447-48ba-a53a-096d4aa0275a-operator-scripts\") pod \"root-account-create-update-qdfsj\" (UID: \"5f513eeb-f447-48ba-a53a-096d4aa0275a\") " pod="openstack/root-account-create-update-qdfsj" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.423599 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f513eeb-f447-48ba-a53a-096d4aa0275a-operator-scripts\") pod \"root-account-create-update-qdfsj\" (UID: \"5f513eeb-f447-48ba-a53a-096d4aa0275a\") " pod="openstack/root-account-create-update-qdfsj" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.461810 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0bdb467a-423f-4374-8f68-0be7157d1402" path="/var/lib/kubelet/pods/0bdb467a-423f-4374-8f68-0be7157d1402/volumes" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.462233 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e82a048-30ed-4db8-8977-220db05eff1a" path="/var/lib/kubelet/pods/3e82a048-30ed-4db8-8977-220db05eff1a/volumes" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.462867 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4485520f-24df-4521-b5e9-6076ffa00bd6" path="/var/lib/kubelet/pods/4485520f-24df-4521-b5e9-6076ffa00bd6/volumes" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.465262 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wg485\" (UniqueName: 
\"kubernetes.io/projected/5f513eeb-f447-48ba-a53a-096d4aa0275a-kube-api-access-wg485\") pod \"root-account-create-update-qdfsj\" (UID: \"5f513eeb-f447-48ba-a53a-096d4aa0275a\") " pod="openstack/root-account-create-update-qdfsj" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.472124 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58c44471-d442-4736-a649-c762a1c893fa" path="/var/lib/kubelet/pods/58c44471-d442-4736-a649-c762a1c893fa/volumes" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.472861 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a08e2bf-f0a7-4812-8137-c305d886f174" path="/var/lib/kubelet/pods/5a08e2bf-f0a7-4812-8137-c305d886f174/volumes" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.473735 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88a17ed1-2cc7-488d-a325-67f99d3a12d8" path="/var/lib/kubelet/pods/88a17ed1-2cc7-488d-a325-67f99d3a12d8/volumes" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.481180 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94b790fb-3209-436d-b48d-f3978a82a557" path="/var/lib/kubelet/pods/94b790fb-3209-436d-b48d-f3978a82a557/volumes" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.481854 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a85f317-a6ed-4d19-8222-136fda8b4517" path="/var/lib/kubelet/pods/9a85f317-a6ed-4d19-8222-136fda8b4517/volumes" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.485013 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c" path="/var/lib/kubelet/pods/e603cc71-c5b7-4f97-9ed9-3c6d114ddb8c/volumes" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.489086 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eaf9e314-f762-45c1-bab5-3cec9661a787" path="/var/lib/kubelet/pods/eaf9e314-f762-45c1-bab5-3cec9661a787/volumes" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.490772 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f61ec1cb-5d51-4c91-89a6-cbdb2d290838" path="/var/lib/kubelet/pods/f61ec1cb-5d51-4c91-89a6-cbdb2d290838/volumes" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.517982 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-rv82q" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.564705 4998 scope.go:117] "RemoveContainer" containerID="5b7b33ab0c2f178c9cbaf0012a4edcaa73280c5ee8e2f4c6a4faf102c9592514" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.586370 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-qdfsj" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.635655 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8bh4z\" (UniqueName: \"kubernetes.io/projected/28b1541e-d0f2-46fa-8518-554fb699ab4e-kube-api-access-8bh4z\") pod \"28b1541e-d0f2-46fa-8518-554fb699ab4e\" (UID: \"28b1541e-d0f2-46fa-8518-554fb699ab4e\") " Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.635826 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/28b1541e-d0f2-46fa-8518-554fb699ab4e-operator-scripts\") pod \"28b1541e-d0f2-46fa-8518-554fb699ab4e\" (UID: \"28b1541e-d0f2-46fa-8518-554fb699ab4e\") " Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.637121 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/28b1541e-d0f2-46fa-8518-554fb699ab4e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "28b1541e-d0f2-46fa-8518-554fb699ab4e" (UID: "28b1541e-d0f2-46fa-8518-554fb699ab4e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.652064 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.652198 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28b1541e-d0f2-46fa-8518-554fb699ab4e-kube-api-access-8bh4z" (OuterVolumeSpecName: "kube-api-access-8bh4z") pod "28b1541e-d0f2-46fa-8518-554fb699ab4e" (UID: "28b1541e-d0f2-46fa-8518-554fb699ab4e"). InnerVolumeSpecName "kube-api-access-8bh4z". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.652340 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/memcached-0" podUID="3c2bbe1b-74b6-4e3c-8468-735ad0b00146" containerName="memcached" containerID="cri-o://7dd0a0658aa5ed78204aab74bc571019a1cf718b8da1141cbae6e79b65aadb7e" gracePeriod=30 Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.660166 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-9c9a-account-create-update-c26qq"] Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.727852 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-9c9a-account-create-update-c26qq"] Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.743221 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-9c9a-account-create-update-p7lvz"] Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.745413 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-9c9a-account-create-update-p7lvz" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.754389 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.755034 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-9c9a-account-create-update-p7lvz"] Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.756645 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8bh4z\" (UniqueName: \"kubernetes.io/projected/28b1541e-d0f2-46fa-8518-554fb699ab4e-kube-api-access-8bh4z\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.756664 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/28b1541e-d0f2-46fa-8518-554fb699ab4e-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.767505 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-kl26j"] Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.771708 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-d2c0-account-create-update-688tk" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.817572 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-kl26j"] Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.822566 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-4h259"] Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.844960 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-4h259"] Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.851475 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.857244 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-6c7dff8f57-z8bvm"] Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.857460 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/keystone-6c7dff8f57-z8bvm" podUID="4e13372a-d92b-4928-9e27-c1422d685e05" containerName="keystone-api" containerID="cri-o://2c023f77ba3c53d6dba2a954909d4bfd4b6a41baa1708019bf6cafdf0e585565" gracePeriod=30 Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.870959 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/daa99d07-07ba-49aa-82c3-8bcfca0c3564-operator-scripts\") pod \"daa99d07-07ba-49aa-82c3-8bcfca0c3564\" (UID: \"daa99d07-07ba-49aa-82c3-8bcfca0c3564\") " Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.871227 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-grrlc\" (UniqueName: \"kubernetes.io/projected/daa99d07-07ba-49aa-82c3-8bcfca0c3564-kube-api-access-grrlc\") pod \"daa99d07-07ba-49aa-82c3-8bcfca0c3564\" (UID: \"daa99d07-07ba-49aa-82c3-8bcfca0c3564\") " Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.871615 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vrgqn\" (UniqueName: \"kubernetes.io/projected/17941a92-4153-44a0-aab8-57fdb95a272f-kube-api-access-vrgqn\") pod \"keystone-9c9a-account-create-update-p7lvz\" 
(UID: \"17941a92-4153-44a0-aab8-57fdb95a272f\") " pod="openstack/keystone-9c9a-account-create-update-p7lvz" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.871654 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/17941a92-4153-44a0-aab8-57fdb95a272f-operator-scripts\") pod \"keystone-9c9a-account-create-update-p7lvz\" (UID: \"17941a92-4153-44a0-aab8-57fdb95a272f\") " pod="openstack/keystone-9c9a-account-create-update-p7lvz" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.872317 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/daa99d07-07ba-49aa-82c3-8bcfca0c3564-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "daa99d07-07ba-49aa-82c3-8bcfca0c3564" (UID: "daa99d07-07ba-49aa-82c3-8bcfca0c3564"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.884891 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-9c9a-account-create-update-p7lvz"] Feb 03 07:09:18 crc kubenswrapper[4998]: E0203 07:09:18.903356 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-vrgqn operator-scripts], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/keystone-9c9a-account-create-update-p7lvz" podUID="17941a92-4153-44a0-aab8-57fdb95a272f" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.904173 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/daa99d07-07ba-49aa-82c3-8bcfca0c3564-kube-api-access-grrlc" (OuterVolumeSpecName: "kube-api-access-grrlc") pod "daa99d07-07ba-49aa-82c3-8bcfca0c3564" (UID: "daa99d07-07ba-49aa-82c3-8bcfca0c3564"). InnerVolumeSpecName "kube-api-access-grrlc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.936736 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-jt7wn"] Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.954525 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-jt7wn"] Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.957202 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-qdfsj"] Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.963289 4998 scope.go:117] "RemoveContainer" containerID="9d09a59cc4f696b04a31de6522435eb8ea7f55af8026ff6e14ede101c1dc6a99" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.972972 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vrgqn\" (UniqueName: \"kubernetes.io/projected/17941a92-4153-44a0-aab8-57fdb95a272f-kube-api-access-vrgqn\") pod \"keystone-9c9a-account-create-update-p7lvz\" (UID: \"17941a92-4153-44a0-aab8-57fdb95a272f\") " pod="openstack/keystone-9c9a-account-create-update-p7lvz" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.973047 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/17941a92-4153-44a0-aab8-57fdb95a272f-operator-scripts\") pod \"keystone-9c9a-account-create-update-p7lvz\" (UID: \"17941a92-4153-44a0-aab8-57fdb95a272f\") " pod="openstack/keystone-9c9a-account-create-update-p7lvz" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.973186 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-grrlc\" (UniqueName: \"kubernetes.io/projected/daa99d07-07ba-49aa-82c3-8bcfca0c3564-kube-api-access-grrlc\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:18 crc kubenswrapper[4998]: I0203 07:09:18.973197 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/daa99d07-07ba-49aa-82c3-8bcfca0c3564-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:18 crc kubenswrapper[4998]: E0203 07:09:18.973262 4998 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Feb 03 07:09:18 crc kubenswrapper[4998]: E0203 07:09:18.973309 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/17941a92-4153-44a0-aab8-57fdb95a272f-operator-scripts podName:17941a92-4153-44a0-aab8-57fdb95a272f nodeName:}" failed. No retries permitted until 2026-02-03 07:09:19.473293792 +0000 UTC m=+1397.759987598 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/17941a92-4153-44a0-aab8-57fdb95a272f-operator-scripts") pod "keystone-9c9a-account-create-update-p7lvz" (UID: "17941a92-4153-44a0-aab8-57fdb95a272f") : configmap "openstack-scripts" not found Feb 03 07:09:19 crc kubenswrapper[4998]: E0203 07:09:19.003476 4998 projected.go:194] Error preparing data for projected volume kube-api-access-vrgqn for pod openstack/keystone-9c9a-account-create-update-p7lvz: failed to fetch token: serviceaccounts "galera-openstack" not found Feb 03 07:09:19 crc kubenswrapper[4998]: E0203 07:09:19.003552 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/17941a92-4153-44a0-aab8-57fdb95a272f-kube-api-access-vrgqn podName:17941a92-4153-44a0-aab8-57fdb95a272f nodeName:}" failed. 
No retries permitted until 2026-02-03 07:09:19.503532028 +0000 UTC m=+1397.790225834 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-vrgqn" (UniqueName: "kubernetes.io/projected/17941a92-4153-44a0-aab8-57fdb95a272f-kube-api-access-vrgqn") pod "keystone-9c9a-account-create-update-p7lvz" (UID: "17941a92-4153-44a0-aab8-57fdb95a272f") : failed to fetch token: serviceaccounts "galera-openstack" not found Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.013746 4998 generic.go:334] "Generic (PLEG): container finished" podID="11bbac6a-fd7e-447b-af99-d0ebada848df" containerID="aa6b3f085720a2a5d6f33b6f4c5735de460cf2e60593dc25d3f9bedcbf5d7741" exitCode=2 Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.013839 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"11bbac6a-fd7e-447b-af99-d0ebada848df","Type":"ContainerDied","Data":"aa6b3f085720a2a5d6f33b6f4c5735de460cf2e60593dc25d3f9bedcbf5d7741"} Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.051228 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="3e51da52-0dd9-4394-bb81-c4a1e534ad17" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.205:8775/\": read tcp 10.217.0.2:39436->10.217.0.205:8775: read: connection reset by peer" Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.051606 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="3e51da52-0dd9-4394-bb81-c4a1e534ad17" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.205:8775/\": read tcp 10.217.0.2:39432->10.217.0.205:8775: read: connection reset by peer" Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.079626 4998 generic.go:334] "Generic (PLEG): container finished" podID="20fec019-d2d1-4625-960c-c16004cfa5aa" containerID="bd760878032362b5e2dbbae0a17767478623ce84fedeea219d3003f8e309bbec" exitCode=0 Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.084935 4998 generic.go:334] "Generic (PLEG): container finished" podID="20fec019-d2d1-4625-960c-c16004cfa5aa" containerID="da3d6f3489ff3ea9baaa57c3a51e435fe24ab651749286ef5d6fb275fe2c2a08" exitCode=2 Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.079710 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"20fec019-d2d1-4625-960c-c16004cfa5aa","Type":"ContainerDied","Data":"bd760878032362b5e2dbbae0a17767478623ce84fedeea219d3003f8e309bbec"} Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.085124 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"20fec019-d2d1-4625-960c-c16004cfa5aa","Type":"ContainerDied","Data":"da3d6f3489ff3ea9baaa57c3a51e435fe24ab651749286ef5d6fb275fe2c2a08"} Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.089449 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"7d04a830-0b11-4766-b9bd-56a6f4b740ca","Type":"ContainerDied","Data":"00f0c3127085073649953819f7d46f3820f2d37eebd2b1d90b011d11858cdbe8"} Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.089564 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.094161 4998 generic.go:334] "Generic (PLEG): container finished" podID="18701d06-8e80-4822-9128-dd9ba0e5bf1c" containerID="9da387018f31a72ba944e277abd1e671ca26b317532692abef949c8149cd34b5" exitCode=0 Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.094267 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"18701d06-8e80-4822-9128-dd9ba0e5bf1c","Type":"ContainerDied","Data":"9da387018f31a72ba944e277abd1e671ca26b317532692abef949c8149cd34b5"} Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.116496 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-d2c0-account-create-update-688tk" event={"ID":"daa99d07-07ba-49aa-82c3-8bcfca0c3564","Type":"ContainerDied","Data":"f90d044887a61c80acf492df536cfbf05ce80fcd042d7767a21567c3dd1cc9a8"} Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.116511 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-d2c0-account-create-update-688tk" Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.130743 4998 scope.go:117] "RemoveContainer" containerID="5b7b33ab0c2f178c9cbaf0012a4edcaa73280c5ee8e2f4c6a4faf102c9592514" Feb 03 07:09:19 crc kubenswrapper[4998]: E0203 07:09:19.134749 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5b7b33ab0c2f178c9cbaf0012a4edcaa73280c5ee8e2f4c6a4faf102c9592514\": container with ID starting with 5b7b33ab0c2f178c9cbaf0012a4edcaa73280c5ee8e2f4c6a4faf102c9592514 not found: ID does not exist" containerID="5b7b33ab0c2f178c9cbaf0012a4edcaa73280c5ee8e2f4c6a4faf102c9592514" Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.134820 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b7b33ab0c2f178c9cbaf0012a4edcaa73280c5ee8e2f4c6a4faf102c9592514"} err="failed to get container status \"5b7b33ab0c2f178c9cbaf0012a4edcaa73280c5ee8e2f4c6a4faf102c9592514\": rpc error: code = NotFound desc = could not find container \"5b7b33ab0c2f178c9cbaf0012a4edcaa73280c5ee8e2f4c6a4faf102c9592514\": container with ID starting with 5b7b33ab0c2f178c9cbaf0012a4edcaa73280c5ee8e2f4c6a4faf102c9592514 not found: ID does not exist" Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.134847 4998 scope.go:117] "RemoveContainer" containerID="9d09a59cc4f696b04a31de6522435eb8ea7f55af8026ff6e14ede101c1dc6a99" Feb 03 07:09:19 crc kubenswrapper[4998]: E0203 07:09:19.135412 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d09a59cc4f696b04a31de6522435eb8ea7f55af8026ff6e14ede101c1dc6a99\": container with ID starting with 9d09a59cc4f696b04a31de6522435eb8ea7f55af8026ff6e14ede101c1dc6a99 not found: ID does not exist" containerID="9d09a59cc4f696b04a31de6522435eb8ea7f55af8026ff6e14ede101c1dc6a99" Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.135468 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d09a59cc4f696b04a31de6522435eb8ea7f55af8026ff6e14ede101c1dc6a99"} err="failed to get container status \"9d09a59cc4f696b04a31de6522435eb8ea7f55af8026ff6e14ede101c1dc6a99\": rpc error: code = NotFound desc = could not find container \"9d09a59cc4f696b04a31de6522435eb8ea7f55af8026ff6e14ede101c1dc6a99\": container with ID starting with 
9d09a59cc4f696b04a31de6522435eb8ea7f55af8026ff6e14ede101c1dc6a99 not found: ID does not exist" Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.135483 4998 scope.go:117] "RemoveContainer" containerID="1c1e1c81810cf3242e2c7121522b6a0136f94d3920df202359c1972e68fa1635" Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.145027 4998 generic.go:334] "Generic (PLEG): container finished" podID="4767a1f8-263b-4c49-8cb7-7a7f9b8271dc" containerID="0f64922061bc19d3ba8a787c55577aa51eb3d22b40d2c5ba7c8080728dee564e" exitCode=0 Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.145111 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc","Type":"ContainerDied","Data":"0f64922061bc19d3ba8a787c55577aa51eb3d22b40d2c5ba7c8080728dee564e"} Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.163159 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-rv82q" event={"ID":"28b1541e-d0f2-46fa-8518-554fb699ab4e","Type":"ContainerDied","Data":"45cb6bddb50a3ecf5f763e83fe808826c43c64c90d775ae3039daf4b31cf165c"} Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.163308 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-rv82q" Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.201133 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="59162297-8dd9-4ddd-a18b-8045d2f6c610" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.178:8776/healthcheck\": dial tcp 10.217.0.178:8776: connect: connection refused" Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.227037 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-galera-0" podUID="f5714626-00c5-4b11-b056-40ff428fc017" containerName="galera" containerID="cri-o://7fc3cd4817b7865d5e94684c7fc6231e72d32602e47f8979e462f57f046e481a" gracePeriod=30 Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.339690 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.382304 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.401793 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-65da-account-create-update-mnzrq" Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.423066 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.427465 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-d2c0-account-create-update-688tk"] Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.442616 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-d2c0-account-create-update-688tk"] Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.461172 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-rv82q"] Feb 03 07:09:19 crc kubenswrapper[4998]: E0203 07:09:19.463922 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="659621b3871feec5ab21a36f8efb54aeb0a5418dddb477388b03fcd569919a1f" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Feb 03 07:09:19 crc kubenswrapper[4998]: E0203 07:09:19.471004 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="659621b3871feec5ab21a36f8efb54aeb0a5418dddb477388b03fcd569919a1f" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.473131 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-rv82q"] Feb 03 07:09:19 crc kubenswrapper[4998]: E0203 07:09:19.482797 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="659621b3871feec5ab21a36f8efb54aeb0a5418dddb477388b03fcd569919a1f" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Feb 03 07:09:19 crc kubenswrapper[4998]: E0203 07:09:19.482868 4998 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="ec1012bb-b11f-4248-aa77-f9076a2a1fc9" containerName="nova-cell0-conductor-conductor" Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.487799 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-config-data\") pod \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\" (UID: \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\") " Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.487844 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5qfgd\" (UniqueName: \"kubernetes.io/projected/df47c2af-c1ee-4ab7-9691-ea6e28fa1831-kube-api-access-5qfgd\") pod \"df47c2af-c1ee-4ab7-9691-ea6e28fa1831\" (UID: \"df47c2af-c1ee-4ab7-9691-ea6e28fa1831\") " Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.487896 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-internal-tls-certs\") pod \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\" (UID: \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\") " Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.487931 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-httpd-run\") pod \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\" (UID: \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\") " Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.488001 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-logs\") pod \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\" (UID: \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\") " Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.488069 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-combined-ca-bundle\") pod \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\" (UID: \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\") " Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.488091 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\" (UID: \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\") " Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.488106 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/df47c2af-c1ee-4ab7-9691-ea6e28fa1831-operator-scripts\") pod \"df47c2af-c1ee-4ab7-9691-ea6e28fa1831\" (UID: \"df47c2af-c1ee-4ab7-9691-ea6e28fa1831\") " Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.488133 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nfp79\" (UniqueName: \"kubernetes.io/projected/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-kube-api-access-nfp79\") pod \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\" (UID: \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\") " Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.488171 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-scripts\") pod \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\" (UID: \"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc\") " Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.488378 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/17941a92-4153-44a0-aab8-57fdb95a272f-operator-scripts\") pod \"keystone-9c9a-account-create-update-p7lvz\" (UID: \"17941a92-4153-44a0-aab8-57fdb95a272f\") " pod="openstack/keystone-9c9a-account-create-update-p7lvz" Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.490518 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "4767a1f8-263b-4c49-8cb7-7a7f9b8271dc" (UID: "4767a1f8-263b-4c49-8cb7-7a7f9b8271dc"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.491618 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-logs" (OuterVolumeSpecName: "logs") pod "4767a1f8-263b-4c49-8cb7-7a7f9b8271dc" (UID: "4767a1f8-263b-4c49-8cb7-7a7f9b8271dc"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.492084 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df47c2af-c1ee-4ab7-9691-ea6e28fa1831-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "df47c2af-c1ee-4ab7-9691-ea6e28fa1831" (UID: "df47c2af-c1ee-4ab7-9691-ea6e28fa1831"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:09:19 crc kubenswrapper[4998]: E0203 07:09:19.494438 4998 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Feb 03 07:09:19 crc kubenswrapper[4998]: E0203 07:09:19.494514 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/17941a92-4153-44a0-aab8-57fdb95a272f-operator-scripts podName:17941a92-4153-44a0-aab8-57fdb95a272f nodeName:}" failed. No retries permitted until 2026-02-03 07:09:20.494497176 +0000 UTC m=+1398.781190982 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/17941a92-4153-44a0-aab8-57fdb95a272f-operator-scripts") pod "keystone-9c9a-account-create-update-p7lvz" (UID: "17941a92-4153-44a0-aab8-57fdb95a272f") : configmap "openstack-scripts" not found Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.510832 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "4767a1f8-263b-4c49-8cb7-7a7f9b8271dc" (UID: "4767a1f8-263b-4c49-8cb7-7a7f9b8271dc"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.510968 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df47c2af-c1ee-4ab7-9691-ea6e28fa1831-kube-api-access-5qfgd" (OuterVolumeSpecName: "kube-api-access-5qfgd") pod "df47c2af-c1ee-4ab7-9691-ea6e28fa1831" (UID: "df47c2af-c1ee-4ab7-9691-ea6e28fa1831"). InnerVolumeSpecName "kube-api-access-5qfgd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.526028 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-kube-api-access-nfp79" (OuterVolumeSpecName: "kube-api-access-nfp79") pod "4767a1f8-263b-4c49-8cb7-7a7f9b8271dc" (UID: "4767a1f8-263b-4c49-8cb7-7a7f9b8271dc"). InnerVolumeSpecName "kube-api-access-nfp79". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.529730 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-scripts" (OuterVolumeSpecName: "scripts") pod "4767a1f8-263b-4c49-8cb7-7a7f9b8271dc" (UID: "4767a1f8-263b-4c49-8cb7-7a7f9b8271dc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.563771 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4767a1f8-263b-4c49-8cb7-7a7f9b8271dc" (UID: "4767a1f8-263b-4c49-8cb7-7a7f9b8271dc"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.575934 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.590133 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vrgqn\" (UniqueName: \"kubernetes.io/projected/17941a92-4153-44a0-aab8-57fdb95a272f-kube-api-access-vrgqn\") pod \"keystone-9c9a-account-create-update-p7lvz\" (UID: \"17941a92-4153-44a0-aab8-57fdb95a272f\") " pod="openstack/keystone-9c9a-account-create-update-p7lvz" Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.590355 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.590381 4998 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.590395 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/df47c2af-c1ee-4ab7-9691-ea6e28fa1831-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.590406 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nfp79\" (UniqueName: \"kubernetes.io/projected/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-kube-api-access-nfp79\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.590418 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.590428 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5qfgd\" (UniqueName: \"kubernetes.io/projected/df47c2af-c1ee-4ab7-9691-ea6e28fa1831-kube-api-access-5qfgd\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.590440 4998 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.590450 4998 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-logs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:19 crc kubenswrapper[4998]: E0203 07:09:19.596472 4998 projected.go:194] Error preparing data for projected volume kube-api-access-vrgqn for pod openstack/keystone-9c9a-account-create-update-p7lvz: failed to fetch token: serviceaccounts "galera-openstack" not found Feb 03 07:09:19 crc kubenswrapper[4998]: E0203 07:09:19.596563 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/17941a92-4153-44a0-aab8-57fdb95a272f-kube-api-access-vrgqn podName:17941a92-4153-44a0-aab8-57fdb95a272f nodeName:}" failed. No retries permitted until 2026-02-03 07:09:20.596537066 +0000 UTC m=+1398.883230872 (durationBeforeRetry 1s). 
Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.600078 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-config-data" (OuterVolumeSpecName: "config-data") pod "4767a1f8-263b-4c49-8cb7-7a7f9b8271dc" (UID: "4767a1f8-263b-4c49-8cb7-7a7f9b8271dc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.600442 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "4767a1f8-263b-4c49-8cb7-7a7f9b8271dc" (UID: "4767a1f8-263b-4c49-8cb7-7a7f9b8271dc"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.617915 4998 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc"
Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.690899 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-747cb48568-vkq22" podUID="92b2a8f5-7091-4044-a057-3fc94b78439c" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.160:9311/healthcheck\": dial tcp 10.217.0.160:9311: connect: connection refused"
Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.690995 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-747cb48568-vkq22" podUID="92b2a8f5-7091-4044-a057-3fc94b78439c" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.160:9311/healthcheck\": dial tcp 10.217.0.160:9311: connect: connection refused"
Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.691595 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11bbac6a-fd7e-447b-af99-d0ebada848df-combined-ca-bundle\") pod \"11bbac6a-fd7e-447b-af99-d0ebada848df\" (UID: \"11bbac6a-fd7e-447b-af99-d0ebada848df\") "
Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.691704 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/11bbac6a-fd7e-447b-af99-d0ebada848df-kube-state-metrics-tls-certs\") pod \"11bbac6a-fd7e-447b-af99-d0ebada848df\" (UID: \"11bbac6a-fd7e-447b-af99-d0ebada848df\") "
Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.691911 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8xkjp\" (UniqueName: \"kubernetes.io/projected/11bbac6a-fd7e-447b-af99-d0ebada848df-kube-api-access-8xkjp\") pod \"11bbac6a-fd7e-447b-af99-d0ebada848df\" (UID: \"11bbac6a-fd7e-447b-af99-d0ebada848df\") "
Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.691955 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/11bbac6a-fd7e-447b-af99-d0ebada848df-kube-state-metrics-tls-config\") pod \"11bbac6a-fd7e-447b-af99-d0ebada848df\" (UID: \"11bbac6a-fd7e-447b-af99-d0ebada848df\") "
Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.692418 4998 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\""
Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.692433 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-config-data\") on node \"crc\" DevicePath \"\""
Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.692445 4998 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.697494 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11bbac6a-fd7e-447b-af99-d0ebada848df-kube-api-access-8xkjp" (OuterVolumeSpecName: "kube-api-access-8xkjp") pod "11bbac6a-fd7e-447b-af99-d0ebada848df" (UID: "11bbac6a-fd7e-447b-af99-d0ebada848df"). InnerVolumeSpecName "kube-api-access-8xkjp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.728660 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11bbac6a-fd7e-447b-af99-d0ebada848df-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "11bbac6a-fd7e-447b-af99-d0ebada848df" (UID: "11bbac6a-fd7e-447b-af99-d0ebada848df"). InnerVolumeSpecName "kube-state-metrics-tls-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.752887 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11bbac6a-fd7e-447b-af99-d0ebada848df-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "11bbac6a-fd7e-447b-af99-d0ebada848df" (UID: "11bbac6a-fd7e-447b-af99-d0ebada848df"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.781345 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11bbac6a-fd7e-447b-af99-d0ebada848df-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "11bbac6a-fd7e-447b-af99-d0ebada848df" (UID: "11bbac6a-fd7e-447b-af99-d0ebada848df"). InnerVolumeSpecName "kube-state-metrics-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.825228 4998 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/11bbac6a-fd7e-447b-af99-d0ebada848df-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\""
Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.825263 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11bbac6a-fd7e-447b-af99-d0ebada848df-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.825273 4998 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/11bbac6a-fd7e-447b-af99-d0ebada848df-kube-state-metrics-tls-certs\") on node \"crc\" DevicePath \"\""
Feb 03 07:09:19 crc kubenswrapper[4998]: I0203 07:09:19.825286 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8xkjp\" (UniqueName: \"kubernetes.io/projected/11bbac6a-fd7e-447b-af99-d0ebada848df-kube-api-access-8xkjp\") on node \"crc\" DevicePath \"\""
Feb 03 07:09:20 crc kubenswrapper[4998]: E0203 07:09:19.999122 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4941f0989a841b63f25282cf5a3988025476630db1ac4e5885254632064a0cc6 is running failed: container process not found" containerID="4941f0989a841b63f25282cf5a3988025476630db1ac4e5885254632064a0cc6" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Feb 03 07:09:20 crc kubenswrapper[4998]: E0203 07:09:19.999539 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4941f0989a841b63f25282cf5a3988025476630db1ac4e5885254632064a0cc6 is running failed: container process not found" containerID="4941f0989a841b63f25282cf5a3988025476630db1ac4e5885254632064a0cc6" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Feb 03 07:09:20 crc kubenswrapper[4998]: E0203 07:09:20.000019 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4941f0989a841b63f25282cf5a3988025476630db1ac4e5885254632064a0cc6 is running failed: container process not found" containerID="4941f0989a841b63f25282cf5a3988025476630db1ac4e5885254632064a0cc6" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Feb 03 07:09:20 crc kubenswrapper[4998]: E0203 07:09:20.000087 4998 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4941f0989a841b63f25282cf5a3988025476630db1ac4e5885254632064a0cc6 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="8a94b6e1-cdf7-4088-9f55-60457fa411f4" containerName="nova-scheduler-scheduler"
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.100254 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.107542 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.120106 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.125917 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0"
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.138009 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.146261 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.147434 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-747cb48568-vkq22"
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.205163 4998 generic.go:334] "Generic (PLEG): container finished" podID="0df5b57a-e165-41ef-8e19-30b87b9566f3" containerID="5b99f558d74ddfe530b75fa587287403c988c189202261dc57a486311499d5c9" exitCode=0
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.205316 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-78755df597-h9t98" event={"ID":"0df5b57a-e165-41ef-8e19-30b87b9566f3","Type":"ContainerDied","Data":"5b99f558d74ddfe530b75fa587287403c988c189202261dc57a486311499d5c9"}
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.223900 4998 generic.go:334] "Generic (PLEG): container finished" podID="20fec019-d2d1-4625-960c-c16004cfa5aa" containerID="735a947af0313563fd152251e4f01a7d98bb038f3738b13aa0956d4060411491" exitCode=0
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.223973 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"20fec019-d2d1-4625-960c-c16004cfa5aa","Type":"ContainerDied","Data":"735a947af0313563fd152251e4f01a7d98bb038f3738b13aa0956d4060411491"}
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.231195 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"11bbac6a-fd7e-447b-af99-d0ebada848df","Type":"ContainerDied","Data":"a0f85722fdd3ee4fadb797fc4382cc57942cd4b679971803845a89ce09bfe1e2"}
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.231247 4998 scope.go:117] "RemoveContainer" containerID="aa6b3f085720a2a5d6f33b6f4c5735de460cf2e60593dc25d3f9bedcbf5d7741"
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.231348 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.234707 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1280d3d-d626-4af9-b262-93fea6a5bbc9-logs\") pod \"b1280d3d-d626-4af9-b262-93fea6a5bbc9\" (UID: \"b1280d3d-d626-4af9-b262-93fea6a5bbc9\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.234739 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/59162297-8dd9-4ddd-a18b-8045d2f6c610-config-data-custom\") pod \"59162297-8dd9-4ddd-a18b-8045d2f6c610\" (UID: \"59162297-8dd9-4ddd-a18b-8045d2f6c610\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.234769 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-62dc5\" (UniqueName: \"kubernetes.io/projected/3e51da52-0dd9-4394-bb81-c4a1e534ad17-kube-api-access-62dc5\") pod \"3e51da52-0dd9-4394-bb81-c4a1e534ad17\" (UID: \"3e51da52-0dd9-4394-bb81-c4a1e534ad17\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.234814 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/92b2a8f5-7091-4044-a057-3fc94b78439c-config-data-custom\") pod \"92b2a8f5-7091-4044-a057-3fc94b78439c\" (UID: \"92b2a8f5-7091-4044-a057-3fc94b78439c\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.234847 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e51da52-0dd9-4394-bb81-c4a1e534ad17-combined-ca-bundle\") pod \"3e51da52-0dd9-4394-bb81-c4a1e534ad17\" (UID: \"3e51da52-0dd9-4394-bb81-c4a1e534ad17\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.234872 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92b2a8f5-7091-4044-a057-3fc94b78439c-config-data\") pod \"92b2a8f5-7091-4044-a057-3fc94b78439c\" (UID: \"92b2a8f5-7091-4044-a057-3fc94b78439c\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.234908 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bt9ps\" (UniqueName: \"kubernetes.io/projected/8a94b6e1-cdf7-4088-9f55-60457fa411f4-kube-api-access-bt9ps\") pod \"8a94b6e1-cdf7-4088-9f55-60457fa411f4\" (UID: \"8a94b6e1-cdf7-4088-9f55-60457fa411f4\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.234940 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/59162297-8dd9-4ddd-a18b-8045d2f6c610-public-tls-certs\") pod \"59162297-8dd9-4ddd-a18b-8045d2f6c610\" (UID: \"59162297-8dd9-4ddd-a18b-8045d2f6c610\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.234964 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/18701d06-8e80-4822-9128-dd9ba0e5bf1c-logs\") pod \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\" (UID: \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.234987 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3c2bbe1b-74b6-4e3c-8468-735ad0b00146-kolla-config\") pod \"3c2bbe1b-74b6-4e3c-8468-735ad0b00146\" (UID: 
\"3c2bbe1b-74b6-4e3c-8468-735ad0b00146\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.235007 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/18701d06-8e80-4822-9128-dd9ba0e5bf1c-httpd-run\") pod \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\" (UID: \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.235027 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1280d3d-d626-4af9-b262-93fea6a5bbc9-combined-ca-bundle\") pod \"b1280d3d-d626-4af9-b262-93fea6a5bbc9\" (UID: \"b1280d3d-d626-4af9-b262-93fea6a5bbc9\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.235058 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3c2bbe1b-74b6-4e3c-8468-735ad0b00146-config-data\") pod \"3c2bbe1b-74b6-4e3c-8468-735ad0b00146\" (UID: \"3c2bbe1b-74b6-4e3c-8468-735ad0b00146\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.235098 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1280d3d-d626-4af9-b262-93fea6a5bbc9-config-data\") pod \"b1280d3d-d626-4af9-b262-93fea6a5bbc9\" (UID: \"b1280d3d-d626-4af9-b262-93fea6a5bbc9\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.235126 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a94b6e1-cdf7-4088-9f55-60457fa411f4-config-data\") pod \"8a94b6e1-cdf7-4088-9f55-60457fa411f4\" (UID: \"8a94b6e1-cdf7-4088-9f55-60457fa411f4\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.235147 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59162297-8dd9-4ddd-a18b-8045d2f6c610-config-data\") pod \"59162297-8dd9-4ddd-a18b-8045d2f6c610\" (UID: \"59162297-8dd9-4ddd-a18b-8045d2f6c610\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.235170 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59162297-8dd9-4ddd-a18b-8045d2f6c610-combined-ca-bundle\") pod \"59162297-8dd9-4ddd-a18b-8045d2f6c610\" (UID: \"59162297-8dd9-4ddd-a18b-8045d2f6c610\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.235192 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\" (UID: \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.235224 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c2bbe1b-74b6-4e3c-8468-735ad0b00146-combined-ca-bundle\") pod \"3c2bbe1b-74b6-4e3c-8468-735ad0b00146\" (UID: \"3c2bbe1b-74b6-4e3c-8468-735ad0b00146\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.235256 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/92b2a8f5-7091-4044-a057-3fc94b78439c-public-tls-certs\") pod \"92b2a8f5-7091-4044-a057-3fc94b78439c\" (UID: \"92b2a8f5-7091-4044-a057-3fc94b78439c\") " Feb 03 07:09:20 crc 
kubenswrapper[4998]: I0203 07:09:20.235296 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h6t2n\" (UniqueName: \"kubernetes.io/projected/92b2a8f5-7091-4044-a057-3fc94b78439c-kube-api-access-h6t2n\") pod \"92b2a8f5-7091-4044-a057-3fc94b78439c\" (UID: \"92b2a8f5-7091-4044-a057-3fc94b78439c\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.235327 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18701d06-8e80-4822-9128-dd9ba0e5bf1c-combined-ca-bundle\") pod \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\" (UID: \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.235361 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e51da52-0dd9-4394-bb81-c4a1e534ad17-logs\") pod \"3e51da52-0dd9-4394-bb81-c4a1e534ad17\" (UID: \"3e51da52-0dd9-4394-bb81-c4a1e534ad17\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.235391 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/59162297-8dd9-4ddd-a18b-8045d2f6c610-internal-tls-certs\") pod \"59162297-8dd9-4ddd-a18b-8045d2f6c610\" (UID: \"59162297-8dd9-4ddd-a18b-8045d2f6c610\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.235424 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1280d3d-d626-4af9-b262-93fea6a5bbc9-public-tls-certs\") pod \"b1280d3d-d626-4af9-b262-93fea6a5bbc9\" (UID: \"b1280d3d-d626-4af9-b262-93fea6a5bbc9\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.235448 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a94b6e1-cdf7-4088-9f55-60457fa411f4-combined-ca-bundle\") pod \"8a94b6e1-cdf7-4088-9f55-60457fa411f4\" (UID: \"8a94b6e1-cdf7-4088-9f55-60457fa411f4\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.235481 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/18701d06-8e80-4822-9128-dd9ba0e5bf1c-scripts\") pod \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\" (UID: \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.235514 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/59162297-8dd9-4ddd-a18b-8045d2f6c610-scripts\") pod \"59162297-8dd9-4ddd-a18b-8045d2f6c610\" (UID: \"59162297-8dd9-4ddd-a18b-8045d2f6c610\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.235536 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4c7sz\" (UniqueName: \"kubernetes.io/projected/59162297-8dd9-4ddd-a18b-8045d2f6c610-kube-api-access-4c7sz\") pod \"59162297-8dd9-4ddd-a18b-8045d2f6c610\" (UID: \"59162297-8dd9-4ddd-a18b-8045d2f6c610\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.235561 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2jx9q\" (UniqueName: \"kubernetes.io/projected/18701d06-8e80-4822-9128-dd9ba0e5bf1c-kube-api-access-2jx9q\") pod \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\" (UID: \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\") " Feb 03 07:09:20 crc 
kubenswrapper[4998]: I0203 07:09:20.235581 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/18701d06-8e80-4822-9128-dd9ba0e5bf1c-public-tls-certs\") pod \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\" (UID: \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.235604 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e51da52-0dd9-4394-bb81-c4a1e534ad17-nova-metadata-tls-certs\") pod \"3e51da52-0dd9-4394-bb81-c4a1e534ad17\" (UID: \"3e51da52-0dd9-4394-bb81-c4a1e534ad17\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.235632 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/59162297-8dd9-4ddd-a18b-8045d2f6c610-etc-machine-id\") pod \"59162297-8dd9-4ddd-a18b-8045d2f6c610\" (UID: \"59162297-8dd9-4ddd-a18b-8045d2f6c610\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.235658 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/59162297-8dd9-4ddd-a18b-8045d2f6c610-logs\") pod \"59162297-8dd9-4ddd-a18b-8045d2f6c610\" (UID: \"59162297-8dd9-4ddd-a18b-8045d2f6c610\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.235679 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18701d06-8e80-4822-9128-dd9ba0e5bf1c-config-data\") pod \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\" (UID: \"18701d06-8e80-4822-9128-dd9ba0e5bf1c\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.235703 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1280d3d-d626-4af9-b262-93fea6a5bbc9-internal-tls-certs\") pod \"b1280d3d-d626-4af9-b262-93fea6a5bbc9\" (UID: \"b1280d3d-d626-4af9-b262-93fea6a5bbc9\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.235748 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c2bbe1b-74b6-4e3c-8468-735ad0b00146-memcached-tls-certs\") pod \"3c2bbe1b-74b6-4e3c-8468-735ad0b00146\" (UID: \"3c2bbe1b-74b6-4e3c-8468-735ad0b00146\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.235771 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7sbff\" (UniqueName: \"kubernetes.io/projected/b1280d3d-d626-4af9-b262-93fea6a5bbc9-kube-api-access-7sbff\") pod \"b1280d3d-d626-4af9-b262-93fea6a5bbc9\" (UID: \"b1280d3d-d626-4af9-b262-93fea6a5bbc9\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.235979 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/92b2a8f5-7091-4044-a057-3fc94b78439c-logs\") pod \"92b2a8f5-7091-4044-a057-3fc94b78439c\" (UID: \"92b2a8f5-7091-4044-a057-3fc94b78439c\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.236023 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e51da52-0dd9-4394-bb81-c4a1e534ad17-config-data\") pod \"3e51da52-0dd9-4394-bb81-c4a1e534ad17\" (UID: \"3e51da52-0dd9-4394-bb81-c4a1e534ad17\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.236079 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92b2a8f5-7091-4044-a057-3fc94b78439c-combined-ca-bundle\") pod \"92b2a8f5-7091-4044-a057-3fc94b78439c\" (UID: \"92b2a8f5-7091-4044-a057-3fc94b78439c\") "
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.236104 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/92b2a8f5-7091-4044-a057-3fc94b78439c-internal-tls-certs\") pod \"92b2a8f5-7091-4044-a057-3fc94b78439c\" (UID: \"92b2a8f5-7091-4044-a057-3fc94b78439c\") "
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.243330 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c2bbe1b-74b6-4e3c-8468-735ad0b00146-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "3c2bbe1b-74b6-4e3c-8468-735ad0b00146" (UID: "3c2bbe1b-74b6-4e3c-8468-735ad0b00146"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.244476 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92b2a8f5-7091-4044-a057-3fc94b78439c-kube-api-access-h6t2n" (OuterVolumeSpecName: "kube-api-access-h6t2n") pod "92b2a8f5-7091-4044-a057-3fc94b78439c" (UID: "92b2a8f5-7091-4044-a057-3fc94b78439c"). InnerVolumeSpecName "kube-api-access-h6t2n". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.245707 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/18701d06-8e80-4822-9128-dd9ba0e5bf1c-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "18701d06-8e80-4822-9128-dd9ba0e5bf1c" (UID: "18701d06-8e80-4822-9128-dd9ba0e5bf1c"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.246051 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1280d3d-d626-4af9-b262-93fea6a5bbc9-logs" (OuterVolumeSpecName: "logs") pod "b1280d3d-d626-4af9-b262-93fea6a5bbc9" (UID: "b1280d3d-d626-4af9-b262-93fea6a5bbc9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.247407 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c2bbe1b-74b6-4e3c-8468-735ad0b00146-config-data" (OuterVolumeSpecName: "config-data") pod "3c2bbe1b-74b6-4e3c-8468-735ad0b00146" (UID: "3c2bbe1b-74b6-4e3c-8468-735ad0b00146"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.250616 4998 generic.go:334] "Generic (PLEG): container finished" podID="8a94b6e1-cdf7-4088-9f55-60457fa411f4" containerID="4941f0989a841b63f25282cf5a3988025476630db1ac4e5885254632064a0cc6" exitCode=0
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.250718 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8a94b6e1-cdf7-4088-9f55-60457fa411f4","Type":"ContainerDied","Data":"4941f0989a841b63f25282cf5a3988025476630db1ac4e5885254632064a0cc6"}
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.250753 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8a94b6e1-cdf7-4088-9f55-60457fa411f4","Type":"ContainerDied","Data":"5ec0b5ec4fb1b7333e0489cfb6bf47739e935fbfd86ad7e44ffc03d0071abfc7"}
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.250841 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.251029 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/18701d06-8e80-4822-9128-dd9ba0e5bf1c-logs" (OuterVolumeSpecName: "logs") pod "18701d06-8e80-4822-9128-dd9ba0e5bf1c" (UID: "18701d06-8e80-4822-9128-dd9ba0e5bf1c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.251312 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/92b2a8f5-7091-4044-a057-3fc94b78439c-logs" (OuterVolumeSpecName: "logs") pod "92b2a8f5-7091-4044-a057-3fc94b78439c" (UID: "92b2a8f5-7091-4044-a057-3fc94b78439c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.253006 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e51da52-0dd9-4394-bb81-c4a1e534ad17-logs" (OuterVolumeSpecName: "logs") pod "3e51da52-0dd9-4394-bb81-c4a1e534ad17" (UID: "3e51da52-0dd9-4394-bb81-c4a1e534ad17"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.254590 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/59162297-8dd9-4ddd-a18b-8045d2f6c610-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "59162297-8dd9-4ddd-a18b-8045d2f6c610" (UID: "59162297-8dd9-4ddd-a18b-8045d2f6c610"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.255026 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/59162297-8dd9-4ddd-a18b-8045d2f6c610-logs" (OuterVolumeSpecName: "logs") pod "59162297-8dd9-4ddd-a18b-8045d2f6c610" (UID: "59162297-8dd9-4ddd-a18b-8045d2f6c610"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.255869 4998 generic.go:334] "Generic (PLEG): container finished" podID="c122d5d6-c472-46c4-9baf-195893bff38a" containerID="115a3825a27cab7fdfb490f904a7aeed06539b9f2cfa910218ec08fed15d9afa" exitCode=0
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.255941 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-55ccbc8794-9m7vc" event={"ID":"c122d5d6-c472-46c4-9baf-195893bff38a","Type":"ContainerDied","Data":"115a3825a27cab7fdfb490f904a7aeed06539b9f2cfa910218ec08fed15d9afa"}
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.263092 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e51da52-0dd9-4394-bb81-c4a1e534ad17-kube-api-access-62dc5" (OuterVolumeSpecName: "kube-api-access-62dc5") pod "3e51da52-0dd9-4394-bb81-c4a1e534ad17" (UID: "3e51da52-0dd9-4394-bb81-c4a1e534ad17"). InnerVolumeSpecName "kube-api-access-62dc5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.263653 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18701d06-8e80-4822-9128-dd9ba0e5bf1c-scripts" (OuterVolumeSpecName: "scripts") pod "18701d06-8e80-4822-9128-dd9ba0e5bf1c" (UID: "18701d06-8e80-4822-9128-dd9ba0e5bf1c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.263740 4998 scope.go:117] "RemoveContainer" containerID="4941f0989a841b63f25282cf5a3988025476630db1ac4e5885254632064a0cc6"
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.263828 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1280d3d-d626-4af9-b262-93fea6a5bbc9-kube-api-access-7sbff" (OuterVolumeSpecName: "kube-api-access-7sbff") pod "b1280d3d-d626-4af9-b262-93fea6a5bbc9" (UID: "b1280d3d-d626-4af9-b262-93fea6a5bbc9"). InnerVolumeSpecName "kube-api-access-7sbff". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.264410 4998 generic.go:334] "Generic (PLEG): container finished" podID="3c2bbe1b-74b6-4e3c-8468-735ad0b00146" containerID="7dd0a0658aa5ed78204aab74bc571019a1cf718b8da1141cbae6e79b65aadb7e" exitCode=0
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.264515 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0"
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.264516 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"3c2bbe1b-74b6-4e3c-8468-735ad0b00146","Type":"ContainerDied","Data":"7dd0a0658aa5ed78204aab74bc571019a1cf718b8da1141cbae6e79b65aadb7e"}
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.264552 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"3c2bbe1b-74b6-4e3c-8468-735ad0b00146","Type":"ContainerDied","Data":"9018affb46e48c99b7178d1e37d5d7a5ca88267d23450c11def1be28b1dca6e1"}
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.265383 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92b2a8f5-7091-4044-a057-3fc94b78439c-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "92b2a8f5-7091-4044-a057-3fc94b78439c" (UID: "92b2a8f5-7091-4044-a057-3fc94b78439c"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.265710 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "glance") pod "18701d06-8e80-4822-9128-dd9ba0e5bf1c" (UID: "18701d06-8e80-4822-9128-dd9ba0e5bf1c"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.265809 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59162297-8dd9-4ddd-a18b-8045d2f6c610-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "59162297-8dd9-4ddd-a18b-8045d2f6c610" (UID: "59162297-8dd9-4ddd-a18b-8045d2f6c610"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.266141 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59162297-8dd9-4ddd-a18b-8045d2f6c610-scripts" (OuterVolumeSpecName: "scripts") pod "59162297-8dd9-4ddd-a18b-8045d2f6c610" (UID: "59162297-8dd9-4ddd-a18b-8045d2f6c610"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.274505 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18701d06-8e80-4822-9128-dd9ba0e5bf1c-kube-api-access-2jx9q" (OuterVolumeSpecName: "kube-api-access-2jx9q") pod "18701d06-8e80-4822-9128-dd9ba0e5bf1c" (UID: "18701d06-8e80-4822-9128-dd9ba0e5bf1c"). InnerVolumeSpecName "kube-api-access-2jx9q". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.285093 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59162297-8dd9-4ddd-a18b-8045d2f6c610-kube-api-access-4c7sz" (OuterVolumeSpecName: "kube-api-access-4c7sz") pod "59162297-8dd9-4ddd-a18b-8045d2f6c610" (UID: "59162297-8dd9-4ddd-a18b-8045d2f6c610"). InnerVolumeSpecName "kube-api-access-4c7sz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.287017 4998 generic.go:334] "Generic (PLEG): container finished" podID="92b2a8f5-7091-4044-a057-3fc94b78439c" containerID="240cf161b75ce831815244c0f07efbf2f46a5553a467cc8c7c9190165768d96e" exitCode=0
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.287100 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-747cb48568-vkq22" event={"ID":"92b2a8f5-7091-4044-a057-3fc94b78439c","Type":"ContainerDied","Data":"240cf161b75ce831815244c0f07efbf2f46a5553a467cc8c7c9190165768d96e"}
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.287132 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-747cb48568-vkq22" event={"ID":"92b2a8f5-7091-4044-a057-3fc94b78439c","Type":"ContainerDied","Data":"47d1fe608f0400993fa03cdad2f24f3a0453bce91ace38179532262a167a10fa"}
Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.287204 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-747cb48568-vkq22"
Need to start a new one" pod="openstack/barbican-api-747cb48568-vkq22" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.287472 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a94b6e1-cdf7-4088-9f55-60457fa411f4-kube-api-access-bt9ps" (OuterVolumeSpecName: "kube-api-access-bt9ps") pod "8a94b6e1-cdf7-4088-9f55-60457fa411f4" (UID: "8a94b6e1-cdf7-4088-9f55-60457fa411f4"). InnerVolumeSpecName "kube-api-access-bt9ps". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.292332 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"18701d06-8e80-4822-9128-dd9ba0e5bf1c","Type":"ContainerDied","Data":"8120ba3ef48a2454a4d7882b34f5816ff1db40ae331e6ccb89f27488c8e2810c"} Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.292426 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.295047 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4767a1f8-263b-4c49-8cb7-7a7f9b8271dc","Type":"ContainerDied","Data":"918defb49b218088e5c8bb4389970c6bc72985ee7638f7092bc429c731e59c13"} Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.295110 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.317832 4998 scope.go:117] "RemoveContainer" containerID="4941f0989a841b63f25282cf5a3988025476630db1ac4e5885254632064a0cc6" Feb 03 07:09:20 crc kubenswrapper[4998]: E0203 07:09:20.318419 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4941f0989a841b63f25282cf5a3988025476630db1ac4e5885254632064a0cc6\": container with ID starting with 4941f0989a841b63f25282cf5a3988025476630db1ac4e5885254632064a0cc6 not found: ID does not exist" containerID="4941f0989a841b63f25282cf5a3988025476630db1ac4e5885254632064a0cc6" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.318444 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4941f0989a841b63f25282cf5a3988025476630db1ac4e5885254632064a0cc6"} err="failed to get container status \"4941f0989a841b63f25282cf5a3988025476630db1ac4e5885254632064a0cc6\": rpc error: code = NotFound desc = could not find container \"4941f0989a841b63f25282cf5a3988025476630db1ac4e5885254632064a0cc6\": container with ID starting with 4941f0989a841b63f25282cf5a3988025476630db1ac4e5885254632064a0cc6 not found: ID does not exist" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.318460 4998 scope.go:117] "RemoveContainer" containerID="7dd0a0658aa5ed78204aab74bc571019a1cf718b8da1141cbae6e79b65aadb7e" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.319630 4998 generic.go:334] "Generic (PLEG): container finished" podID="b1280d3d-d626-4af9-b262-93fea6a5bbc9" containerID="f4738971fa2556954577c66479ee76038051cc7ed9700481aa4de2594ba0b983" exitCode=0 Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.319775 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.320194 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b1280d3d-d626-4af9-b262-93fea6a5bbc9","Type":"ContainerDied","Data":"f4738971fa2556954577c66479ee76038051cc7ed9700481aa4de2594ba0b983"} Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.320250 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b1280d3d-d626-4af9-b262-93fea6a5bbc9","Type":"ContainerDied","Data":"3cac9d8cd817ada51f05662f9efe986e05e76cc89cff3d334f67cffa2cc3efca"} Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.321865 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.326813 4998 generic.go:334] "Generic (PLEG): container finished" podID="ec1012bb-b11f-4248-aa77-f9076a2a1fc9" containerID="659621b3871feec5ab21a36f8efb54aeb0a5418dddb477388b03fcd569919a1f" exitCode=0 Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.326877 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"ec1012bb-b11f-4248-aa77-f9076a2a1fc9","Type":"ContainerDied","Data":"659621b3871feec5ab21a36f8efb54aeb0a5418dddb477388b03fcd569919a1f"} Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.329800 4998 generic.go:334] "Generic (PLEG): container finished" podID="59162297-8dd9-4ddd-a18b-8045d2f6c610" containerID="32b39b9d482c5108ab853ad62904b980cd74eaab84e9164c4243b13c2f37c3c5" exitCode=0 Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.329881 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"59162297-8dd9-4ddd-a18b-8045d2f6c610","Type":"ContainerDied","Data":"32b39b9d482c5108ab853ad62904b980cd74eaab84e9164c4243b13c2f37c3c5"} Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.329917 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.329898 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"59162297-8dd9-4ddd-a18b-8045d2f6c610","Type":"ContainerDied","Data":"81e179c62f076b1235befec968a1c924c07c06497035bf25bb7be2df848c48d0"} Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.332150 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-65da-account-create-update-mnzrq" event={"ID":"df47c2af-c1ee-4ab7-9691-ea6e28fa1831","Type":"ContainerDied","Data":"77b7d546b82831e3b3aa93b63870138790207ad655c4881ba84dc5413744d2fb"} Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.332215 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-65da-account-create-update-mnzrq" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.344441 4998 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1280d3d-d626-4af9-b262-93fea6a5bbc9-logs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.344470 4998 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/59162297-8dd9-4ddd-a18b-8045d2f6c610-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.344483 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-62dc5\" (UniqueName: \"kubernetes.io/projected/3e51da52-0dd9-4394-bb81-c4a1e534ad17-kube-api-access-62dc5\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.344493 4998 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/92b2a8f5-7091-4044-a057-3fc94b78439c-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.344506 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bt9ps\" (UniqueName: \"kubernetes.io/projected/8a94b6e1-cdf7-4088-9f55-60457fa411f4-kube-api-access-bt9ps\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.344514 4998 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/18701d06-8e80-4822-9128-dd9ba0e5bf1c-logs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.344521 4998 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/18701d06-8e80-4822-9128-dd9ba0e5bf1c-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.344530 4998 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3c2bbe1b-74b6-4e3c-8468-735ad0b00146-kolla-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.344539 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3c2bbe1b-74b6-4e3c-8468-735ad0b00146-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.344567 4998 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.344576 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h6t2n\" (UniqueName: \"kubernetes.io/projected/92b2a8f5-7091-4044-a057-3fc94b78439c-kube-api-access-h6t2n\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.344588 4998 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e51da52-0dd9-4394-bb81-c4a1e534ad17-logs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.344596 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/18701d06-8e80-4822-9128-dd9ba0e5bf1c-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: 
I0203 07:09:20.344605 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/59162297-8dd9-4ddd-a18b-8045d2f6c610-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.344613 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4c7sz\" (UniqueName: \"kubernetes.io/projected/59162297-8dd9-4ddd-a18b-8045d2f6c610-kube-api-access-4c7sz\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.344622 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2jx9q\" (UniqueName: \"kubernetes.io/projected/18701d06-8e80-4822-9128-dd9ba0e5bf1c-kube-api-access-2jx9q\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.344631 4998 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/59162297-8dd9-4ddd-a18b-8045d2f6c610-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.344639 4998 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/59162297-8dd9-4ddd-a18b-8045d2f6c610-logs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.344647 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7sbff\" (UniqueName: \"kubernetes.io/projected/b1280d3d-d626-4af9-b262-93fea6a5bbc9-kube-api-access-7sbff\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.344656 4998 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/92b2a8f5-7091-4044-a057-3fc94b78439c-logs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.350394 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c2bbe1b-74b6-4e3c-8468-735ad0b00146-kube-api-access-r5p28" (OuterVolumeSpecName: "kube-api-access-r5p28") pod "3c2bbe1b-74b6-4e3c-8468-735ad0b00146" (UID: "3c2bbe1b-74b6-4e3c-8468-735ad0b00146"). InnerVolumeSpecName "kube-api-access-r5p28". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.353211 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.354385 4998 generic.go:334] "Generic (PLEG): container finished" podID="13caad28-67cf-4251-9a98-e324e6f9722a" containerID="59b03e0b0b59b7355fdc4f5e7fb6fc2a865970667616376aeba4c7993df8fea4" exitCode=0 Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.354532 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5d94bbfff8-q2v5c" event={"ID":"13caad28-67cf-4251-9a98-e324e6f9722a","Type":"ContainerDied","Data":"59b03e0b0b59b7355fdc4f5e7fb6fc2a865970667616376aeba4c7993df8fea4"} Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.360440 4998 generic.go:334] "Generic (PLEG): container finished" podID="3e51da52-0dd9-4394-bb81-c4a1e534ad17" containerID="df2f93789843ee0c929625754c662eef472a6b4dd88ccbe2cb42cb58b6f17304" exitCode=0 Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.360665 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-9c9a-account-create-update-p7lvz" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.363757 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.364387 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3e51da52-0dd9-4394-bb81-c4a1e534ad17","Type":"ContainerDied","Data":"df2f93789843ee0c929625754c662eef472a6b4dd88ccbe2cb42cb58b6f17304"} Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.364428 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3e51da52-0dd9-4394-bb81-c4a1e534ad17","Type":"ContainerDied","Data":"ea39da380c4f22920d284cc6f15e2b24322f06345bed439ea44f73f6349ff79c"} Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.374239 4998 scope.go:117] "RemoveContainer" containerID="7dd0a0658aa5ed78204aab74bc571019a1cf718b8da1141cbae6e79b65aadb7e" Feb 03 07:09:20 crc kubenswrapper[4998]: E0203 07:09:20.375923 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7dd0a0658aa5ed78204aab74bc571019a1cf718b8da1141cbae6e79b65aadb7e\": container with ID starting with 7dd0a0658aa5ed78204aab74bc571019a1cf718b8da1141cbae6e79b65aadb7e not found: ID does not exist" containerID="7dd0a0658aa5ed78204aab74bc571019a1cf718b8da1141cbae6e79b65aadb7e" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.375952 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7dd0a0658aa5ed78204aab74bc571019a1cf718b8da1141cbae6e79b65aadb7e"} err="failed to get container status \"7dd0a0658aa5ed78204aab74bc571019a1cf718b8da1141cbae6e79b65aadb7e\": rpc error: code = NotFound desc = could not find container \"7dd0a0658aa5ed78204aab74bc571019a1cf718b8da1141cbae6e79b65aadb7e\": container with ID starting with 7dd0a0658aa5ed78204aab74bc571019a1cf718b8da1141cbae6e79b65aadb7e not found: ID does not exist" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.375972 4998 scope.go:117] "RemoveContainer" containerID="240cf161b75ce831815244c0f07efbf2f46a5553a467cc8c7c9190165768d96e" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.383355 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.384120 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9c9a-account-create-update-p7lvz" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.405497 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.414322 4998 scope.go:117] "RemoveContainer" containerID="2796cd74bb8475cbb6d691b8b975cd135e78b04d8ba8f94e7349bbd27134e585" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.422807 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-65da-account-create-update-mnzrq"] Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.453064 4998 scope.go:117] "RemoveContainer" containerID="240cf161b75ce831815244c0f07efbf2f46a5553a467cc8c7c9190165768d96e" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.453765 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-78755df597-h9t98" Feb 03 07:09:20 crc kubenswrapper[4998]: E0203 07:09:20.454595 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"240cf161b75ce831815244c0f07efbf2f46a5553a467cc8c7c9190165768d96e\": container with ID starting with 240cf161b75ce831815244c0f07efbf2f46a5553a467cc8c7c9190165768d96e not found: ID does not exist" containerID="240cf161b75ce831815244c0f07efbf2f46a5553a467cc8c7c9190165768d96e" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.454641 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"240cf161b75ce831815244c0f07efbf2f46a5553a467cc8c7c9190165768d96e"} err="failed to get container status \"240cf161b75ce831815244c0f07efbf2f46a5553a467cc8c7c9190165768d96e\": rpc error: code = NotFound desc = could not find container \"240cf161b75ce831815244c0f07efbf2f46a5553a467cc8c7c9190165768d96e\": container with ID starting with 240cf161b75ce831815244c0f07efbf2f46a5553a467cc8c7c9190165768d96e not found: ID does not exist" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.454682 4998 scope.go:117] "RemoveContainer" containerID="2796cd74bb8475cbb6d691b8b975cd135e78b04d8ba8f94e7349bbd27134e585" Feb 03 07:09:20 crc kubenswrapper[4998]: E0203 07:09:20.455748 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2796cd74bb8475cbb6d691b8b975cd135e78b04d8ba8f94e7349bbd27134e585\": container with ID starting with 2796cd74bb8475cbb6d691b8b975cd135e78b04d8ba8f94e7349bbd27134e585 not found: ID does not exist" containerID="2796cd74bb8475cbb6d691b8b975cd135e78b04d8ba8f94e7349bbd27134e585" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.455868 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2796cd74bb8475cbb6d691b8b975cd135e78b04d8ba8f94e7349bbd27134e585"} err="failed to get container status \"2796cd74bb8475cbb6d691b8b975cd135e78b04d8ba8f94e7349bbd27134e585\": rpc error: code = NotFound desc = could not find container \"2796cd74bb8475cbb6d691b8b975cd135e78b04d8ba8f94e7349bbd27134e585\": container with ID starting with 2796cd74bb8475cbb6d691b8b975cd135e78b04d8ba8f94e7349bbd27134e585 not found: ID does not exist" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.455943 4998 scope.go:117] "RemoveContainer" containerID="9da387018f31a72ba944e277abd1e671ca26b317532692abef949c8149cd34b5" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.456486 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r5p28\" (UniqueName: \"kubernetes.io/projected/3c2bbe1b-74b6-4e3c-8468-735ad0b00146-kube-api-access-r5p28\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.459818 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11bbac6a-fd7e-447b-af99-d0ebada848df" path="/var/lib/kubelet/pods/11bbac6a-fd7e-447b-af99-d0ebada848df/volumes" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.460620 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23f901f4-5696-4545-99e5-f8b1583b7431" path="/var/lib/kubelet/pods/23f901f4-5696-4545-99e5-f8b1583b7431/volumes" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.461581 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28b1541e-d0f2-46fa-8518-554fb699ab4e" 
path="/var/lib/kubelet/pods/28b1541e-d0f2-46fa-8518-554fb699ab4e/volumes" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.462213 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4767a1f8-263b-4c49-8cb7-7a7f9b8271dc" path="/var/lib/kubelet/pods/4767a1f8-263b-4c49-8cb7-7a7f9b8271dc/volumes" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.463561 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6732bc1e-c4ad-4519-bcfd-3c973c7528a6" path="/var/lib/kubelet/pods/6732bc1e-c4ad-4519-bcfd-3c973c7528a6/volumes" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.464339 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d04a830-0b11-4766-b9bd-56a6f4b740ca" path="/var/lib/kubelet/pods/7d04a830-0b11-4766-b9bd-56a6f4b740ca/volumes" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.465000 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2902281-e838-41cf-bc34-89850ed0cf83" path="/var/lib/kubelet/pods/b2902281-e838-41cf-bc34-89850ed0cf83/volumes" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.466183 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca949bbc-e75e-48f4-80f2-825ec09184a3" path="/var/lib/kubelet/pods/ca949bbc-e75e-48f4-80f2-825ec09184a3/volumes" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.467496 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="daa99d07-07ba-49aa-82c3-8bcfca0c3564" path="/var/lib/kubelet/pods/daa99d07-07ba-49aa-82c3-8bcfca0c3564/volumes" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.498797 4998 scope.go:117] "RemoveContainer" containerID="f45874e7a3396bb25e17a358e507a1b8a9db053a662b6cd39150143abd3c21f5" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.501734 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-5d94bbfff8-q2v5c" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.503611 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-65da-account-create-update-mnzrq"] Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.523053 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-55ccbc8794-9m7vc" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.550662 4998 scope.go:117] "RemoveContainer" containerID="0f64922061bc19d3ba8a787c55577aa51eb3d22b40d2c5ba7c8080728dee564e" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.557348 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/13caad28-67cf-4251-9a98-e324e6f9722a-config-data-custom\") pod \"13caad28-67cf-4251-9a98-e324e6f9722a\" (UID: \"13caad28-67cf-4251-9a98-e324e6f9722a\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.557482 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0df5b57a-e165-41ef-8e19-30b87b9566f3-logs\") pod \"0df5b57a-e165-41ef-8e19-30b87b9566f3\" (UID: \"0df5b57a-e165-41ef-8e19-30b87b9566f3\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.557538 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ft5n\" (UniqueName: \"kubernetes.io/projected/13caad28-67cf-4251-9a98-e324e6f9722a-kube-api-access-6ft5n\") pod \"13caad28-67cf-4251-9a98-e324e6f9722a\" (UID: \"13caad28-67cf-4251-9a98-e324e6f9722a\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.557569 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13caad28-67cf-4251-9a98-e324e6f9722a-config-data\") pod \"13caad28-67cf-4251-9a98-e324e6f9722a\" (UID: \"13caad28-67cf-4251-9a98-e324e6f9722a\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.557591 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13caad28-67cf-4251-9a98-e324e6f9722a-combined-ca-bundle\") pod \"13caad28-67cf-4251-9a98-e324e6f9722a\" (UID: \"13caad28-67cf-4251-9a98-e324e6f9722a\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.557646 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jfjfq\" (UniqueName: \"kubernetes.io/projected/0df5b57a-e165-41ef-8e19-30b87b9566f3-kube-api-access-jfjfq\") pod \"0df5b57a-e165-41ef-8e19-30b87b9566f3\" (UID: \"0df5b57a-e165-41ef-8e19-30b87b9566f3\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.557669 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0df5b57a-e165-41ef-8e19-30b87b9566f3-config-data-custom\") pod \"0df5b57a-e165-41ef-8e19-30b87b9566f3\" (UID: \"0df5b57a-e165-41ef-8e19-30b87b9566f3\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.557714 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0df5b57a-e165-41ef-8e19-30b87b9566f3-config-data\") pod \"0df5b57a-e165-41ef-8e19-30b87b9566f3\" (UID: \"0df5b57a-e165-41ef-8e19-30b87b9566f3\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.557746 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0df5b57a-e165-41ef-8e19-30b87b9566f3-combined-ca-bundle\") pod \"0df5b57a-e165-41ef-8e19-30b87b9566f3\" (UID: \"0df5b57a-e165-41ef-8e19-30b87b9566f3\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.557838 4998 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/13caad28-67cf-4251-9a98-e324e6f9722a-logs\") pod \"13caad28-67cf-4251-9a98-e324e6f9722a\" (UID: \"13caad28-67cf-4251-9a98-e324e6f9722a\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.558193 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/17941a92-4153-44a0-aab8-57fdb95a272f-operator-scripts\") pod \"keystone-9c9a-account-create-update-p7lvz\" (UID: \"17941a92-4153-44a0-aab8-57fdb95a272f\") " pod="openstack/keystone-9c9a-account-create-update-p7lvz" Feb 03 07:09:20 crc kubenswrapper[4998]: E0203 07:09:20.560712 4998 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Feb 03 07:09:20 crc kubenswrapper[4998]: E0203 07:09:20.560826 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/17941a92-4153-44a0-aab8-57fdb95a272f-operator-scripts podName:17941a92-4153-44a0-aab8-57fdb95a272f nodeName:}" failed. No retries permitted until 2026-02-03 07:09:22.56080235 +0000 UTC m=+1400.847496216 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/17941a92-4153-44a0-aab8-57fdb95a272f-operator-scripts") pod "keystone-9c9a-account-create-update-p7lvz" (UID: "17941a92-4153-44a0-aab8-57fdb95a272f") : configmap "openstack-scripts" not found Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.560943 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0df5b57a-e165-41ef-8e19-30b87b9566f3-logs" (OuterVolumeSpecName: "logs") pod "0df5b57a-e165-41ef-8e19-30b87b9566f3" (UID: "0df5b57a-e165-41ef-8e19-30b87b9566f3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.561192 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.561316 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13caad28-67cf-4251-9a98-e324e6f9722a-logs" (OuterVolumeSpecName: "logs") pod "13caad28-67cf-4251-9a98-e324e6f9722a" (UID: "13caad28-67cf-4251-9a98-e324e6f9722a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.565990 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e51da52-0dd9-4394-bb81-c4a1e534ad17-config-data" (OuterVolumeSpecName: "config-data") pod "3e51da52-0dd9-4394-bb81-c4a1e534ad17" (UID: "3e51da52-0dd9-4394-bb81-c4a1e534ad17"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.565995 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1280d3d-d626-4af9-b262-93fea6a5bbc9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b1280d3d-d626-4af9-b262-93fea6a5bbc9" (UID: "b1280d3d-d626-4af9-b262-93fea6a5bbc9"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.566946 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13caad28-67cf-4251-9a98-e324e6f9722a-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "13caad28-67cf-4251-9a98-e324e6f9722a" (UID: "13caad28-67cf-4251-9a98-e324e6f9722a"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.575154 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0df5b57a-e165-41ef-8e19-30b87b9566f3-kube-api-access-jfjfq" (OuterVolumeSpecName: "kube-api-access-jfjfq") pod "0df5b57a-e165-41ef-8e19-30b87b9566f3" (UID: "0df5b57a-e165-41ef-8e19-30b87b9566f3"). InnerVolumeSpecName "kube-api-access-jfjfq". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.587967 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0df5b57a-e165-41ef-8e19-30b87b9566f3-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "0df5b57a-e165-41ef-8e19-30b87b9566f3" (UID: "0df5b57a-e165-41ef-8e19-30b87b9566f3"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.588219 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1280d3d-d626-4af9-b262-93fea6a5bbc9-config-data" (OuterVolumeSpecName: "config-data") pod "b1280d3d-d626-4af9-b262-93fea6a5bbc9" (UID: "b1280d3d-d626-4af9-b262-93fea6a5bbc9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.588152 4998 scope.go:117] "RemoveContainer" containerID="8cde06b960a90ef44310da758f839bc01b1bda0dd3d7ba611dc66ebe32a3eaba" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.588462 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13caad28-67cf-4251-9a98-e324e6f9722a-kube-api-access-6ft5n" (OuterVolumeSpecName: "kube-api-access-6ft5n") pod "13caad28-67cf-4251-9a98-e324e6f9722a" (UID: "13caad28-67cf-4251-9a98-e324e6f9722a"). InnerVolumeSpecName "kube-api-access-6ft5n". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.616440 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e51da52-0dd9-4394-bb81-c4a1e534ad17-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3e51da52-0dd9-4394-bb81-c4a1e534ad17" (UID: "3e51da52-0dd9-4394-bb81-c4a1e534ad17"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.630172 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18701d06-8e80-4822-9128-dd9ba0e5bf1c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "18701d06-8e80-4822-9128-dd9ba0e5bf1c" (UID: "18701d06-8e80-4822-9128-dd9ba0e5bf1c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.635938 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92b2a8f5-7091-4044-a057-3fc94b78439c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "92b2a8f5-7091-4044-a057-3fc94b78439c" (UID: "92b2a8f5-7091-4044-a057-3fc94b78439c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.643361 4998 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.646958 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c2bbe1b-74b6-4e3c-8468-735ad0b00146-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3c2bbe1b-74b6-4e3c-8468-735ad0b00146" (UID: "3c2bbe1b-74b6-4e3c-8468-735ad0b00146"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.660972 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92b2a8f5-7091-4044-a057-3fc94b78439c-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "92b2a8f5-7091-4044-a057-3fc94b78439c" (UID: "92b2a8f5-7091-4044-a057-3fc94b78439c"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.661962 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92b2a8f5-7091-4044-a057-3fc94b78439c-config-data" (OuterVolumeSpecName: "config-data") pod "92b2a8f5-7091-4044-a057-3fc94b78439c" (UID: "92b2a8f5-7091-4044-a057-3fc94b78439c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.662453 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c122d5d6-c472-46c4-9baf-195893bff38a-config-data\") pod \"c122d5d6-c472-46c4-9baf-195893bff38a\" (UID: \"c122d5d6-c472-46c4-9baf-195893bff38a\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.662753 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c122d5d6-c472-46c4-9baf-195893bff38a-internal-tls-certs\") pod \"c122d5d6-c472-46c4-9baf-195893bff38a\" (UID: \"c122d5d6-c472-46c4-9baf-195893bff38a\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.662905 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c122d5d6-c472-46c4-9baf-195893bff38a-logs\") pod \"c122d5d6-c472-46c4-9baf-195893bff38a\" (UID: \"c122d5d6-c472-46c4-9baf-195893bff38a\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.663018 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92b2a8f5-7091-4044-a057-3fc94b78439c-config-data\") pod \"92b2a8f5-7091-4044-a057-3fc94b78439c\" (UID: \"92b2a8f5-7091-4044-a057-3fc94b78439c\") " Feb 03 07:09:20 crc kubenswrapper[4998]: W0203 07:09:20.663184 4998 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/92b2a8f5-7091-4044-a057-3fc94b78439c/volumes/kubernetes.io~secret/config-data Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.663224 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92b2a8f5-7091-4044-a057-3fc94b78439c-config-data" (OuterVolumeSpecName: "config-data") pod "92b2a8f5-7091-4044-a057-3fc94b78439c" (UID: "92b2a8f5-7091-4044-a057-3fc94b78439c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.663207 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c122d5d6-c472-46c4-9baf-195893bff38a-scripts\") pod \"c122d5d6-c472-46c4-9baf-195893bff38a\" (UID: \"c122d5d6-c472-46c4-9baf-195893bff38a\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.663508 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c122d5d6-c472-46c4-9baf-195893bff38a-public-tls-certs\") pod \"c122d5d6-c472-46c4-9baf-195893bff38a\" (UID: \"c122d5d6-c472-46c4-9baf-195893bff38a\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.663610 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c122d5d6-c472-46c4-9baf-195893bff38a-combined-ca-bundle\") pod \"c122d5d6-c472-46c4-9baf-195893bff38a\" (UID: \"c122d5d6-c472-46c4-9baf-195893bff38a\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.663749 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p46cc\" (UniqueName: \"kubernetes.io/projected/ec1012bb-b11f-4248-aa77-f9076a2a1fc9-kube-api-access-p46cc\") pod \"ec1012bb-b11f-4248-aa77-f9076a2a1fc9\" (UID: \"ec1012bb-b11f-4248-aa77-f9076a2a1fc9\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.663349 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c122d5d6-c472-46c4-9baf-195893bff38a-logs" (OuterVolumeSpecName: "logs") pod "c122d5d6-c472-46c4-9baf-195893bff38a" (UID: "c122d5d6-c472-46c4-9baf-195893bff38a"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.663969 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec1012bb-b11f-4248-aa77-f9076a2a1fc9-config-data\") pod \"ec1012bb-b11f-4248-aa77-f9076a2a1fc9\" (UID: \"ec1012bb-b11f-4248-aa77-f9076a2a1fc9\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.664153 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7z2fv\" (UniqueName: \"kubernetes.io/projected/c122d5d6-c472-46c4-9baf-195893bff38a-kube-api-access-7z2fv\") pod \"c122d5d6-c472-46c4-9baf-195893bff38a\" (UID: \"c122d5d6-c472-46c4-9baf-195893bff38a\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.664271 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec1012bb-b11f-4248-aa77-f9076a2a1fc9-combined-ca-bundle\") pod \"ec1012bb-b11f-4248-aa77-f9076a2a1fc9\" (UID: \"ec1012bb-b11f-4248-aa77-f9076a2a1fc9\") " Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.665079 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vrgqn\" (UniqueName: \"kubernetes.io/projected/17941a92-4153-44a0-aab8-57fdb95a272f-kube-api-access-vrgqn\") pod \"keystone-9c9a-account-create-update-p7lvz\" (UID: \"17941a92-4153-44a0-aab8-57fdb95a272f\") " pod="openstack/keystone-9c9a-account-create-update-p7lvz" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.665693 4998 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/13caad28-67cf-4251-9a98-e324e6f9722a-logs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.665812 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c2bbe1b-74b6-4e3c-8468-735ad0b00146-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.665925 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18701d06-8e80-4822-9128-dd9ba0e5bf1c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.666025 4998 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/13caad28-67cf-4251-9a98-e324e6f9722a-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.666117 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e51da52-0dd9-4394-bb81-c4a1e534ad17-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.666213 4998 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0df5b57a-e165-41ef-8e19-30b87b9566f3-logs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.666308 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92b2a8f5-7091-4044-a057-3fc94b78439c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.666435 4998 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/92b2a8f5-7091-4044-a057-3fc94b78439c-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.666529 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ft5n\" (UniqueName: \"kubernetes.io/projected/13caad28-67cf-4251-9a98-e324e6f9722a-kube-api-access-6ft5n\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.666717 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e51da52-0dd9-4394-bb81-c4a1e534ad17-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.666838 4998 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c122d5d6-c472-46c4-9baf-195893bff38a-logs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.666953 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92b2a8f5-7091-4044-a057-3fc94b78439c-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.667072 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jfjfq\" (UniqueName: \"kubernetes.io/projected/0df5b57a-e165-41ef-8e19-30b87b9566f3-kube-api-access-jfjfq\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.667179 4998 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0df5b57a-e165-41ef-8e19-30b87b9566f3-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.667528 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-qdfsj"] Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.667552 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1280d3d-d626-4af9-b262-93fea6a5bbc9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.667982 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1280d3d-d626-4af9-b262-93fea6a5bbc9-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.668322 4998 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: E0203 07:09:20.671248 4998 projected.go:194] Error preparing data for projected volume kube-api-access-vrgqn for pod openstack/keystone-9c9a-account-create-update-p7lvz: failed to fetch token: serviceaccounts "galera-openstack" not found Feb 03 07:09:20 crc kubenswrapper[4998]: E0203 07:09:20.671324 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/17941a92-4153-44a0-aab8-57fdb95a272f-kube-api-access-vrgqn podName:17941a92-4153-44a0-aab8-57fdb95a272f nodeName:}" failed. No retries permitted until 2026-02-03 07:09:22.671307511 +0000 UTC m=+1400.958001317 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-vrgqn" (UniqueName: "kubernetes.io/projected/17941a92-4153-44a0-aab8-57fdb95a272f-kube-api-access-vrgqn") pod "keystone-9c9a-account-create-update-p7lvz" (UID: "17941a92-4153-44a0-aab8-57fdb95a272f") : failed to fetch token: serviceaccounts "galera-openstack" not found Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.682011 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c122d5d6-c472-46c4-9baf-195893bff38a-scripts" (OuterVolumeSpecName: "scripts") pod "c122d5d6-c472-46c4-9baf-195893bff38a" (UID: "c122d5d6-c472-46c4-9baf-195893bff38a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.682221 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec1012bb-b11f-4248-aa77-f9076a2a1fc9-kube-api-access-p46cc" (OuterVolumeSpecName: "kube-api-access-p46cc") pod "ec1012bb-b11f-4248-aa77-f9076a2a1fc9" (UID: "ec1012bb-b11f-4248-aa77-f9076a2a1fc9"). InnerVolumeSpecName "kube-api-access-p46cc". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.688748 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c122d5d6-c472-46c4-9baf-195893bff38a-kube-api-access-7z2fv" (OuterVolumeSpecName: "kube-api-access-7z2fv") pod "c122d5d6-c472-46c4-9baf-195893bff38a" (UID: "c122d5d6-c472-46c4-9baf-195893bff38a"). InnerVolumeSpecName "kube-api-access-7z2fv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: E0203 07:09:20.691401 4998 kuberuntime_manager.go:1274] "Unhandled Error" err=< Feb 03 07:09:20 crc kubenswrapper[4998]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:0f7943e02fbdd3daec1d3db72fa9396bf37ad3fdd6b0f3119c90e29629e095ed,Command:[/bin/sh -c #!/bin/bash Feb 03 07:09:20 crc kubenswrapper[4998]: Feb 03 07:09:20 crc kubenswrapper[4998]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Feb 03 07:09:20 crc kubenswrapper[4998]: Feb 03 07:09:20 crc kubenswrapper[4998]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Feb 03 07:09:20 crc kubenswrapper[4998]: Feb 03 07:09:20 crc kubenswrapper[4998]: MYSQL_CMD="mysql -h -u root -P 3306" Feb 03 07:09:20 crc kubenswrapper[4998]: Feb 03 07:09:20 crc kubenswrapper[4998]: if [ -n "" ]; then Feb 03 07:09:20 crc kubenswrapper[4998]: GRANT_DATABASE="" Feb 03 07:09:20 crc kubenswrapper[4998]: else Feb 03 07:09:20 crc kubenswrapper[4998]: GRANT_DATABASE="*" Feb 03 07:09:20 crc kubenswrapper[4998]: fi Feb 03 07:09:20 crc kubenswrapper[4998]: Feb 03 07:09:20 crc kubenswrapper[4998]: # going for maximum compatibility here: Feb 03 07:09:20 crc kubenswrapper[4998]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Feb 03 07:09:20 crc kubenswrapper[4998]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Feb 03 07:09:20 crc kubenswrapper[4998]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Feb 03 07:09:20 crc kubenswrapper[4998]: # support updates Feb 03 07:09:20 crc kubenswrapper[4998]: Feb 03 07:09:20 crc kubenswrapper[4998]: $MYSQL_CMD < logger="UnhandledError" Feb 03 07:09:20 crc kubenswrapper[4998]: E0203 07:09:20.692487 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"openstack-mariadb-root-db-secret\\\" not found\"" pod="openstack/root-account-create-update-qdfsj" podUID="5f513eeb-f447-48ba-a53a-096d4aa0275a" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.707615 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59162297-8dd9-4ddd-a18b-8045d2f6c610-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "59162297-8dd9-4ddd-a18b-8045d2f6c610" (UID: "59162297-8dd9-4ddd-a18b-8045d2f6c610"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.731080 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0df5b57a-e165-41ef-8e19-30b87b9566f3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0df5b57a-e165-41ef-8e19-30b87b9566f3" (UID: "0df5b57a-e165-41ef-8e19-30b87b9566f3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.733938 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13caad28-67cf-4251-9a98-e324e6f9722a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "13caad28-67cf-4251-9a98-e324e6f9722a" (UID: "13caad28-67cf-4251-9a98-e324e6f9722a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.735508 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c2bbe1b-74b6-4e3c-8468-735ad0b00146-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "3c2bbe1b-74b6-4e3c-8468-735ad0b00146" (UID: "3c2bbe1b-74b6-4e3c-8468-735ad0b00146"). InnerVolumeSpecName "memcached-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.741958 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a94b6e1-cdf7-4088-9f55-60457fa411f4-config-data" (OuterVolumeSpecName: "config-data") pod "8a94b6e1-cdf7-4088-9f55-60457fa411f4" (UID: "8a94b6e1-cdf7-4088-9f55-60457fa411f4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.748882 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59162297-8dd9-4ddd-a18b-8045d2f6c610-config-data" (OuterVolumeSpecName: "config-data") pod "59162297-8dd9-4ddd-a18b-8045d2f6c610" (UID: "59162297-8dd9-4ddd-a18b-8045d2f6c610"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.771089 4998 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c2bbe1b-74b6-4e3c-8468-735ad0b00146-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.771122 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13caad28-67cf-4251-9a98-e324e6f9722a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.771131 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0df5b57a-e165-41ef-8e19-30b87b9566f3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.771140 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a94b6e1-cdf7-4088-9f55-60457fa411f4-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.771150 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c122d5d6-c472-46c4-9baf-195893bff38a-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.771245 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59162297-8dd9-4ddd-a18b-8045d2f6c610-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.771254 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59162297-8dd9-4ddd-a18b-8045d2f6c610-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.771263 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p46cc\" (UniqueName: \"kubernetes.io/projected/ec1012bb-b11f-4248-aa77-f9076a2a1fc9-kube-api-access-p46cc\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.771316 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7z2fv\" (UniqueName: \"kubernetes.io/projected/c122d5d6-c472-46c4-9baf-195893bff38a-kube-api-access-7z2fv\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.781997 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1280d3d-d626-4af9-b262-93fea6a5bbc9-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "b1280d3d-d626-4af9-b262-93fea6a5bbc9" (UID: "b1280d3d-d626-4af9-b262-93fea6a5bbc9"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.786073 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59162297-8dd9-4ddd-a18b-8045d2f6c610-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "59162297-8dd9-4ddd-a18b-8045d2f6c610" (UID: "59162297-8dd9-4ddd-a18b-8045d2f6c610"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.806580 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18701d06-8e80-4822-9128-dd9ba0e5bf1c-config-data" (OuterVolumeSpecName: "config-data") pod "18701d06-8e80-4822-9128-dd9ba0e5bf1c" (UID: "18701d06-8e80-4822-9128-dd9ba0e5bf1c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.812905 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0df5b57a-e165-41ef-8e19-30b87b9566f3-config-data" (OuterVolumeSpecName: "config-data") pod "0df5b57a-e165-41ef-8e19-30b87b9566f3" (UID: "0df5b57a-e165-41ef-8e19-30b87b9566f3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.840921 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a94b6e1-cdf7-4088-9f55-60457fa411f4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8a94b6e1-cdf7-4088-9f55-60457fa411f4" (UID: "8a94b6e1-cdf7-4088-9f55-60457fa411f4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.841188 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec1012bb-b11f-4248-aa77-f9076a2a1fc9-config-data" (OuterVolumeSpecName: "config-data") pod "ec1012bb-b11f-4248-aa77-f9076a2a1fc9" (UID: "ec1012bb-b11f-4248-aa77-f9076a2a1fc9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.846014 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec1012bb-b11f-4248-aa77-f9076a2a1fc9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ec1012bb-b11f-4248-aa77-f9076a2a1fc9" (UID: "ec1012bb-b11f-4248-aa77-f9076a2a1fc9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.864930 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1280d3d-d626-4af9-b262-93fea6a5bbc9-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "b1280d3d-d626-4af9-b262-93fea6a5bbc9" (UID: "b1280d3d-d626-4af9-b262-93fea6a5bbc9"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.868085 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59162297-8dd9-4ddd-a18b-8045d2f6c610-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "59162297-8dd9-4ddd-a18b-8045d2f6c610" (UID: "59162297-8dd9-4ddd-a18b-8045d2f6c610"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.872028 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13caad28-67cf-4251-9a98-e324e6f9722a-config-data" (OuterVolumeSpecName: "config-data") pod "13caad28-67cf-4251-9a98-e324e6f9722a" (UID: "13caad28-67cf-4251-9a98-e324e6f9722a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.875073 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec1012bb-b11f-4248-aa77-f9076a2a1fc9-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.875105 4998 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/59162297-8dd9-4ddd-a18b-8045d2f6c610-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.875122 4998 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1280d3d-d626-4af9-b262-93fea6a5bbc9-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.875133 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a94b6e1-cdf7-4088-9f55-60457fa411f4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.875144 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec1012bb-b11f-4248-aa77-f9076a2a1fc9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.875155 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18701d06-8e80-4822-9128-dd9ba0e5bf1c-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.875167 4998 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1280d3d-d626-4af9-b262-93fea6a5bbc9-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.875178 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13caad28-67cf-4251-9a98-e324e6f9722a-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.875190 4998 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/59162297-8dd9-4ddd-a18b-8045d2f6c610-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.875201 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0df5b57a-e165-41ef-8e19-30b87b9566f3-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.900707 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e51da52-0dd9-4394-bb81-c4a1e534ad17-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "3e51da52-0dd9-4394-bb81-c4a1e534ad17" (UID: "3e51da52-0dd9-4394-bb81-c4a1e534ad17"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.903041 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c122d5d6-c472-46c4-9baf-195893bff38a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c122d5d6-c472-46c4-9baf-195893bff38a" (UID: "c122d5d6-c472-46c4-9baf-195893bff38a"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.904431 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92b2a8f5-7091-4044-a057-3fc94b78439c-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "92b2a8f5-7091-4044-a057-3fc94b78439c" (UID: "92b2a8f5-7091-4044-a057-3fc94b78439c"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.930064 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c122d5d6-c472-46c4-9baf-195893bff38a-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "c122d5d6-c472-46c4-9baf-195893bff38a" (UID: "c122d5d6-c472-46c4-9baf-195893bff38a"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.958764 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c122d5d6-c472-46c4-9baf-195893bff38a-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "c122d5d6-c472-46c4-9baf-195893bff38a" (UID: "c122d5d6-c472-46c4-9baf-195893bff38a"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.960008 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18701d06-8e80-4822-9128-dd9ba0e5bf1c-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "18701d06-8e80-4822-9128-dd9ba0e5bf1c" (UID: "18701d06-8e80-4822-9128-dd9ba0e5bf1c"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.977617 4998 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/18701d06-8e80-4822-9128-dd9ba0e5bf1c-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.977644 4998 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e51da52-0dd9-4394-bb81-c4a1e534ad17-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.977656 4998 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c122d5d6-c472-46c4-9baf-195893bff38a-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.977667 4998 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/92b2a8f5-7091-4044-a057-3fc94b78439c-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.977677 4998 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c122d5d6-c472-46c4-9baf-195893bff38a-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.977690 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c122d5d6-c472-46c4-9baf-195893bff38a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:20 crc kubenswrapper[4998]: I0203 07:09:20.977823 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c122d5d6-c472-46c4-9baf-195893bff38a-config-data" (OuterVolumeSpecName: "config-data") pod "c122d5d6-c472-46c4-9baf-195893bff38a" (UID: "c122d5d6-c472-46c4-9baf-195893bff38a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.081817 4998 scope.go:117] "RemoveContainer" containerID="f4738971fa2556954577c66479ee76038051cc7ed9700481aa4de2594ba0b983" Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.082469 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c122d5d6-c472-46c4-9baf-195893bff38a-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.100897 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.119476 4998 scope.go:117] "RemoveContainer" containerID="6111c8827a1b20f6b8e01ed8beb9ba58ab11e1ba6f80bd56b10904401d415425" Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.120244 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.140432 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.149570 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.158798 4998 scope.go:117] "RemoveContainer" containerID="f4738971fa2556954577c66479ee76038051cc7ed9700481aa4de2594ba0b983" Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.158889 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 03 07:09:21 crc kubenswrapper[4998]: E0203 07:09:21.159928 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4738971fa2556954577c66479ee76038051cc7ed9700481aa4de2594ba0b983\": container with ID starting with f4738971fa2556954577c66479ee76038051cc7ed9700481aa4de2594ba0b983 not found: ID does not exist" containerID="f4738971fa2556954577c66479ee76038051cc7ed9700481aa4de2594ba0b983" Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.159954 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4738971fa2556954577c66479ee76038051cc7ed9700481aa4de2594ba0b983"} err="failed to get container status \"f4738971fa2556954577c66479ee76038051cc7ed9700481aa4de2594ba0b983\": rpc error: code = NotFound desc = could not find container \"f4738971fa2556954577c66479ee76038051cc7ed9700481aa4de2594ba0b983\": container with ID starting with f4738971fa2556954577c66479ee76038051cc7ed9700481aa4de2594ba0b983 not found: ID does not exist" Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.159976 4998 scope.go:117] "RemoveContainer" containerID="6111c8827a1b20f6b8e01ed8beb9ba58ab11e1ba6f80bd56b10904401d415425" Feb 03 07:09:21 crc kubenswrapper[4998]: E0203 07:09:21.161934 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6111c8827a1b20f6b8e01ed8beb9ba58ab11e1ba6f80bd56b10904401d415425\": container with ID starting with 6111c8827a1b20f6b8e01ed8beb9ba58ab11e1ba6f80bd56b10904401d415425 not found: ID does not exist" containerID="6111c8827a1b20f6b8e01ed8beb9ba58ab11e1ba6f80bd56b10904401d415425" Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.161975 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6111c8827a1b20f6b8e01ed8beb9ba58ab11e1ba6f80bd56b10904401d415425"} err="failed to get 
container status \"6111c8827a1b20f6b8e01ed8beb9ba58ab11e1ba6f80bd56b10904401d415425\": rpc error: code = NotFound desc = could not find container \"6111c8827a1b20f6b8e01ed8beb9ba58ab11e1ba6f80bd56b10904401d415425\": container with ID starting with 6111c8827a1b20f6b8e01ed8beb9ba58ab11e1ba6f80bd56b10904401d415425 not found: ID does not exist" Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.162000 4998 scope.go:117] "RemoveContainer" containerID="32b39b9d482c5108ab853ad62904b980cd74eaab84e9164c4243b13c2f37c3c5" Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.172310 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.182734 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Feb 03 07:09:21 crc kubenswrapper[4998]: E0203 07:09:21.183689 4998 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Feb 03 07:09:21 crc kubenswrapper[4998]: E0203 07:09:21.183736 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/cc9d5160-2c51-474c-aca1-1af693753ee8-config-data podName:cc9d5160-2c51-474c-aca1-1af693753ee8 nodeName:}" failed. No retries permitted until 2026-02-03 07:09:29.183721105 +0000 UTC m=+1407.470414911 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/cc9d5160-2c51-474c-aca1-1af693753ee8-config-data") pod "rabbitmq-cell1-server-0" (UID: "cc9d5160-2c51-474c-aca1-1af693753ee8") : configmap "rabbitmq-cell1-config-data" not found Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.186015 4998 scope.go:117] "RemoveContainer" containerID="7d86f1f77c5869d58d533147a1dd56150d79e3f61aa23ff73cbef100c39012e2" Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.191348 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/memcached-0"] Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.205452 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.207217 4998 scope.go:117] "RemoveContainer" containerID="32b39b9d482c5108ab853ad62904b980cd74eaab84e9164c4243b13c2f37c3c5" Feb 03 07:09:21 crc kubenswrapper[4998]: E0203 07:09:21.207644 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"32b39b9d482c5108ab853ad62904b980cd74eaab84e9164c4243b13c2f37c3c5\": container with ID starting with 32b39b9d482c5108ab853ad62904b980cd74eaab84e9164c4243b13c2f37c3c5 not found: ID does not exist" containerID="32b39b9d482c5108ab853ad62904b980cd74eaab84e9164c4243b13c2f37c3c5" Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.207683 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32b39b9d482c5108ab853ad62904b980cd74eaab84e9164c4243b13c2f37c3c5"} err="failed to get container status \"32b39b9d482c5108ab853ad62904b980cd74eaab84e9164c4243b13c2f37c3c5\": rpc error: code = NotFound desc = could not find container \"32b39b9d482c5108ab853ad62904b980cd74eaab84e9164c4243b13c2f37c3c5\": container with ID starting with 32b39b9d482c5108ab853ad62904b980cd74eaab84e9164c4243b13c2f37c3c5 not found: ID does not exist" Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.207708 4998 scope.go:117] "RemoveContainer" containerID="7d86f1f77c5869d58d533147a1dd56150d79e3f61aa23ff73cbef100c39012e2" 
Feb 03 07:09:21 crc kubenswrapper[4998]: E0203 07:09:21.207971 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d86f1f77c5869d58d533147a1dd56150d79e3f61aa23ff73cbef100c39012e2\": container with ID starting with 7d86f1f77c5869d58d533147a1dd56150d79e3f61aa23ff73cbef100c39012e2 not found: ID does not exist" containerID="7d86f1f77c5869d58d533147a1dd56150d79e3f61aa23ff73cbef100c39012e2"
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.207995 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d86f1f77c5869d58d533147a1dd56150d79e3f61aa23ff73cbef100c39012e2"} err="failed to get container status \"7d86f1f77c5869d58d533147a1dd56150d79e3f61aa23ff73cbef100c39012e2\": rpc error: code = NotFound desc = could not find container \"7d86f1f77c5869d58d533147a1dd56150d79e3f61aa23ff73cbef100c39012e2\": container with ID starting with 7d86f1f77c5869d58d533147a1dd56150d79e3f61aa23ff73cbef100c39012e2 not found: ID does not exist"
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.208009 4998 scope.go:117] "RemoveContainer" containerID="df2f93789843ee0c929625754c662eef472a6b4dd88ccbe2cb42cb58b6f17304"
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.212907 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.230655 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-747cb48568-vkq22"]
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.239763 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-747cb48568-vkq22"]
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.246889 4998 scope.go:117] "RemoveContainer" containerID="e5a74b71aeac6fcb20830db0c86ae6a7644b150ccbef95f2ae2b39cce49f5353"
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.249294 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.255867 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"]
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.283284 4998 scope.go:117] "RemoveContainer" containerID="df2f93789843ee0c929625754c662eef472a6b4dd88ccbe2cb42cb58b6f17304"
Feb 03 07:09:21 crc kubenswrapper[4998]: E0203 07:09:21.283943 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df2f93789843ee0c929625754c662eef472a6b4dd88ccbe2cb42cb58b6f17304\": container with ID starting with df2f93789843ee0c929625754c662eef472a6b4dd88ccbe2cb42cb58b6f17304 not found: ID does not exist" containerID="df2f93789843ee0c929625754c662eef472a6b4dd88ccbe2cb42cb58b6f17304"
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.283978 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df2f93789843ee0c929625754c662eef472a6b4dd88ccbe2cb42cb58b6f17304"} err="failed to get container status \"df2f93789843ee0c929625754c662eef472a6b4dd88ccbe2cb42cb58b6f17304\": rpc error: code = NotFound desc = could not find container \"df2f93789843ee0c929625754c662eef472a6b4dd88ccbe2cb42cb58b6f17304\": container with ID starting with df2f93789843ee0c929625754c662eef472a6b4dd88ccbe2cb42cb58b6f17304 not found: ID does not exist"
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.284000 4998 scope.go:117] "RemoveContainer" containerID="e5a74b71aeac6fcb20830db0c86ae6a7644b150ccbef95f2ae2b39cce49f5353"
Feb 03 07:09:21 crc kubenswrapper[4998]: E0203 07:09:21.284299 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e5a74b71aeac6fcb20830db0c86ae6a7644b150ccbef95f2ae2b39cce49f5353\": container with ID starting with e5a74b71aeac6fcb20830db0c86ae6a7644b150ccbef95f2ae2b39cce49f5353 not found: ID does not exist" containerID="e5a74b71aeac6fcb20830db0c86ae6a7644b150ccbef95f2ae2b39cce49f5353"
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.284350 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5a74b71aeac6fcb20830db0c86ae6a7644b150ccbef95f2ae2b39cce49f5353"} err="failed to get container status \"e5a74b71aeac6fcb20830db0c86ae6a7644b150ccbef95f2ae2b39cce49f5353\": rpc error: code = NotFound desc = could not find container \"e5a74b71aeac6fcb20830db0c86ae6a7644b150ccbef95f2ae2b39cce49f5353\": container with ID starting with e5a74b71aeac6fcb20830db0c86ae6a7644b150ccbef95f2ae2b39cce49f5353 not found: ID does not exist"
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.373245 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-78755df597-h9t98" event={"ID":"0df5b57a-e165-41ef-8e19-30b87b9566f3","Type":"ContainerDied","Data":"853d99251509041b74e3f3b997fb67cbc9cf4092af979f6b11aae04063d8580d"}
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.373298 4998 scope.go:117] "RemoveContainer" containerID="5b99f558d74ddfe530b75fa587287403c988c189202261dc57a486311499d5c9"
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.373398 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-78755df597-h9t98"
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.379469 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.380743 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"ec1012bb-b11f-4248-aa77-f9076a2a1fc9","Type":"ContainerDied","Data":"08e0a0b900f5ed5356f147f8035f9a669e6c6cbdc6a6f19074d45726d8082e6d"}
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.390551 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-qdfsj" event={"ID":"5f513eeb-f447-48ba-a53a-096d4aa0275a","Type":"ContainerStarted","Data":"47133428fbbe8ccc4e3ae256d056ff15d38f52e08ddc15a657d2eb13f5ac9a84"}
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.405891 4998 scope.go:117] "RemoveContainer" containerID="e0270c6ae00fb3fd677409d836f450c36d50d559a6439b219567647b04b70ce8"
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.416673 4998 generic.go:334] "Generic (PLEG): container finished" podID="f5714626-00c5-4b11-b056-40ff428fc017" containerID="7fc3cd4817b7865d5e94684c7fc6231e72d32602e47f8979e462f57f046e481a" exitCode=0
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.416718 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"f5714626-00c5-4b11-b056-40ff428fc017","Type":"ContainerDied","Data":"7fc3cd4817b7865d5e94684c7fc6231e72d32602e47f8979e462f57f046e481a"}
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.419759 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-5d94bbfff8-q2v5c"
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.424391 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5d94bbfff8-q2v5c" event={"ID":"13caad28-67cf-4251-9a98-e324e6f9722a","Type":"ContainerDied","Data":"ed5c7edf59f7b745ecdcf9391b4cc4768f798369d1d2c58753eec22b448e5b42"}
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.427506 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-55ccbc8794-9m7vc" event={"ID":"c122d5d6-c472-46c4-9baf-195893bff38a","Type":"ContainerDied","Data":"fb5dd646f9bb3c40a0c2cd2e6254048d8003d080acb3ca4b420968ebd8b882ce"}
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.427591 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-55ccbc8794-9m7vc"
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.434492 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9c9a-account-create-update-p7lvz"
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.445803 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-78755df597-h9t98"]
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.464970 4998 scope.go:117] "RemoveContainer" containerID="659621b3871feec5ab21a36f8efb54aeb0a5418dddb477388b03fcd569919a1f"
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.465115 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-worker-78755df597-h9t98"]
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.513169 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.520150 4998 scope.go:117] "RemoveContainer" containerID="59b03e0b0b59b7355fdc4f5e7fb6fc2a865970667616376aeba4c7993df8fea4"
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.526265 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Feb 03 07:09:21 crc kubenswrapper[4998]: E0203 07:09:21.538360 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba is running failed: container process not found" containerID="f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.538766 4998 scope.go:117] "RemoveContainer" containerID="cabb5051e92ea728b58b2e5f6c017812f04640a06b66da9b92064814d7881f01"
Feb 03 07:09:21 crc kubenswrapper[4998]: E0203 07:09:21.538838 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba is running failed: container process not found" containerID="f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Feb 03 07:09:21 crc kubenswrapper[4998]: E0203 07:09:21.539289 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba is running failed: container process not found" containerID="f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Feb 03 07:09:21 crc kubenswrapper[4998]: E0203 07:09:21.539359 4998 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-t4p58" podUID="5e71558d-268c-4680-b43c-9fb48f34b38f" containerName="ovsdb-server"
Feb 03 07:09:21 crc kubenswrapper[4998]: E0203 07:09:21.539964 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5924834a1fe26ed73de03c1f7f47a03b21661cf375e099684ef40cf961a44b28" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Feb 03 07:09:21 crc kubenswrapper[4998]: E0203 07:09:21.541795 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5924834a1fe26ed73de03c1f7f47a03b21661cf375e099684ef40cf961a44b28" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Feb 03 07:09:21 crc kubenswrapper[4998]: E0203 07:09:21.545742 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5924834a1fe26ed73de03c1f7f47a03b21661cf375e099684ef40cf961a44b28" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Feb 03 07:09:21 crc kubenswrapper[4998]: E0203 07:09:21.545815 4998 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-t4p58" podUID="5e71558d-268c-4680-b43c-9fb48f34b38f" containerName="ovs-vswitchd"
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.553646 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-9c9a-account-create-update-p7lvz"]
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.564075 4998 scope.go:117] "RemoveContainer" containerID="115a3825a27cab7fdfb490f904a7aeed06539b9f2cfa910218ec08fed15d9afa"
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.582831 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-9c9a-account-create-update-p7lvz"]
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.597827 4998 scope.go:117] "RemoveContainer" containerID="dd8e69804bea83a06ecc142f3c827d453c26167985e7a893d560d1c6cc6142bb"
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.601338 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-55ccbc8794-9m7vc"]
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.621573 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-55ccbc8794-9m7vc"]
Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.637803 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-5d94bbfff8-q2v5c"]
pods=["openstack/barbican-keystone-listener-5d94bbfff8-q2v5c"] Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.699540 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vrgqn\" (UniqueName: \"kubernetes.io/projected/17941a92-4153-44a0-aab8-57fdb95a272f-kube-api-access-vrgqn\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.699600 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/17941a92-4153-44a0-aab8-57fdb95a272f-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.852593 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Feb 03 07:09:21 crc kubenswrapper[4998]: I0203 07:09:21.857362 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-qdfsj" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.007730 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wg485\" (UniqueName: \"kubernetes.io/projected/5f513eeb-f447-48ba-a53a-096d4aa0275a-kube-api-access-wg485\") pod \"5f513eeb-f447-48ba-a53a-096d4aa0275a\" (UID: \"5f513eeb-f447-48ba-a53a-096d4aa0275a\") " Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.007790 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f513eeb-f447-48ba-a53a-096d4aa0275a-operator-scripts\") pod \"5f513eeb-f447-48ba-a53a-096d4aa0275a\" (UID: \"5f513eeb-f447-48ba-a53a-096d4aa0275a\") " Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.007827 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f5714626-00c5-4b11-b056-40ff428fc017-operator-scripts\") pod \"f5714626-00c5-4b11-b056-40ff428fc017\" (UID: \"f5714626-00c5-4b11-b056-40ff428fc017\") " Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.007871 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f5714626-00c5-4b11-b056-40ff428fc017-config-data-default\") pod \"f5714626-00c5-4b11-b056-40ff428fc017\" (UID: \"f5714626-00c5-4b11-b056-40ff428fc017\") " Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.007901 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5714626-00c5-4b11-b056-40ff428fc017-galera-tls-certs\") pod \"f5714626-00c5-4b11-b056-40ff428fc017\" (UID: \"f5714626-00c5-4b11-b056-40ff428fc017\") " Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.007966 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"f5714626-00c5-4b11-b056-40ff428fc017\" (UID: \"f5714626-00c5-4b11-b056-40ff428fc017\") " Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.007996 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5714626-00c5-4b11-b056-40ff428fc017-combined-ca-bundle\") pod \"f5714626-00c5-4b11-b056-40ff428fc017\" (UID: \"f5714626-00c5-4b11-b056-40ff428fc017\") " Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.008020 4998 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f5714626-00c5-4b11-b056-40ff428fc017-config-data-generated\") pod \"f5714626-00c5-4b11-b056-40ff428fc017\" (UID: \"f5714626-00c5-4b11-b056-40ff428fc017\") " Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.008052 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f5714626-00c5-4b11-b056-40ff428fc017-kolla-config\") pod \"f5714626-00c5-4b11-b056-40ff428fc017\" (UID: \"f5714626-00c5-4b11-b056-40ff428fc017\") " Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.008068 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l4v8n\" (UniqueName: \"kubernetes.io/projected/f5714626-00c5-4b11-b056-40ff428fc017-kube-api-access-l4v8n\") pod \"f5714626-00c5-4b11-b056-40ff428fc017\" (UID: \"f5714626-00c5-4b11-b056-40ff428fc017\") " Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.013008 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f513eeb-f447-48ba-a53a-096d4aa0275a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5f513eeb-f447-48ba-a53a-096d4aa0275a" (UID: "5f513eeb-f447-48ba-a53a-096d4aa0275a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.013728 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5714626-00c5-4b11-b056-40ff428fc017-kube-api-access-l4v8n" (OuterVolumeSpecName: "kube-api-access-l4v8n") pod "f5714626-00c5-4b11-b056-40ff428fc017" (UID: "f5714626-00c5-4b11-b056-40ff428fc017"). InnerVolumeSpecName "kube-api-access-l4v8n". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.014131 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5714626-00c5-4b11-b056-40ff428fc017-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "f5714626-00c5-4b11-b056-40ff428fc017" (UID: "f5714626-00c5-4b11-b056-40ff428fc017"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.014642 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5714626-00c5-4b11-b056-40ff428fc017-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "f5714626-00c5-4b11-b056-40ff428fc017" (UID: "f5714626-00c5-4b11-b056-40ff428fc017"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.015637 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5714626-00c5-4b11-b056-40ff428fc017-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f5714626-00c5-4b11-b056-40ff428fc017" (UID: "f5714626-00c5-4b11-b056-40ff428fc017"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.016354 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5714626-00c5-4b11-b056-40ff428fc017-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "f5714626-00c5-4b11-b056-40ff428fc017" (UID: "f5714626-00c5-4b11-b056-40ff428fc017"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.017178 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f513eeb-f447-48ba-a53a-096d4aa0275a-kube-api-access-wg485" (OuterVolumeSpecName: "kube-api-access-wg485") pod "5f513eeb-f447-48ba-a53a-096d4aa0275a" (UID: "5f513eeb-f447-48ba-a53a-096d4aa0275a"). InnerVolumeSpecName "kube-api-access-wg485". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.039182 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "mysql-db") pod "f5714626-00c5-4b11-b056-40ff428fc017" (UID: "f5714626-00c5-4b11-b056-40ff428fc017"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.039296 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5714626-00c5-4b11-b056-40ff428fc017-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f5714626-00c5-4b11-b056-40ff428fc017" (UID: "f5714626-00c5-4b11-b056-40ff428fc017"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.057329 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5714626-00c5-4b11-b056-40ff428fc017-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "f5714626-00c5-4b11-b056-40ff428fc017" (UID: "f5714626-00c5-4b11-b056-40ff428fc017"). InnerVolumeSpecName "galera-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.110940 4998 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f5714626-00c5-4b11-b056-40ff428fc017-kolla-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.110986 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l4v8n\" (UniqueName: \"kubernetes.io/projected/f5714626-00c5-4b11-b056-40ff428fc017-kube-api-access-l4v8n\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.111003 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wg485\" (UniqueName: \"kubernetes.io/projected/5f513eeb-f447-48ba-a53a-096d4aa0275a-kube-api-access-wg485\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.111015 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f513eeb-f447-48ba-a53a-096d4aa0275a-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.111029 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f5714626-00c5-4b11-b056-40ff428fc017-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.111040 4998 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f5714626-00c5-4b11-b056-40ff428fc017-config-data-default\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.111050 4998 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5714626-00c5-4b11-b056-40ff428fc017-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.111084 4998 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.111097 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5714626-00c5-4b11-b056-40ff428fc017-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.111109 4998 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f5714626-00c5-4b11-b056-40ff428fc017-config-data-generated\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.129589 4998 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.147913 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_07088226-5029-4477-a6e1-85fd28c08f4b/ovn-northd/0.log" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.148059 4998 util.go:48] "No ready sandbox for pod can be found. 
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.148059 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.212685 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/07088226-5029-4477-a6e1-85fd28c08f4b-ovn-northd-tls-certs\") pod \"07088226-5029-4477-a6e1-85fd28c08f4b\" (UID: \"07088226-5029-4477-a6e1-85fd28c08f4b\") "
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.212755 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/07088226-5029-4477-a6e1-85fd28c08f4b-scripts\") pod \"07088226-5029-4477-a6e1-85fd28c08f4b\" (UID: \"07088226-5029-4477-a6e1-85fd28c08f4b\") "
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.212808 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07088226-5029-4477-a6e1-85fd28c08f4b-combined-ca-bundle\") pod \"07088226-5029-4477-a6e1-85fd28c08f4b\" (UID: \"07088226-5029-4477-a6e1-85fd28c08f4b\") "
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.212856 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07088226-5029-4477-a6e1-85fd28c08f4b-config\") pod \"07088226-5029-4477-a6e1-85fd28c08f4b\" (UID: \"07088226-5029-4477-a6e1-85fd28c08f4b\") "
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.212942 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/07088226-5029-4477-a6e1-85fd28c08f4b-ovn-rundir\") pod \"07088226-5029-4477-a6e1-85fd28c08f4b\" (UID: \"07088226-5029-4477-a6e1-85fd28c08f4b\") "
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.212967 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/07088226-5029-4477-a6e1-85fd28c08f4b-metrics-certs-tls-certs\") pod \"07088226-5029-4477-a6e1-85fd28c08f4b\" (UID: \"07088226-5029-4477-a6e1-85fd28c08f4b\") "
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.213016 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r9d9h\" (UniqueName: \"kubernetes.io/projected/07088226-5029-4477-a6e1-85fd28c08f4b-kube-api-access-r9d9h\") pod \"07088226-5029-4477-a6e1-85fd28c08f4b\" (UID: \"07088226-5029-4477-a6e1-85fd28c08f4b\") "
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.213433 4998 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\""
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.217194 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07088226-5029-4477-a6e1-85fd28c08f4b-kube-api-access-r9d9h" (OuterVolumeSpecName: "kube-api-access-r9d9h") pod "07088226-5029-4477-a6e1-85fd28c08f4b" (UID: "07088226-5029-4477-a6e1-85fd28c08f4b"). InnerVolumeSpecName "kube-api-access-r9d9h". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.217696 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07088226-5029-4477-a6e1-85fd28c08f4b-config" (OuterVolumeSpecName: "config") pod "07088226-5029-4477-a6e1-85fd28c08f4b" (UID: "07088226-5029-4477-a6e1-85fd28c08f4b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.218116 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/07088226-5029-4477-a6e1-85fd28c08f4b-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "07088226-5029-4477-a6e1-85fd28c08f4b" (UID: "07088226-5029-4477-a6e1-85fd28c08f4b"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.225056 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07088226-5029-4477-a6e1-85fd28c08f4b-scripts" (OuterVolumeSpecName: "scripts") pod "07088226-5029-4477-a6e1-85fd28c08f4b" (UID: "07088226-5029-4477-a6e1-85fd28c08f4b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.246806 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07088226-5029-4477-a6e1-85fd28c08f4b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "07088226-5029-4477-a6e1-85fd28c08f4b" (UID: "07088226-5029-4477-a6e1-85fd28c08f4b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.304709 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07088226-5029-4477-a6e1-85fd28c08f4b-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "07088226-5029-4477-a6e1-85fd28c08f4b" (UID: "07088226-5029-4477-a6e1-85fd28c08f4b"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.310041 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07088226-5029-4477-a6e1-85fd28c08f4b-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "07088226-5029-4477-a6e1-85fd28c08f4b" (UID: "07088226-5029-4477-a6e1-85fd28c08f4b"). InnerVolumeSpecName "ovn-northd-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.315095 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07088226-5029-4477-a6e1-85fd28c08f4b-config\") on node \"crc\" DevicePath \"\""
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.315133 4998 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/07088226-5029-4477-a6e1-85fd28c08f4b-ovn-rundir\") on node \"crc\" DevicePath \"\""
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.315147 4998 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/07088226-5029-4477-a6e1-85fd28c08f4b-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\""
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.315160 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r9d9h\" (UniqueName: \"kubernetes.io/projected/07088226-5029-4477-a6e1-85fd28c08f4b-kube-api-access-r9d9h\") on node \"crc\" DevicePath \"\""
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.315172 4998 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/07088226-5029-4477-a6e1-85fd28c08f4b-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\""
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.315187 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/07088226-5029-4477-a6e1-85fd28c08f4b-scripts\") on node \"crc\" DevicePath \"\""
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.315197 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07088226-5029-4477-a6e1-85fd28c08f4b-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.376162 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Feb 03 07:09:22 crc kubenswrapper[4998]: E0203 07:09:22.420157 4998 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found
Feb 03 07:09:22 crc kubenswrapper[4998]: E0203 07:09:22.420239 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-config-data podName:59f5a5d7-787a-4941-a2d3-2fe8db65cb31 nodeName:}" failed. No retries permitted until 2026-02-03 07:09:30.42021628 +0000 UTC m=+1408.706910086 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-config-data") pod "rabbitmq-server-0" (UID: "59f5a5d7-787a-4941-a2d3-2fe8db65cb31") : configmap "rabbitmq-config-data" not found
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.442318 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0df5b57a-e165-41ef-8e19-30b87b9566f3" path="/var/lib/kubelet/pods/0df5b57a-e165-41ef-8e19-30b87b9566f3/volumes"
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.443156 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13caad28-67cf-4251-9a98-e324e6f9722a" path="/var/lib/kubelet/pods/13caad28-67cf-4251-9a98-e324e6f9722a/volumes"
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.443707 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17941a92-4153-44a0-aab8-57fdb95a272f" path="/var/lib/kubelet/pods/17941a92-4153-44a0-aab8-57fdb95a272f/volumes"
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.444665 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="18701d06-8e80-4822-9128-dd9ba0e5bf1c" path="/var/lib/kubelet/pods/18701d06-8e80-4822-9128-dd9ba0e5bf1c/volumes"
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.445592 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c2bbe1b-74b6-4e3c-8468-735ad0b00146" path="/var/lib/kubelet/pods/3c2bbe1b-74b6-4e3c-8468-735ad0b00146/volumes"
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.446235 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e51da52-0dd9-4394-bb81-c4a1e534ad17" path="/var/lib/kubelet/pods/3e51da52-0dd9-4394-bb81-c4a1e534ad17/volumes"
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.447370 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="59162297-8dd9-4ddd-a18b-8045d2f6c610" path="/var/lib/kubelet/pods/59162297-8dd9-4ddd-a18b-8045d2f6c610/volumes"
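The nestedpendingoperations records in this section ("No retries permitted until ... durationBeforeRetry 8s") show per-volume exponential backoff: each failed MountVolume.SetUp doubles the wait before the next attempt, up to a cap. A sketch of that policy; the 500ms initial delay, factor of 2, and roughly two-minute cap match commonly cited kubelet defaults but are assumptions here, not read from this log:

package main

import (
	"fmt"
	"time"
)

// backoff tracks per-operation retry state: each failure doubles the
// delay until a cap, which is how an assumed 500ms initial delay reaches
// the 8s "durationBeforeRetry" in the records above after a few failures.
type backoff struct {
	delay, max time.Duration
	next       time.Time
}

func (b *backoff) fail(now time.Time) {
	if b.delay == 0 {
		b.delay = 500 * time.Millisecond // assumed initial delay
	} else if b.delay *= 2; b.delay > b.max {
		b.delay = b.max
	}
	b.next = now.Add(b.delay)
}

func (b *backoff) allowed(now time.Time) bool { return !now.Before(b.next) }

func main() {
	b := &backoff{max: 2*time.Minute + 2*time.Second} // assumed cap
	now := time.Now()
	for i := 1; i <= 5; i++ {
		b.fail(now)
		fmt.Printf("failure %d: retry in %v (allowed now: %v)\n", i, b.delay, b.allowed(now))
	}
	// The fifth consecutive failure prints "retry in 8s", matching the log.
}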
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.449621 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-qdfsj"
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.448256 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a94b6e1-cdf7-4088-9f55-60457fa411f4" path="/var/lib/kubelet/pods/8a94b6e1-cdf7-4088-9f55-60457fa411f4/volumes"
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.459687 4998 generic.go:334] "Generic (PLEG): container finished" podID="59f5a5d7-787a-4941-a2d3-2fe8db65cb31" containerID="098c0a6d87cc827e487059fcc544a85b758f02d99c658f66e5fbf5c96ee6f34c" exitCode=0
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.459741 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92b2a8f5-7091-4044-a057-3fc94b78439c" path="/var/lib/kubelet/pods/92b2a8f5-7091-4044-a057-3fc94b78439c/volumes"
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.463840 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1280d3d-d626-4af9-b262-93fea6a5bbc9" path="/var/lib/kubelet/pods/b1280d3d-d626-4af9-b262-93fea6a5bbc9/volumes"
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.466048 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c122d5d6-c472-46c4-9baf-195893bff38a" path="/var/lib/kubelet/pods/c122d5d6-c472-46c4-9baf-195893bff38a/volumes"
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.467364 4998 generic.go:334] "Generic (PLEG): container finished" podID="cc9d5160-2c51-474c-aca1-1af693753ee8" containerID="0e9dcff5c5f1e34e7f9a045f44319200346178396df08e6468976e6cb812d2c2" exitCode=0
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.467471 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.467972 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df47c2af-c1ee-4ab7-9691-ea6e28fa1831" path="/var/lib/kubelet/pods/df47c2af-c1ee-4ab7-9691-ea6e28fa1831/volumes"
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.471566 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec1012bb-b11f-4248-aa77-f9076a2a1fc9" path="/var/lib/kubelet/pods/ec1012bb-b11f-4248-aa77-f9076a2a1fc9/volumes"
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.476234 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.486377 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_07088226-5029-4477-a6e1-85fd28c08f4b/ovn-northd/0.log"
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.486417 4998 generic.go:334] "Generic (PLEG): container finished" podID="07088226-5029-4477-a6e1-85fd28c08f4b" containerID="dd33cc14982f5158ae849d70849339c78aeeeb743bea375fe57e0cde15366ab8" exitCode=139
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.486486 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.491272 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-qdfsj" event={"ID":"5f513eeb-f447-48ba-a53a-096d4aa0275a","Type":"ContainerDied","Data":"47133428fbbe8ccc4e3ae256d056ff15d38f52e08ddc15a657d2eb13f5ac9a84"}
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.491311 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"59f5a5d7-787a-4941-a2d3-2fe8db65cb31","Type":"ContainerDied","Data":"098c0a6d87cc827e487059fcc544a85b758f02d99c658f66e5fbf5c96ee6f34c"}
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.491327 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"cc9d5160-2c51-474c-aca1-1af693753ee8","Type":"ContainerDied","Data":"0e9dcff5c5f1e34e7f9a045f44319200346178396df08e6468976e6cb812d2c2"}
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.491346 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"cc9d5160-2c51-474c-aca1-1af693753ee8","Type":"ContainerDied","Data":"a9e81aac73ee011ec939e75ef522f0676589317f53a4cf153e1c39705a414787"}
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.491357 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"f5714626-00c5-4b11-b056-40ff428fc017","Type":"ContainerDied","Data":"a3a00d41560da21f0e8ee8c1772a44096375d772023552c296e12fdc5ebfa631"}
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.491366 4998 scope.go:117] "RemoveContainer" containerID="0e9dcff5c5f1e34e7f9a045f44319200346178396df08e6468976e6cb812d2c2"
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.491376 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"07088226-5029-4477-a6e1-85fd28c08f4b","Type":"ContainerDied","Data":"dd33cc14982f5158ae849d70849339c78aeeeb743bea375fe57e0cde15366ab8"}
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.491526 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"07088226-5029-4477-a6e1-85fd28c08f4b","Type":"ContainerDied","Data":"2cdd99896bab2110d33a99e37505b18a533323515d07709ebc04752d31e7014b"}
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.521441 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cc9d5160-2c51-474c-aca1-1af693753ee8-config-data\") pod \"cc9d5160-2c51-474c-aca1-1af693753ee8\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") "
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.521509 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"cc9d5160-2c51-474c-aca1-1af693753ee8\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") "
Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.521558 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cc9d5160-2c51-474c-aca1-1af693753ee8-rabbitmq-confd\") pod \"cc9d5160-2c51-474c-aca1-1af693753ee8\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") "
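The "Generic (PLEG): container finished" records above carry the wait status: exitCode=0 for the clean shutdowns of the rabbitmq and galera containers, but exitCode=139 for ovn-northd, which is 128+11, i.e. the process died on SIGSEGV during teardown. A small decoder for that convention:

package main

import (
	"fmt"
	"syscall"
)

// describe decodes the numeric exit code recorded by the runtime:
// 0 is a clean exit, 1..128 is an application error code, and 128+N
// means the process was killed by signal N (139 = 128+11 = SIGSEGV).
func describe(exitCode int) string {
	switch {
	case exitCode == 0:
		return "clean exit"
	case exitCode > 128:
		sig := syscall.Signal(exitCode - 128)
		return fmt.Sprintf("killed by signal %d (%s)", exitCode-128, sig)
	default:
		return fmt.Sprintf("exited with error code %d", exitCode)
	}
}

func main() {
	for _, c := range []int{0, 139, 143} {
		fmt.Printf("exitCode=%d: %s\n", c, describe(c))
	}
}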
\"kubernetes.io/downward-api/cc9d5160-2c51-474c-aca1-1af693753ee8-pod-info\") pod \"cc9d5160-2c51-474c-aca1-1af693753ee8\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.521629 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hmfjm\" (UniqueName: \"kubernetes.io/projected/cc9d5160-2c51-474c-aca1-1af693753ee8-kube-api-access-hmfjm\") pod \"cc9d5160-2c51-474c-aca1-1af693753ee8\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.521665 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cc9d5160-2c51-474c-aca1-1af693753ee8-rabbitmq-tls\") pod \"cc9d5160-2c51-474c-aca1-1af693753ee8\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.521736 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cc9d5160-2c51-474c-aca1-1af693753ee8-rabbitmq-plugins\") pod \"cc9d5160-2c51-474c-aca1-1af693753ee8\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.521823 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cc9d5160-2c51-474c-aca1-1af693753ee8-erlang-cookie-secret\") pod \"cc9d5160-2c51-474c-aca1-1af693753ee8\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.521859 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cc9d5160-2c51-474c-aca1-1af693753ee8-server-conf\") pod \"cc9d5160-2c51-474c-aca1-1af693753ee8\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.521887 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cc9d5160-2c51-474c-aca1-1af693753ee8-rabbitmq-erlang-cookie\") pod \"cc9d5160-2c51-474c-aca1-1af693753ee8\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.521962 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cc9d5160-2c51-474c-aca1-1af693753ee8-plugins-conf\") pod \"cc9d5160-2c51-474c-aca1-1af693753ee8\" (UID: \"cc9d5160-2c51-474c-aca1-1af693753ee8\") " Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.522591 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc9d5160-2c51-474c-aca1-1af693753ee8-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "cc9d5160-2c51-474c-aca1-1af693753ee8" (UID: "cc9d5160-2c51-474c-aca1-1af693753ee8"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.523370 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc9d5160-2c51-474c-aca1-1af693753ee8-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "cc9d5160-2c51-474c-aca1-1af693753ee8" (UID: "cc9d5160-2c51-474c-aca1-1af693753ee8"). InnerVolumeSpecName "rabbitmq-erlang-cookie". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.523661 4998 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cc9d5160-2c51-474c-aca1-1af693753ee8-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.523682 4998 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cc9d5160-2c51-474c-aca1-1af693753ee8-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.523895 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc9d5160-2c51-474c-aca1-1af693753ee8-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "cc9d5160-2c51-474c-aca1-1af693753ee8" (UID: "cc9d5160-2c51-474c-aca1-1af693753ee8"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.524179 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "persistence") pod "cc9d5160-2c51-474c-aca1-1af693753ee8" (UID: "cc9d5160-2c51-474c-aca1-1af693753ee8"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.526900 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/cc9d5160-2c51-474c-aca1-1af693753ee8-pod-info" (OuterVolumeSpecName: "pod-info") pod "cc9d5160-2c51-474c-aca1-1af693753ee8" (UID: "cc9d5160-2c51-474c-aca1-1af693753ee8"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.527165 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc9d5160-2c51-474c-aca1-1af693753ee8-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "cc9d5160-2c51-474c-aca1-1af693753ee8" (UID: "cc9d5160-2c51-474c-aca1-1af693753ee8"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.527987 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc9d5160-2c51-474c-aca1-1af693753ee8-kube-api-access-hmfjm" (OuterVolumeSpecName: "kube-api-access-hmfjm") pod "cc9d5160-2c51-474c-aca1-1af693753ee8" (UID: "cc9d5160-2c51-474c-aca1-1af693753ee8"). InnerVolumeSpecName "kube-api-access-hmfjm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.530346 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-qdfsj"] Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.531702 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc9d5160-2c51-474c-aca1-1af693753ee8-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "cc9d5160-2c51-474c-aca1-1af693753ee8" (UID: "cc9d5160-2c51-474c-aca1-1af693753ee8"). InnerVolumeSpecName "rabbitmq-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.536574 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-qdfsj"] Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.548446 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc9d5160-2c51-474c-aca1-1af693753ee8-config-data" (OuterVolumeSpecName: "config-data") pod "cc9d5160-2c51-474c-aca1-1af693753ee8" (UID: "cc9d5160-2c51-474c-aca1-1af693753ee8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.564271 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc9d5160-2c51-474c-aca1-1af693753ee8-server-conf" (OuterVolumeSpecName: "server-conf") pod "cc9d5160-2c51-474c-aca1-1af693753ee8" (UID: "cc9d5160-2c51-474c-aca1-1af693753ee8"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.587603 4998 scope.go:117] "RemoveContainer" containerID="ce1c3d4a471ef44ed9a8e887a5f8fba20bb369dda1c3bbb7370146268725d19a" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.608890 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.617135 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"] Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.624924 4998 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cc9d5160-2c51-474c-aca1-1af693753ee8-plugins-conf\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.624964 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cc9d5160-2c51-474c-aca1-1af693753ee8-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.624991 4998 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.625004 4998 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cc9d5160-2c51-474c-aca1-1af693753ee8-pod-info\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.625018 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hmfjm\" (UniqueName: \"kubernetes.io/projected/cc9d5160-2c51-474c-aca1-1af693753ee8-kube-api-access-hmfjm\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.625031 4998 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cc9d5160-2c51-474c-aca1-1af693753ee8-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.625044 4998 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cc9d5160-2c51-474c-aca1-1af693753ee8-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.625055 4998 reconciler_common.go:293] 
"Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cc9d5160-2c51-474c-aca1-1af693753ee8-server-conf\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.664370 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc9d5160-2c51-474c-aca1-1af693753ee8-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "cc9d5160-2c51-474c-aca1-1af693753ee8" (UID: "cc9d5160-2c51-474c-aca1-1af693753ee8"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.665450 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-northd-0"] Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.677039 4998 scope.go:117] "RemoveContainer" containerID="0e9dcff5c5f1e34e7f9a045f44319200346178396df08e6468976e6cb812d2c2" Feb 03 07:09:22 crc kubenswrapper[4998]: E0203 07:09:22.678057 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e9dcff5c5f1e34e7f9a045f44319200346178396df08e6468976e6cb812d2c2\": container with ID starting with 0e9dcff5c5f1e34e7f9a045f44319200346178396df08e6468976e6cb812d2c2 not found: ID does not exist" containerID="0e9dcff5c5f1e34e7f9a045f44319200346178396df08e6468976e6cb812d2c2" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.678090 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e9dcff5c5f1e34e7f9a045f44319200346178396df08e6468976e6cb812d2c2"} err="failed to get container status \"0e9dcff5c5f1e34e7f9a045f44319200346178396df08e6468976e6cb812d2c2\": rpc error: code = NotFound desc = could not find container \"0e9dcff5c5f1e34e7f9a045f44319200346178396df08e6468976e6cb812d2c2\": container with ID starting with 0e9dcff5c5f1e34e7f9a045f44319200346178396df08e6468976e6cb812d2c2 not found: ID does not exist" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.678111 4998 scope.go:117] "RemoveContainer" containerID="ce1c3d4a471ef44ed9a8e887a5f8fba20bb369dda1c3bbb7370146268725d19a" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.682742 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Feb 03 07:09:22 crc kubenswrapper[4998]: E0203 07:09:22.683220 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce1c3d4a471ef44ed9a8e887a5f8fba20bb369dda1c3bbb7370146268725d19a\": container with ID starting with ce1c3d4a471ef44ed9a8e887a5f8fba20bb369dda1c3bbb7370146268725d19a not found: ID does not exist" containerID="ce1c3d4a471ef44ed9a8e887a5f8fba20bb369dda1c3bbb7370146268725d19a" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.683245 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce1c3d4a471ef44ed9a8e887a5f8fba20bb369dda1c3bbb7370146268725d19a"} err="failed to get container status \"ce1c3d4a471ef44ed9a8e887a5f8fba20bb369dda1c3bbb7370146268725d19a\": rpc error: code = NotFound desc = could not find container \"ce1c3d4a471ef44ed9a8e887a5f8fba20bb369dda1c3bbb7370146268725d19a\": container with ID starting with ce1c3d4a471ef44ed9a8e887a5f8fba20bb369dda1c3bbb7370146268725d19a not found: ID does not exist" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.683261 4998 scope.go:117] "RemoveContainer" 
containerID="7fc3cd4817b7865d5e94684c7fc6231e72d32602e47f8979e462f57f046e481a" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.690364 4998 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.691814 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-galera-0"] Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.726541 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-rabbitmq-plugins\") pod \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.726587 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-server-conf\") pod \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.726657 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-erlang-cookie-secret\") pod \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.726760 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.726821 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-rabbitmq-tls\") pod \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.726846 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9mxrp\" (UniqueName: \"kubernetes.io/projected/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-kube-api-access-9mxrp\") pod \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.726883 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-config-data\") pod \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.726923 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-rabbitmq-erlang-cookie\") pod \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.726956 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-pod-info\") pod 
\"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.726984 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-rabbitmq-confd\") pod \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.726986 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "59f5a5d7-787a-4941-a2d3-2fe8db65cb31" (UID: "59f5a5d7-787a-4941-a2d3-2fe8db65cb31"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.727008 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-plugins-conf\") pod \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\" (UID: \"59f5a5d7-787a-4941-a2d3-2fe8db65cb31\") " Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.727354 4998 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.727377 4998 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.727392 4998 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cc9d5160-2c51-474c-aca1-1af693753ee8-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.727566 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "59f5a5d7-787a-4941-a2d3-2fe8db65cb31" (UID: "59f5a5d7-787a-4941-a2d3-2fe8db65cb31"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.728124 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "59f5a5d7-787a-4941-a2d3-2fe8db65cb31" (UID: "59f5a5d7-787a-4941-a2d3-2fe8db65cb31"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.730920 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-kube-api-access-9mxrp" (OuterVolumeSpecName: "kube-api-access-9mxrp") pod "59f5a5d7-787a-4941-a2d3-2fe8db65cb31" (UID: "59f5a5d7-787a-4941-a2d3-2fe8db65cb31"). InnerVolumeSpecName "kube-api-access-9mxrp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.732044 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-pod-info" (OuterVolumeSpecName: "pod-info") pod "59f5a5d7-787a-4941-a2d3-2fe8db65cb31" (UID: "59f5a5d7-787a-4941-a2d3-2fe8db65cb31"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.733860 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "59f5a5d7-787a-4941-a2d3-2fe8db65cb31" (UID: "59f5a5d7-787a-4941-a2d3-2fe8db65cb31"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.736769 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "59f5a5d7-787a-4941-a2d3-2fe8db65cb31" (UID: "59f5a5d7-787a-4941-a2d3-2fe8db65cb31"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.736908 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "persistence") pod "59f5a5d7-787a-4941-a2d3-2fe8db65cb31" (UID: "59f5a5d7-787a-4941-a2d3-2fe8db65cb31"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.744488 4998 scope.go:117] "RemoveContainer" containerID="15a9b706d9e763c27648d5c9f3540ad2007d24d255d06bd65df06844ced8658f" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.801412 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-config-data" (OuterVolumeSpecName: "config-data") pod "59f5a5d7-787a-4941-a2d3-2fe8db65cb31" (UID: "59f5a5d7-787a-4941-a2d3-2fe8db65cb31"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.829744 4998 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.830010 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9mxrp\" (UniqueName: \"kubernetes.io/projected/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-kube-api-access-9mxrp\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.830090 4998 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.830157 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.830224 4998 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.830276 4998 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-pod-info\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.830343 4998 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-plugins-conf\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.830394 4998 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.848567 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.853010 4998 scope.go:117] "RemoveContainer" containerID="a172aacd50c8df3a6ef1e772e9a7a1499cc74d9310858af6283ac53328c157db" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.855931 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.859619 4998 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.860155 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-server-conf" (OuterVolumeSpecName: "server-conf") pod "59f5a5d7-787a-4941-a2d3-2fe8db65cb31" (UID: "59f5a5d7-787a-4941-a2d3-2fe8db65cb31"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.874022 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "59f5a5d7-787a-4941-a2d3-2fe8db65cb31" (UID: "59f5a5d7-787a-4941-a2d3-2fe8db65cb31"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.888143 4998 scope.go:117] "RemoveContainer" containerID="dd33cc14982f5158ae849d70849339c78aeeeb743bea375fe57e0cde15366ab8" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.901703 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-6c7dff8f57-z8bvm" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.914506 4998 scope.go:117] "RemoveContainer" containerID="a172aacd50c8df3a6ef1e772e9a7a1499cc74d9310858af6283ac53328c157db" Feb 03 07:09:22 crc kubenswrapper[4998]: E0203 07:09:22.915092 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a172aacd50c8df3a6ef1e772e9a7a1499cc74d9310858af6283ac53328c157db\": container with ID starting with a172aacd50c8df3a6ef1e772e9a7a1499cc74d9310858af6283ac53328c157db not found: ID does not exist" containerID="a172aacd50c8df3a6ef1e772e9a7a1499cc74d9310858af6283ac53328c157db" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.915132 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a172aacd50c8df3a6ef1e772e9a7a1499cc74d9310858af6283ac53328c157db"} err="failed to get container status \"a172aacd50c8df3a6ef1e772e9a7a1499cc74d9310858af6283ac53328c157db\": rpc error: code = NotFound desc = could not find container \"a172aacd50c8df3a6ef1e772e9a7a1499cc74d9310858af6283ac53328c157db\": container with ID starting with a172aacd50c8df3a6ef1e772e9a7a1499cc74d9310858af6283ac53328c157db not found: ID does not exist" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.915161 4998 scope.go:117] "RemoveContainer" containerID="dd33cc14982f5158ae849d70849339c78aeeeb743bea375fe57e0cde15366ab8" Feb 03 07:09:22 crc kubenswrapper[4998]: E0203 07:09:22.915425 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd33cc14982f5158ae849d70849339c78aeeeb743bea375fe57e0cde15366ab8\": container with ID starting with dd33cc14982f5158ae849d70849339c78aeeeb743bea375fe57e0cde15366ab8 not found: ID does not exist" containerID="dd33cc14982f5158ae849d70849339c78aeeeb743bea375fe57e0cde15366ab8" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.915453 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd33cc14982f5158ae849d70849339c78aeeeb743bea375fe57e0cde15366ab8"} err="failed to get container status \"dd33cc14982f5158ae849d70849339c78aeeeb743bea375fe57e0cde15366ab8\": rpc error: code = NotFound desc = could not find container \"dd33cc14982f5158ae849d70849339c78aeeeb743bea375fe57e0cde15366ab8\": container with ID starting with dd33cc14982f5158ae849d70849339c78aeeeb743bea375fe57e0cde15366ab8 not found: ID does not exist" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.932302 4998 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on 
node \"crc\" DevicePath \"\"" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.932338 4998 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:22 crc kubenswrapper[4998]: I0203 07:09:22.932348 4998 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/59f5a5d7-787a-4941-a2d3-2fe8db65cb31-server-conf\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.033429 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-combined-ca-bundle\") pod \"4e13372a-d92b-4928-9e27-c1422d685e05\" (UID: \"4e13372a-d92b-4928-9e27-c1422d685e05\") " Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.033521 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mzss2\" (UniqueName: \"kubernetes.io/projected/4e13372a-d92b-4928-9e27-c1422d685e05-kube-api-access-mzss2\") pod \"4e13372a-d92b-4928-9e27-c1422d685e05\" (UID: \"4e13372a-d92b-4928-9e27-c1422d685e05\") " Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.033554 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-credential-keys\") pod \"4e13372a-d92b-4928-9e27-c1422d685e05\" (UID: \"4e13372a-d92b-4928-9e27-c1422d685e05\") " Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.033654 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-config-data\") pod \"4e13372a-d92b-4928-9e27-c1422d685e05\" (UID: \"4e13372a-d92b-4928-9e27-c1422d685e05\") " Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.033695 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-public-tls-certs\") pod \"4e13372a-d92b-4928-9e27-c1422d685e05\" (UID: \"4e13372a-d92b-4928-9e27-c1422d685e05\") " Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.033733 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-internal-tls-certs\") pod \"4e13372a-d92b-4928-9e27-c1422d685e05\" (UID: \"4e13372a-d92b-4928-9e27-c1422d685e05\") " Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.033871 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-scripts\") pod \"4e13372a-d92b-4928-9e27-c1422d685e05\" (UID: \"4e13372a-d92b-4928-9e27-c1422d685e05\") " Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.033927 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-fernet-keys\") pod \"4e13372a-d92b-4928-9e27-c1422d685e05\" (UID: \"4e13372a-d92b-4928-9e27-c1422d685e05\") " Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.037003 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "4e13372a-d92b-4928-9e27-c1422d685e05" (UID: "4e13372a-d92b-4928-9e27-c1422d685e05"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.037218 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "4e13372a-d92b-4928-9e27-c1422d685e05" (UID: "4e13372a-d92b-4928-9e27-c1422d685e05"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.037869 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e13372a-d92b-4928-9e27-c1422d685e05-kube-api-access-mzss2" (OuterVolumeSpecName: "kube-api-access-mzss2") pod "4e13372a-d92b-4928-9e27-c1422d685e05" (UID: "4e13372a-d92b-4928-9e27-c1422d685e05"). InnerVolumeSpecName "kube-api-access-mzss2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.038167 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-scripts" (OuterVolumeSpecName: "scripts") pod "4e13372a-d92b-4928-9e27-c1422d685e05" (UID: "4e13372a-d92b-4928-9e27-c1422d685e05"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.053551 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4e13372a-d92b-4928-9e27-c1422d685e05" (UID: "4e13372a-d92b-4928-9e27-c1422d685e05"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.054768 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-config-data" (OuterVolumeSpecName: "config-data") pod "4e13372a-d92b-4928-9e27-c1422d685e05" (UID: "4e13372a-d92b-4928-9e27-c1422d685e05"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.072105 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "4e13372a-d92b-4928-9e27-c1422d685e05" (UID: "4e13372a-d92b-4928-9e27-c1422d685e05"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.073226 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "4e13372a-d92b-4928-9e27-c1422d685e05" (UID: "4e13372a-d92b-4928-9e27-c1422d685e05"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.135569 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.135614 4998 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.135628 4998 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.135640 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.135649 4998 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-fernet-keys\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.135659 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.135693 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mzss2\" (UniqueName: \"kubernetes.io/projected/4e13372a-d92b-4928-9e27-c1422d685e05-kube-api-access-mzss2\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.135704 4998 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4e13372a-d92b-4928-9e27-c1422d685e05-credential-keys\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.497158 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"59f5a5d7-787a-4941-a2d3-2fe8db65cb31","Type":"ContainerDied","Data":"3a4a5c41d8f321c35b88f725c44754cf52f2388e8495c38d53c001c5abe94c33"} Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.497224 4998 scope.go:117] "RemoveContainer" containerID="098c0a6d87cc827e487059fcc544a85b758f02d99c658f66e5fbf5c96ee6f34c" Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.497396 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.502318 4998 generic.go:334] "Generic (PLEG): container finished" podID="4e13372a-d92b-4928-9e27-c1422d685e05" containerID="2c023f77ba3c53d6dba2a954909d4bfd4b6a41baa1708019bf6cafdf0e585565" exitCode=0 Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.502358 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6c7dff8f57-z8bvm" event={"ID":"4e13372a-d92b-4928-9e27-c1422d685e05","Type":"ContainerDied","Data":"2c023f77ba3c53d6dba2a954909d4bfd4b6a41baa1708019bf6cafdf0e585565"} Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.502380 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6c7dff8f57-z8bvm" event={"ID":"4e13372a-d92b-4928-9e27-c1422d685e05","Type":"ContainerDied","Data":"7d46b3dc28148a42a558f264dfe35b76a89f9cf679104f7bafabfce67f6febc6"} Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.502449 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-6c7dff8f57-z8bvm" Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.520900 4998 scope.go:117] "RemoveContainer" containerID="76b83e5ba7d49d4a25c729bc84136855824cc063c69e5eb9aff88e90d282666d" Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.565153 4998 scope.go:117] "RemoveContainer" containerID="2c023f77ba3c53d6dba2a954909d4bfd4b6a41baa1708019bf6cafdf0e585565" Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.581227 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.600344 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.606458 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-6c7dff8f57-z8bvm"] Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.611270 4998 scope.go:117] "RemoveContainer" containerID="2c023f77ba3c53d6dba2a954909d4bfd4b6a41baa1708019bf6cafdf0e585565" Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.612875 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-6c7dff8f57-z8bvm"] Feb 03 07:09:23 crc kubenswrapper[4998]: E0203 07:09:23.614213 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c023f77ba3c53d6dba2a954909d4bfd4b6a41baa1708019bf6cafdf0e585565\": container with ID starting with 2c023f77ba3c53d6dba2a954909d4bfd4b6a41baa1708019bf6cafdf0e585565 not found: ID does not exist" containerID="2c023f77ba3c53d6dba2a954909d4bfd4b6a41baa1708019bf6cafdf0e585565" Feb 03 07:09:23 crc kubenswrapper[4998]: I0203 07:09:23.614249 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c023f77ba3c53d6dba2a954909d4bfd4b6a41baa1708019bf6cafdf0e585565"} err="failed to get container status \"2c023f77ba3c53d6dba2a954909d4bfd4b6a41baa1708019bf6cafdf0e585565\": rpc error: code = NotFound desc = could not find container \"2c023f77ba3c53d6dba2a954909d4bfd4b6a41baa1708019bf6cafdf0e585565\": container with ID starting with 2c023f77ba3c53d6dba2a954909d4bfd4b6a41baa1708019bf6cafdf0e585565 not found: ID does not exist" Feb 03 07:09:24 crc kubenswrapper[4998]: I0203 07:09:24.202871 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-b7g8v" Feb 03 07:09:24 crc 
kubenswrapper[4998]: I0203 07:09:24.260642 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-b7g8v" Feb 03 07:09:24 crc kubenswrapper[4998]: I0203 07:09:24.439918 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07088226-5029-4477-a6e1-85fd28c08f4b" path="/var/lib/kubelet/pods/07088226-5029-4477-a6e1-85fd28c08f4b/volumes" Feb 03 07:09:24 crc kubenswrapper[4998]: I0203 07:09:24.440876 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e13372a-d92b-4928-9e27-c1422d685e05" path="/var/lib/kubelet/pods/4e13372a-d92b-4928-9e27-c1422d685e05/volumes" Feb 03 07:09:24 crc kubenswrapper[4998]: I0203 07:09:24.441671 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="59f5a5d7-787a-4941-a2d3-2fe8db65cb31" path="/var/lib/kubelet/pods/59f5a5d7-787a-4941-a2d3-2fe8db65cb31/volumes" Feb 03 07:09:24 crc kubenswrapper[4998]: I0203 07:09:24.443097 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f513eeb-f447-48ba-a53a-096d4aa0275a" path="/var/lib/kubelet/pods/5f513eeb-f447-48ba-a53a-096d4aa0275a/volumes" Feb 03 07:09:24 crc kubenswrapper[4998]: I0203 07:09:24.443795 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc9d5160-2c51-474c-aca1-1af693753ee8" path="/var/lib/kubelet/pods/cc9d5160-2c51-474c-aca1-1af693753ee8/volumes" Feb 03 07:09:24 crc kubenswrapper[4998]: I0203 07:09:24.454830 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5714626-00c5-4b11-b056-40ff428fc017" path="/var/lib/kubelet/pods/f5714626-00c5-4b11-b056-40ff428fc017/volumes" Feb 03 07:09:24 crc kubenswrapper[4998]: I0203 07:09:24.455495 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-b7g8v"] Feb 03 07:09:24 crc kubenswrapper[4998]: I0203 07:09:24.459972 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-594c6c97c7-9bqhd" podUID="e24d47fe-485b-4ceb-bdab-25b10ac92fa2" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.165:9696/\": dial tcp 10.217.0.165:9696: connect: connection refused" Feb 03 07:09:24 crc kubenswrapper[4998]: I0203 07:09:24.528441 4998 generic.go:334] "Generic (PLEG): container finished" podID="20fec019-d2d1-4625-960c-c16004cfa5aa" containerID="a7442e55da5b397ed68925bc02a81c7967c0ba70c8e805f561094c6b1def8d26" exitCode=0 Feb 03 07:09:24 crc kubenswrapper[4998]: I0203 07:09:24.529028 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"20fec019-d2d1-4625-960c-c16004cfa5aa","Type":"ContainerDied","Data":"a7442e55da5b397ed68925bc02a81c7967c0ba70c8e805f561094c6b1def8d26"} Feb 03 07:09:24 crc kubenswrapper[4998]: I0203 07:09:24.669948 4998 util.go:48] "No ready sandbox for pod can be found. 
Feb 03 07:09:24 crc kubenswrapper[4998]: I0203 07:09:24.757636 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20fec019-d2d1-4625-960c-c16004cfa5aa-config-data\") pod \"20fec019-d2d1-4625-960c-c16004cfa5aa\" (UID: \"20fec019-d2d1-4625-960c-c16004cfa5aa\") "
Feb 03 07:09:24 crc kubenswrapper[4998]: I0203 07:09:24.757695 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sndgt\" (UniqueName: \"kubernetes.io/projected/20fec019-d2d1-4625-960c-c16004cfa5aa-kube-api-access-sndgt\") pod \"20fec019-d2d1-4625-960c-c16004cfa5aa\" (UID: \"20fec019-d2d1-4625-960c-c16004cfa5aa\") "
Feb 03 07:09:24 crc kubenswrapper[4998]: I0203 07:09:24.757777 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20fec019-d2d1-4625-960c-c16004cfa5aa-scripts\") pod \"20fec019-d2d1-4625-960c-c16004cfa5aa\" (UID: \"20fec019-d2d1-4625-960c-c16004cfa5aa\") "
Feb 03 07:09:24 crc kubenswrapper[4998]: I0203 07:09:24.757824 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/20fec019-d2d1-4625-960c-c16004cfa5aa-ceilometer-tls-certs\") pod \"20fec019-d2d1-4625-960c-c16004cfa5aa\" (UID: \"20fec019-d2d1-4625-960c-c16004cfa5aa\") "
Feb 03 07:09:24 crc kubenswrapper[4998]: I0203 07:09:24.757854 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/20fec019-d2d1-4625-960c-c16004cfa5aa-run-httpd\") pod \"20fec019-d2d1-4625-960c-c16004cfa5aa\" (UID: \"20fec019-d2d1-4625-960c-c16004cfa5aa\") "
Feb 03 07:09:24 crc kubenswrapper[4998]: I0203 07:09:24.757955 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/20fec019-d2d1-4625-960c-c16004cfa5aa-log-httpd\") pod \"20fec019-d2d1-4625-960c-c16004cfa5aa\" (UID: \"20fec019-d2d1-4625-960c-c16004cfa5aa\") "
Feb 03 07:09:24 crc kubenswrapper[4998]: I0203 07:09:24.757984 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20fec019-d2d1-4625-960c-c16004cfa5aa-combined-ca-bundle\") pod \"20fec019-d2d1-4625-960c-c16004cfa5aa\" (UID: \"20fec019-d2d1-4625-960c-c16004cfa5aa\") "
Feb 03 07:09:24 crc kubenswrapper[4998]: I0203 07:09:24.758007 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/20fec019-d2d1-4625-960c-c16004cfa5aa-sg-core-conf-yaml\") pod \"20fec019-d2d1-4625-960c-c16004cfa5aa\" (UID: \"20fec019-d2d1-4625-960c-c16004cfa5aa\") "
Feb 03 07:09:24 crc kubenswrapper[4998]: I0203 07:09:24.760040 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20fec019-d2d1-4625-960c-c16004cfa5aa-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "20fec019-d2d1-4625-960c-c16004cfa5aa" (UID: "20fec019-d2d1-4625-960c-c16004cfa5aa"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 07:09:24 crc kubenswrapper[4998]: I0203 07:09:24.760620 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20fec019-d2d1-4625-960c-c16004cfa5aa-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "20fec019-d2d1-4625-960c-c16004cfa5aa" (UID: "20fec019-d2d1-4625-960c-c16004cfa5aa"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 07:09:24 crc kubenswrapper[4998]: I0203 07:09:24.766613 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20fec019-d2d1-4625-960c-c16004cfa5aa-scripts" (OuterVolumeSpecName: "scripts") pod "20fec019-d2d1-4625-960c-c16004cfa5aa" (UID: "20fec019-d2d1-4625-960c-c16004cfa5aa"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:09:24 crc kubenswrapper[4998]: I0203 07:09:24.766659 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20fec019-d2d1-4625-960c-c16004cfa5aa-kube-api-access-sndgt" (OuterVolumeSpecName: "kube-api-access-sndgt") pod "20fec019-d2d1-4625-960c-c16004cfa5aa" (UID: "20fec019-d2d1-4625-960c-c16004cfa5aa"). InnerVolumeSpecName "kube-api-access-sndgt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 07:09:24 crc kubenswrapper[4998]: I0203 07:09:24.782374 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20fec019-d2d1-4625-960c-c16004cfa5aa-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "20fec019-d2d1-4625-960c-c16004cfa5aa" (UID: "20fec019-d2d1-4625-960c-c16004cfa5aa"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:09:24 crc kubenswrapper[4998]: I0203 07:09:24.829683 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20fec019-d2d1-4625-960c-c16004cfa5aa-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "20fec019-d2d1-4625-960c-c16004cfa5aa" (UID: "20fec019-d2d1-4625-960c-c16004cfa5aa"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:09:24 crc kubenswrapper[4998]: I0203 07:09:24.830168 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20fec019-d2d1-4625-960c-c16004cfa5aa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "20fec019-d2d1-4625-960c-c16004cfa5aa" (UID: "20fec019-d2d1-4625-960c-c16004cfa5aa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:09:24 crc kubenswrapper[4998]: I0203 07:09:24.843273 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20fec019-d2d1-4625-960c-c16004cfa5aa-config-data" (OuterVolumeSpecName: "config-data") pod "20fec019-d2d1-4625-960c-c16004cfa5aa" (UID: "20fec019-d2d1-4625-960c-c16004cfa5aa"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:09:24 crc kubenswrapper[4998]: I0203 07:09:24.859927 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20fec019-d2d1-4625-960c-c16004cfa5aa-config-data\") on node \"crc\" DevicePath \"\""
Feb 03 07:09:24 crc kubenswrapper[4998]: I0203 07:09:24.859992 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sndgt\" (UniqueName: \"kubernetes.io/projected/20fec019-d2d1-4625-960c-c16004cfa5aa-kube-api-access-sndgt\") on node \"crc\" DevicePath \"\""
Feb 03 07:09:24 crc kubenswrapper[4998]: I0203 07:09:24.860005 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20fec019-d2d1-4625-960c-c16004cfa5aa-scripts\") on node \"crc\" DevicePath \"\""
Feb 03 07:09:24 crc kubenswrapper[4998]: I0203 07:09:24.860015 4998 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/20fec019-d2d1-4625-960c-c16004cfa5aa-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\""
Feb 03 07:09:24 crc kubenswrapper[4998]: I0203 07:09:24.860024 4998 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/20fec019-d2d1-4625-960c-c16004cfa5aa-run-httpd\") on node \"crc\" DevicePath \"\""
Feb 03 07:09:24 crc kubenswrapper[4998]: I0203 07:09:24.860032 4998 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/20fec019-d2d1-4625-960c-c16004cfa5aa-log-httpd\") on node \"crc\" DevicePath \"\""
Feb 03 07:09:24 crc kubenswrapper[4998]: I0203 07:09:24.860041 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20fec019-d2d1-4625-960c-c16004cfa5aa-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 03 07:09:24 crc kubenswrapper[4998]: I0203 07:09:24.860051 4998 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/20fec019-d2d1-4625-960c-c16004cfa5aa-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Feb 03 07:09:25 crc kubenswrapper[4998]: I0203 07:09:25.540759 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-b7g8v" podUID="03de7e09-127d-4746-a01c-e5b6a3618014" containerName="registry-server" containerID="cri-o://06b3778b3efbeaf309b9437aea0022bdc6ac9da6c81e8a3d61e80702481f6678" gracePeriod=2
Feb 03 07:09:25 crc kubenswrapper[4998]: I0203 07:09:25.540949 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Feb 03 07:09:25 crc kubenswrapper[4998]: I0203 07:09:25.543236 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"20fec019-d2d1-4625-960c-c16004cfa5aa","Type":"ContainerDied","Data":"c68d5a752f8542c881b80d817b6cd229d8246b8a0c06779e1b712183c0be6221"}
Feb 03 07:09:25 crc kubenswrapper[4998]: I0203 07:09:25.543314 4998 scope.go:117] "RemoveContainer" containerID="bd760878032362b5e2dbbae0a17767478623ce84fedeea219d3003f8e309bbec"
Feb 03 07:09:25 crc kubenswrapper[4998]: I0203 07:09:25.578418 4998 scope.go:117] "RemoveContainer" containerID="da3d6f3489ff3ea9baaa57c3a51e435fe24ab651749286ef5d6fb275fe2c2a08"
Feb 03 07:09:25 crc kubenswrapper[4998]: I0203 07:09:25.593232 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Feb 03 07:09:25 crc kubenswrapper[4998]: I0203 07:09:25.600343 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Feb 03 07:09:25 crc kubenswrapper[4998]: I0203 07:09:25.606991 4998 scope.go:117] "RemoveContainer" containerID="a7442e55da5b397ed68925bc02a81c7967c0ba70c8e805f561094c6b1def8d26"
Feb 03 07:09:25 crc kubenswrapper[4998]: I0203 07:09:25.748085 4998 scope.go:117] "RemoveContainer" containerID="735a947af0313563fd152251e4f01a7d98bb038f3738b13aa0956d4060411491"
Feb 03 07:09:25 crc kubenswrapper[4998]: I0203 07:09:25.993475 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b7g8v"
Feb 03 07:09:26 crc kubenswrapper[4998]: I0203 07:09:26.091805 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03de7e09-127d-4746-a01c-e5b6a3618014-utilities\") pod \"03de7e09-127d-4746-a01c-e5b6a3618014\" (UID: \"03de7e09-127d-4746-a01c-e5b6a3618014\") "
Feb 03 07:09:26 crc kubenswrapper[4998]: I0203 07:09:26.091932 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03de7e09-127d-4746-a01c-e5b6a3618014-catalog-content\") pod \"03de7e09-127d-4746-a01c-e5b6a3618014\" (UID: \"03de7e09-127d-4746-a01c-e5b6a3618014\") "
Feb 03 07:09:26 crc kubenswrapper[4998]: I0203 07:09:26.092041 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-plltc\" (UniqueName: \"kubernetes.io/projected/03de7e09-127d-4746-a01c-e5b6a3618014-kube-api-access-plltc\") pod \"03de7e09-127d-4746-a01c-e5b6a3618014\" (UID: \"03de7e09-127d-4746-a01c-e5b6a3618014\") "
Feb 03 07:09:26 crc kubenswrapper[4998]: I0203 07:09:26.092645 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03de7e09-127d-4746-a01c-e5b6a3618014-utilities" (OuterVolumeSpecName: "utilities") pod "03de7e09-127d-4746-a01c-e5b6a3618014" (UID: "03de7e09-127d-4746-a01c-e5b6a3618014"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 07:09:26 crc kubenswrapper[4998]: I0203 07:09:26.099750 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03de7e09-127d-4746-a01c-e5b6a3618014-kube-api-access-plltc" (OuterVolumeSpecName: "kube-api-access-plltc") pod "03de7e09-127d-4746-a01c-e5b6a3618014" (UID: "03de7e09-127d-4746-a01c-e5b6a3618014"). InnerVolumeSpecName "kube-api-access-plltc". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:26 crc kubenswrapper[4998]: I0203 07:09:26.193390 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-plltc\" (UniqueName: \"kubernetes.io/projected/03de7e09-127d-4746-a01c-e5b6a3618014-kube-api-access-plltc\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:26 crc kubenswrapper[4998]: I0203 07:09:26.193426 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03de7e09-127d-4746-a01c-e5b6a3618014-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:26 crc kubenswrapper[4998]: I0203 07:09:26.218584 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03de7e09-127d-4746-a01c-e5b6a3618014-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "03de7e09-127d-4746-a01c-e5b6a3618014" (UID: "03de7e09-127d-4746-a01c-e5b6a3618014"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:09:26 crc kubenswrapper[4998]: I0203 07:09:26.294225 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03de7e09-127d-4746-a01c-e5b6a3618014-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:26 crc kubenswrapper[4998]: I0203 07:09:26.436822 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20fec019-d2d1-4625-960c-c16004cfa5aa" path="/var/lib/kubelet/pods/20fec019-d2d1-4625-960c-c16004cfa5aa/volumes" Feb 03 07:09:26 crc kubenswrapper[4998]: E0203 07:09:26.537988 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba is running failed: container process not found" containerID="f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Feb 03 07:09:26 crc kubenswrapper[4998]: E0203 07:09:26.538537 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba is running failed: container process not found" containerID="f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Feb 03 07:09:26 crc kubenswrapper[4998]: E0203 07:09:26.538849 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba is running failed: container process not found" containerID="f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Feb 03 07:09:26 crc kubenswrapper[4998]: E0203 07:09:26.538905 4998 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-t4p58" podUID="5e71558d-268c-4680-b43c-9fb48f34b38f" containerName="ovsdb-server" Feb 03 07:09:26 crc kubenswrapper[4998]: E0203 07:09:26.539496 4998 log.go:32] "ExecSync cmd from runtime service 
failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5924834a1fe26ed73de03c1f7f47a03b21661cf375e099684ef40cf961a44b28" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Feb 03 07:09:26 crc kubenswrapper[4998]: E0203 07:09:26.541036 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5924834a1fe26ed73de03c1f7f47a03b21661cf375e099684ef40cf961a44b28" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Feb 03 07:09:26 crc kubenswrapper[4998]: E0203 07:09:26.549416 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5924834a1fe26ed73de03c1f7f47a03b21661cf375e099684ef40cf961a44b28" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Feb 03 07:09:26 crc kubenswrapper[4998]: E0203 07:09:26.549468 4998 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-t4p58" podUID="5e71558d-268c-4680-b43c-9fb48f34b38f" containerName="ovs-vswitchd" Feb 03 07:09:26 crc kubenswrapper[4998]: I0203 07:09:26.556669 4998 generic.go:334] "Generic (PLEG): container finished" podID="03de7e09-127d-4746-a01c-e5b6a3618014" containerID="06b3778b3efbeaf309b9437aea0022bdc6ac9da6c81e8a3d61e80702481f6678" exitCode=0 Feb 03 07:09:26 crc kubenswrapper[4998]: I0203 07:09:26.556733 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-b7g8v" Feb 03 07:09:26 crc kubenswrapper[4998]: I0203 07:09:26.556739 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b7g8v" event={"ID":"03de7e09-127d-4746-a01c-e5b6a3618014","Type":"ContainerDied","Data":"06b3778b3efbeaf309b9437aea0022bdc6ac9da6c81e8a3d61e80702481f6678"} Feb 03 07:09:26 crc kubenswrapper[4998]: I0203 07:09:26.556812 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b7g8v" event={"ID":"03de7e09-127d-4746-a01c-e5b6a3618014","Type":"ContainerDied","Data":"938b29719134e5a4cde384d1ddf89bc5794bc61b4cc828cc7ec934f6d8fd0b48"} Feb 03 07:09:26 crc kubenswrapper[4998]: I0203 07:09:26.556830 4998 scope.go:117] "RemoveContainer" containerID="06b3778b3efbeaf309b9437aea0022bdc6ac9da6c81e8a3d61e80702481f6678" Feb 03 07:09:26 crc kubenswrapper[4998]: I0203 07:09:26.581289 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-b7g8v"] Feb 03 07:09:26 crc kubenswrapper[4998]: I0203 07:09:26.587529 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-b7g8v"] Feb 03 07:09:26 crc kubenswrapper[4998]: I0203 07:09:26.591018 4998 scope.go:117] "RemoveContainer" containerID="c97a1ae836dd764480c9bc340d5d428deeb248e8d300cd3497c09df46ecda043" Feb 03 07:09:26 crc kubenswrapper[4998]: I0203 07:09:26.618511 4998 scope.go:117] "RemoveContainer" containerID="62e265be30824ef3b0cb94882808022017ca92cecf1d6af12ed9164d90e2ec4c" Feb 03 07:09:26 crc kubenswrapper[4998]: I0203 07:09:26.642033 4998 scope.go:117] "RemoveContainer" containerID="06b3778b3efbeaf309b9437aea0022bdc6ac9da6c81e8a3d61e80702481f6678" Feb 03 07:09:26 crc kubenswrapper[4998]: E0203 07:09:26.642592 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"06b3778b3efbeaf309b9437aea0022bdc6ac9da6c81e8a3d61e80702481f6678\": container with ID starting with 06b3778b3efbeaf309b9437aea0022bdc6ac9da6c81e8a3d61e80702481f6678 not found: ID does not exist" containerID="06b3778b3efbeaf309b9437aea0022bdc6ac9da6c81e8a3d61e80702481f6678" Feb 03 07:09:26 crc kubenswrapper[4998]: I0203 07:09:26.642627 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"06b3778b3efbeaf309b9437aea0022bdc6ac9da6c81e8a3d61e80702481f6678"} err="failed to get container status \"06b3778b3efbeaf309b9437aea0022bdc6ac9da6c81e8a3d61e80702481f6678\": rpc error: code = NotFound desc = could not find container \"06b3778b3efbeaf309b9437aea0022bdc6ac9da6c81e8a3d61e80702481f6678\": container with ID starting with 06b3778b3efbeaf309b9437aea0022bdc6ac9da6c81e8a3d61e80702481f6678 not found: ID does not exist" Feb 03 07:09:26 crc kubenswrapper[4998]: I0203 07:09:26.642687 4998 scope.go:117] "RemoveContainer" containerID="c97a1ae836dd764480c9bc340d5d428deeb248e8d300cd3497c09df46ecda043" Feb 03 07:09:26 crc kubenswrapper[4998]: E0203 07:09:26.642938 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c97a1ae836dd764480c9bc340d5d428deeb248e8d300cd3497c09df46ecda043\": container with ID starting with c97a1ae836dd764480c9bc340d5d428deeb248e8d300cd3497c09df46ecda043 not found: ID does not exist" containerID="c97a1ae836dd764480c9bc340d5d428deeb248e8d300cd3497c09df46ecda043" Feb 03 07:09:26 crc kubenswrapper[4998]: I0203 07:09:26.642962 4998 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c97a1ae836dd764480c9bc340d5d428deeb248e8d300cd3497c09df46ecda043"} err="failed to get container status \"c97a1ae836dd764480c9bc340d5d428deeb248e8d300cd3497c09df46ecda043\": rpc error: code = NotFound desc = could not find container \"c97a1ae836dd764480c9bc340d5d428deeb248e8d300cd3497c09df46ecda043\": container with ID starting with c97a1ae836dd764480c9bc340d5d428deeb248e8d300cd3497c09df46ecda043 not found: ID does not exist" Feb 03 07:09:26 crc kubenswrapper[4998]: I0203 07:09:26.642978 4998 scope.go:117] "RemoveContainer" containerID="62e265be30824ef3b0cb94882808022017ca92cecf1d6af12ed9164d90e2ec4c" Feb 03 07:09:26 crc kubenswrapper[4998]: E0203 07:09:26.643260 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62e265be30824ef3b0cb94882808022017ca92cecf1d6af12ed9164d90e2ec4c\": container with ID starting with 62e265be30824ef3b0cb94882808022017ca92cecf1d6af12ed9164d90e2ec4c not found: ID does not exist" containerID="62e265be30824ef3b0cb94882808022017ca92cecf1d6af12ed9164d90e2ec4c" Feb 03 07:09:26 crc kubenswrapper[4998]: I0203 07:09:26.643300 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62e265be30824ef3b0cb94882808022017ca92cecf1d6af12ed9164d90e2ec4c"} err="failed to get container status \"62e265be30824ef3b0cb94882808022017ca92cecf1d6af12ed9164d90e2ec4c\": rpc error: code = NotFound desc = could not find container \"62e265be30824ef3b0cb94882808022017ca92cecf1d6af12ed9164d90e2ec4c\": container with ID starting with 62e265be30824ef3b0cb94882808022017ca92cecf1d6af12ed9164d90e2ec4c not found: ID does not exist" Feb 03 07:09:28 crc kubenswrapper[4998]: I0203 07:09:28.438053 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03de7e09-127d-4746-a01c-e5b6a3618014" path="/var/lib/kubelet/pods/03de7e09-127d-4746-a01c-e5b6a3618014/volumes" Feb 03 07:09:31 crc kubenswrapper[4998]: E0203 07:09:31.539024 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba is running failed: container process not found" containerID="f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Feb 03 07:09:31 crc kubenswrapper[4998]: E0203 07:09:31.540621 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba is running failed: container process not found" containerID="f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Feb 03 07:09:31 crc kubenswrapper[4998]: E0203 07:09:31.540964 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5924834a1fe26ed73de03c1f7f47a03b21661cf375e099684ef40cf961a44b28" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Feb 03 07:09:31 crc kubenswrapper[4998]: E0203 07:09:31.541199 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created 
or running: checking if PID of f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba is running failed: container process not found" containerID="f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Feb 03 07:09:31 crc kubenswrapper[4998]: E0203 07:09:31.541274 4998 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-t4p58" podUID="5e71558d-268c-4680-b43c-9fb48f34b38f" containerName="ovsdb-server" Feb 03 07:09:31 crc kubenswrapper[4998]: E0203 07:09:31.543133 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5924834a1fe26ed73de03c1f7f47a03b21661cf375e099684ef40cf961a44b28" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Feb 03 07:09:31 crc kubenswrapper[4998]: E0203 07:09:31.544886 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5924834a1fe26ed73de03c1f7f47a03b21661cf375e099684ef40cf961a44b28" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Feb 03 07:09:31 crc kubenswrapper[4998]: E0203 07:09:31.544945 4998 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-t4p58" podUID="5e71558d-268c-4680-b43c-9fb48f34b38f" containerName="ovs-vswitchd" Feb 03 07:09:33 crc kubenswrapper[4998]: I0203 07:09:33.638444 4998 generic.go:334] "Generic (PLEG): container finished" podID="e24d47fe-485b-4ceb-bdab-25b10ac92fa2" containerID="f6359e2d805c5da84deb6d70b3a2cf1b151546db9cb67ce6157717737983ed9d" exitCode=0 Feb 03 07:09:33 crc kubenswrapper[4998]: I0203 07:09:33.638499 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-594c6c97c7-9bqhd" event={"ID":"e24d47fe-485b-4ceb-bdab-25b10ac92fa2","Type":"ContainerDied","Data":"f6359e2d805c5da84deb6d70b3a2cf1b151546db9cb67ce6157717737983ed9d"} Feb 03 07:09:33 crc kubenswrapper[4998]: I0203 07:09:33.638528 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-594c6c97c7-9bqhd" event={"ID":"e24d47fe-485b-4ceb-bdab-25b10ac92fa2","Type":"ContainerDied","Data":"d866bdc02d6dd3fc8a64812f15757edbc7ccd910d2969ec706979857d66307c1"} Feb 03 07:09:33 crc kubenswrapper[4998]: I0203 07:09:33.638541 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d866bdc02d6dd3fc8a64812f15757edbc7ccd910d2969ec706979857d66307c1" Feb 03 07:09:33 crc kubenswrapper[4998]: I0203 07:09:33.680167 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-594c6c97c7-9bqhd" Feb 03 07:09:33 crc kubenswrapper[4998]: I0203 07:09:33.798487 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-internal-tls-certs\") pod \"e24d47fe-485b-4ceb-bdab-25b10ac92fa2\" (UID: \"e24d47fe-485b-4ceb-bdab-25b10ac92fa2\") " Feb 03 07:09:33 crc kubenswrapper[4998]: I0203 07:09:33.798591 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-combined-ca-bundle\") pod \"e24d47fe-485b-4ceb-bdab-25b10ac92fa2\" (UID: \"e24d47fe-485b-4ceb-bdab-25b10ac92fa2\") " Feb 03 07:09:33 crc kubenswrapper[4998]: I0203 07:09:33.798610 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-ovndb-tls-certs\") pod \"e24d47fe-485b-4ceb-bdab-25b10ac92fa2\" (UID: \"e24d47fe-485b-4ceb-bdab-25b10ac92fa2\") " Feb 03 07:09:33 crc kubenswrapper[4998]: I0203 07:09:33.798630 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vvgmm\" (UniqueName: \"kubernetes.io/projected/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-kube-api-access-vvgmm\") pod \"e24d47fe-485b-4ceb-bdab-25b10ac92fa2\" (UID: \"e24d47fe-485b-4ceb-bdab-25b10ac92fa2\") " Feb 03 07:09:33 crc kubenswrapper[4998]: I0203 07:09:33.798661 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-public-tls-certs\") pod \"e24d47fe-485b-4ceb-bdab-25b10ac92fa2\" (UID: \"e24d47fe-485b-4ceb-bdab-25b10ac92fa2\") " Feb 03 07:09:33 crc kubenswrapper[4998]: I0203 07:09:33.798678 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-httpd-config\") pod \"e24d47fe-485b-4ceb-bdab-25b10ac92fa2\" (UID: \"e24d47fe-485b-4ceb-bdab-25b10ac92fa2\") " Feb 03 07:09:33 crc kubenswrapper[4998]: I0203 07:09:33.798735 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-config\") pod \"e24d47fe-485b-4ceb-bdab-25b10ac92fa2\" (UID: \"e24d47fe-485b-4ceb-bdab-25b10ac92fa2\") " Feb 03 07:09:33 crc kubenswrapper[4998]: I0203 07:09:33.808796 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "e24d47fe-485b-4ceb-bdab-25b10ac92fa2" (UID: "e24d47fe-485b-4ceb-bdab-25b10ac92fa2"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:33 crc kubenswrapper[4998]: I0203 07:09:33.817054 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-kube-api-access-vvgmm" (OuterVolumeSpecName: "kube-api-access-vvgmm") pod "e24d47fe-485b-4ceb-bdab-25b10ac92fa2" (UID: "e24d47fe-485b-4ceb-bdab-25b10ac92fa2"). InnerVolumeSpecName "kube-api-access-vvgmm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:33 crc kubenswrapper[4998]: I0203 07:09:33.839749 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "e24d47fe-485b-4ceb-bdab-25b10ac92fa2" (UID: "e24d47fe-485b-4ceb-bdab-25b10ac92fa2"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:33 crc kubenswrapper[4998]: I0203 07:09:33.842689 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "e24d47fe-485b-4ceb-bdab-25b10ac92fa2" (UID: "e24d47fe-485b-4ceb-bdab-25b10ac92fa2"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:33 crc kubenswrapper[4998]: I0203 07:09:33.851999 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-config" (OuterVolumeSpecName: "config") pod "e24d47fe-485b-4ceb-bdab-25b10ac92fa2" (UID: "e24d47fe-485b-4ceb-bdab-25b10ac92fa2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:33 crc kubenswrapper[4998]: I0203 07:09:33.856943 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e24d47fe-485b-4ceb-bdab-25b10ac92fa2" (UID: "e24d47fe-485b-4ceb-bdab-25b10ac92fa2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:33 crc kubenswrapper[4998]: I0203 07:09:33.858514 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "e24d47fe-485b-4ceb-bdab-25b10ac92fa2" (UID: "e24d47fe-485b-4ceb-bdab-25b10ac92fa2"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:33 crc kubenswrapper[4998]: I0203 07:09:33.900899 4998 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:33 crc kubenswrapper[4998]: I0203 07:09:33.900978 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:33 crc kubenswrapper[4998]: I0203 07:09:33.900992 4998 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:33 crc kubenswrapper[4998]: I0203 07:09:33.901005 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vvgmm\" (UniqueName: \"kubernetes.io/projected/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-kube-api-access-vvgmm\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:33 crc kubenswrapper[4998]: I0203 07:09:33.901018 4998 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:33 crc kubenswrapper[4998]: I0203 07:09:33.901029 4998 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-httpd-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:33 crc kubenswrapper[4998]: I0203 07:09:33.901040 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/e24d47fe-485b-4ceb-bdab-25b10ac92fa2-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:34 crc kubenswrapper[4998]: I0203 07:09:34.646343 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-594c6c97c7-9bqhd" Feb 03 07:09:34 crc kubenswrapper[4998]: I0203 07:09:34.672330 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-594c6c97c7-9bqhd"] Feb 03 07:09:34 crc kubenswrapper[4998]: I0203 07:09:34.678839 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-594c6c97c7-9bqhd"] Feb 03 07:09:36 crc kubenswrapper[4998]: I0203 07:09:36.441027 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e24d47fe-485b-4ceb-bdab-25b10ac92fa2" path="/var/lib/kubelet/pods/e24d47fe-485b-4ceb-bdab-25b10ac92fa2/volumes" Feb 03 07:09:36 crc kubenswrapper[4998]: E0203 07:09:36.538531 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba is running failed: container process not found" containerID="f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Feb 03 07:09:36 crc kubenswrapper[4998]: E0203 07:09:36.539145 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba is running failed: container process not found" containerID="f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Feb 03 07:09:36 crc kubenswrapper[4998]: E0203 07:09:36.539464 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5924834a1fe26ed73de03c1f7f47a03b21661cf375e099684ef40cf961a44b28" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Feb 03 07:09:36 crc kubenswrapper[4998]: E0203 07:09:36.539763 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba is running failed: container process not found" containerID="f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Feb 03 07:09:36 crc kubenswrapper[4998]: E0203 07:09:36.539819 4998 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-t4p58" podUID="5e71558d-268c-4680-b43c-9fb48f34b38f" containerName="ovsdb-server" Feb 03 07:09:36 crc kubenswrapper[4998]: E0203 07:09:36.540662 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5924834a1fe26ed73de03c1f7f47a03b21661cf375e099684ef40cf961a44b28" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Feb 03 07:09:36 crc kubenswrapper[4998]: E0203 07:09:36.542826 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , 
stderr: , exit code -1" containerID="5924834a1fe26ed73de03c1f7f47a03b21661cf375e099684ef40cf961a44b28" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Feb 03 07:09:36 crc kubenswrapper[4998]: E0203 07:09:36.542871 4998 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-t4p58" podUID="5e71558d-268c-4680-b43c-9fb48f34b38f" containerName="ovs-vswitchd" Feb 03 07:09:41 crc kubenswrapper[4998]: E0203 07:09:41.538760 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba is running failed: container process not found" containerID="f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Feb 03 07:09:41 crc kubenswrapper[4998]: E0203 07:09:41.539734 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba is running failed: container process not found" containerID="f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Feb 03 07:09:41 crc kubenswrapper[4998]: E0203 07:09:41.540216 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba is running failed: container process not found" containerID="f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Feb 03 07:09:41 crc kubenswrapper[4998]: E0203 07:09:41.540272 4998 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-t4p58" podUID="5e71558d-268c-4680-b43c-9fb48f34b38f" containerName="ovsdb-server" Feb 03 07:09:41 crc kubenswrapper[4998]: E0203 07:09:41.541660 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5924834a1fe26ed73de03c1f7f47a03b21661cf375e099684ef40cf961a44b28" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Feb 03 07:09:41 crc kubenswrapper[4998]: E0203 07:09:41.543865 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5924834a1fe26ed73de03c1f7f47a03b21661cf375e099684ef40cf961a44b28" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Feb 03 07:09:41 crc kubenswrapper[4998]: E0203 07:09:41.545555 4998 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="5924834a1fe26ed73de03c1f7f47a03b21661cf375e099684ef40cf961a44b28" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Feb 03 07:09:41 crc kubenswrapper[4998]: E0203 07:09:41.545609 4998 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-t4p58" podUID="5e71558d-268c-4680-b43c-9fb48f34b38f" containerName="ovs-vswitchd" Feb 03 07:09:42 crc kubenswrapper[4998]: I0203 07:09:42.754350 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:09:42 crc kubenswrapper[4998]: I0203 07:09:42.754406 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:09:42 crc kubenswrapper[4998]: I0203 07:09:42.754444 4998 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" Feb 03 07:09:42 crc kubenswrapper[4998]: I0203 07:09:42.755037 4998 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9ab352b70406d28ffdcc38744a8dd4b0980c54ba576664d2d40fb907223c52c4"} pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 03 07:09:42 crc kubenswrapper[4998]: I0203 07:09:42.755093 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" containerID="cri-o://9ab352b70406d28ffdcc38744a8dd4b0980c54ba576664d2d40fb907223c52c4" gracePeriod=600 Feb 03 07:09:43 crc kubenswrapper[4998]: I0203 07:09:43.738146 4998 generic.go:334] "Generic (PLEG): container finished" podID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerID="9ab352b70406d28ffdcc38744a8dd4b0980c54ba576664d2d40fb907223c52c4" exitCode=0 Feb 03 07:09:43 crc kubenswrapper[4998]: I0203 07:09:43.738432 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerDied","Data":"9ab352b70406d28ffdcc38744a8dd4b0980c54ba576664d2d40fb907223c52c4"} Feb 03 07:09:43 crc kubenswrapper[4998]: I0203 07:09:43.738461 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerStarted","Data":"b1a8e64af6c0323b631a5d0e804eb508d44c7d65569e25637152c708c50e98a8"} Feb 03 07:09:43 crc kubenswrapper[4998]: I0203 07:09:43.738480 4998 scope.go:117] "RemoveContainer" containerID="03cdfa2638a496a4e32ad344d4242ab7d52b707f81b68db6d7febb228a19986d" Feb 03 07:09:44 crc kubenswrapper[4998]: I0203 07:09:44.754084 4998 generic.go:334] "Generic (PLEG): container finished" 
podID="ecd25f56-731a-4b58-837d-7d81dc9f595e" containerID="b9cf13c79eeb6224fa7a06ecf85b9c0950a6c413aa4a0ee378c2547496f98817" exitCode=137 Feb 03 07:09:44 crc kubenswrapper[4998]: I0203 07:09:44.754137 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ecd25f56-731a-4b58-837d-7d81dc9f595e","Type":"ContainerDied","Data":"b9cf13c79eeb6224fa7a06ecf85b9c0950a6c413aa4a0ee378c2547496f98817"} Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.072328 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.261516 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ecd25f56-731a-4b58-837d-7d81dc9f595e-config-data-custom\") pod \"ecd25f56-731a-4b58-837d-7d81dc9f595e\" (UID: \"ecd25f56-731a-4b58-837d-7d81dc9f595e\") " Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.261681 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ecd25f56-731a-4b58-837d-7d81dc9f595e-etc-machine-id\") pod \"ecd25f56-731a-4b58-837d-7d81dc9f595e\" (UID: \"ecd25f56-731a-4b58-837d-7d81dc9f595e\") " Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.261727 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecd25f56-731a-4b58-837d-7d81dc9f595e-config-data\") pod \"ecd25f56-731a-4b58-837d-7d81dc9f595e\" (UID: \"ecd25f56-731a-4b58-837d-7d81dc9f595e\") " Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.261735 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ecd25f56-731a-4b58-837d-7d81dc9f595e-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "ecd25f56-731a-4b58-837d-7d81dc9f595e" (UID: "ecd25f56-731a-4b58-837d-7d81dc9f595e"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.261755 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-krqn4\" (UniqueName: \"kubernetes.io/projected/ecd25f56-731a-4b58-837d-7d81dc9f595e-kube-api-access-krqn4\") pod \"ecd25f56-731a-4b58-837d-7d81dc9f595e\" (UID: \"ecd25f56-731a-4b58-837d-7d81dc9f595e\") " Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.261802 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecd25f56-731a-4b58-837d-7d81dc9f595e-scripts\") pod \"ecd25f56-731a-4b58-837d-7d81dc9f595e\" (UID: \"ecd25f56-731a-4b58-837d-7d81dc9f595e\") " Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.261844 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecd25f56-731a-4b58-837d-7d81dc9f595e-combined-ca-bundle\") pod \"ecd25f56-731a-4b58-837d-7d81dc9f595e\" (UID: \"ecd25f56-731a-4b58-837d-7d81dc9f595e\") " Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.262188 4998 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ecd25f56-731a-4b58-837d-7d81dc9f595e-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.266929 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecd25f56-731a-4b58-837d-7d81dc9f595e-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "ecd25f56-731a-4b58-837d-7d81dc9f595e" (UID: "ecd25f56-731a-4b58-837d-7d81dc9f595e"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.267006 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ecd25f56-731a-4b58-837d-7d81dc9f595e-kube-api-access-krqn4" (OuterVolumeSpecName: "kube-api-access-krqn4") pod "ecd25f56-731a-4b58-837d-7d81dc9f595e" (UID: "ecd25f56-731a-4b58-837d-7d81dc9f595e"). InnerVolumeSpecName "kube-api-access-krqn4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.267499 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecd25f56-731a-4b58-837d-7d81dc9f595e-scripts" (OuterVolumeSpecName: "scripts") pod "ecd25f56-731a-4b58-837d-7d81dc9f595e" (UID: "ecd25f56-731a-4b58-837d-7d81dc9f595e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.307141 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecd25f56-731a-4b58-837d-7d81dc9f595e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ecd25f56-731a-4b58-837d-7d81dc9f595e" (UID: "ecd25f56-731a-4b58-837d-7d81dc9f595e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.355179 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-t4p58_5e71558d-268c-4680-b43c-9fb48f34b38f/ovs-vswitchd/0.log" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.357446 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-t4p58" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.358970 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecd25f56-731a-4b58-837d-7d81dc9f595e-config-data" (OuterVolumeSpecName: "config-data") pod "ecd25f56-731a-4b58-837d-7d81dc9f595e" (UID: "ecd25f56-731a-4b58-837d-7d81dc9f595e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.364560 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecd25f56-731a-4b58-837d-7d81dc9f595e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.364617 4998 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ecd25f56-731a-4b58-837d-7d81dc9f595e-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.364630 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecd25f56-731a-4b58-837d-7d81dc9f595e-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.364668 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-krqn4\" (UniqueName: \"kubernetes.io/projected/ecd25f56-731a-4b58-837d-7d81dc9f595e-kube-api-access-krqn4\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.364684 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecd25f56-731a-4b58-837d-7d81dc9f595e-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.462309 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.465409 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/5e71558d-268c-4680-b43c-9fb48f34b38f-etc-ovs\") pod \"5e71558d-268c-4680-b43c-9fb48f34b38f\" (UID: \"5e71558d-268c-4680-b43c-9fb48f34b38f\") " Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.465476 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/5e71558d-268c-4680-b43c-9fb48f34b38f-var-lib\") pod \"5e71558d-268c-4680-b43c-9fb48f34b38f\" (UID: \"5e71558d-268c-4680-b43c-9fb48f34b38f\") " Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.465485 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5e71558d-268c-4680-b43c-9fb48f34b38f-etc-ovs" (OuterVolumeSpecName: "etc-ovs") pod "5e71558d-268c-4680-b43c-9fb48f34b38f" (UID: "5e71558d-268c-4680-b43c-9fb48f34b38f"). InnerVolumeSpecName "etc-ovs". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.465502 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wz5jz\" (UniqueName: \"kubernetes.io/projected/5e71558d-268c-4680-b43c-9fb48f34b38f-kube-api-access-wz5jz\") pod \"5e71558d-268c-4680-b43c-9fb48f34b38f\" (UID: \"5e71558d-268c-4680-b43c-9fb48f34b38f\") " Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.465578 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5e71558d-268c-4680-b43c-9fb48f34b38f-var-lib" (OuterVolumeSpecName: "var-lib") pod "5e71558d-268c-4680-b43c-9fb48f34b38f" (UID: "5e71558d-268c-4680-b43c-9fb48f34b38f"). InnerVolumeSpecName "var-lib". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.465717 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5e71558d-268c-4680-b43c-9fb48f34b38f-scripts\") pod \"5e71558d-268c-4680-b43c-9fb48f34b38f\" (UID: \"5e71558d-268c-4680-b43c-9fb48f34b38f\") " Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.465767 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/5e71558d-268c-4680-b43c-9fb48f34b38f-var-log\") pod \"5e71558d-268c-4680-b43c-9fb48f34b38f\" (UID: \"5e71558d-268c-4680-b43c-9fb48f34b38f\") " Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.465831 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5e71558d-268c-4680-b43c-9fb48f34b38f-var-run\") pod \"5e71558d-268c-4680-b43c-9fb48f34b38f\" (UID: \"5e71558d-268c-4680-b43c-9fb48f34b38f\") " Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.465886 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5e71558d-268c-4680-b43c-9fb48f34b38f-var-log" (OuterVolumeSpecName: "var-log") pod "5e71558d-268c-4680-b43c-9fb48f34b38f" (UID: "5e71558d-268c-4680-b43c-9fb48f34b38f"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.465996 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5e71558d-268c-4680-b43c-9fb48f34b38f-var-run" (OuterVolumeSpecName: "var-run") pod "5e71558d-268c-4680-b43c-9fb48f34b38f" (UID: "5e71558d-268c-4680-b43c-9fb48f34b38f"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.466369 4998 reconciler_common.go:293] "Volume detached for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/5e71558d-268c-4680-b43c-9fb48f34b38f-var-lib\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.466386 4998 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/5e71558d-268c-4680-b43c-9fb48f34b38f-var-log\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.466395 4998 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5e71558d-268c-4680-b43c-9fb48f34b38f-var-run\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.466407 4998 reconciler_common.go:293] "Volume detached for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/5e71558d-268c-4680-b43c-9fb48f34b38f-etc-ovs\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.466712 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e71558d-268c-4680-b43c-9fb48f34b38f-scripts" (OuterVolumeSpecName: "scripts") pod "5e71558d-268c-4680-b43c-9fb48f34b38f" (UID: "5e71558d-268c-4680-b43c-9fb48f34b38f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.468419 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e71558d-268c-4680-b43c-9fb48f34b38f-kube-api-access-wz5jz" (OuterVolumeSpecName: "kube-api-access-wz5jz") pod "5e71558d-268c-4680-b43c-9fb48f34b38f" (UID: "5e71558d-268c-4680-b43c-9fb48f34b38f"). InnerVolumeSpecName "kube-api-access-wz5jz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.567413 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/09cd9158-f279-4ac0-b8fe-0121e85a1b20-cache\") pod \"09cd9158-f279-4ac0-b8fe-0121e85a1b20\" (UID: \"09cd9158-f279-4ac0-b8fe-0121e85a1b20\") " Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.567506 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7fpss\" (UniqueName: \"kubernetes.io/projected/09cd9158-f279-4ac0-b8fe-0121e85a1b20-kube-api-access-7fpss\") pod \"09cd9158-f279-4ac0-b8fe-0121e85a1b20\" (UID: \"09cd9158-f279-4ac0-b8fe-0121e85a1b20\") " Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.567582 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/09cd9158-f279-4ac0-b8fe-0121e85a1b20-etc-swift\") pod \"09cd9158-f279-4ac0-b8fe-0121e85a1b20\" (UID: \"09cd9158-f279-4ac0-b8fe-0121e85a1b20\") " Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.567613 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/09cd9158-f279-4ac0-b8fe-0121e85a1b20-lock\") pod \"09cd9158-f279-4ac0-b8fe-0121e85a1b20\" (UID: \"09cd9158-f279-4ac0-b8fe-0121e85a1b20\") " Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.567650 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09cd9158-f279-4ac0-b8fe-0121e85a1b20-combined-ca-bundle\") pod \"09cd9158-f279-4ac0-b8fe-0121e85a1b20\" (UID: \"09cd9158-f279-4ac0-b8fe-0121e85a1b20\") " Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.567681 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"09cd9158-f279-4ac0-b8fe-0121e85a1b20\" (UID: \"09cd9158-f279-4ac0-b8fe-0121e85a1b20\") " Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.567990 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wz5jz\" (UniqueName: \"kubernetes.io/projected/5e71558d-268c-4680-b43c-9fb48f34b38f-kube-api-access-wz5jz\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.568012 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5e71558d-268c-4680-b43c-9fb48f34b38f-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.568453 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09cd9158-f279-4ac0-b8fe-0121e85a1b20-lock" (OuterVolumeSpecName: "lock") pod "09cd9158-f279-4ac0-b8fe-0121e85a1b20" (UID: "09cd9158-f279-4ac0-b8fe-0121e85a1b20"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.568982 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09cd9158-f279-4ac0-b8fe-0121e85a1b20-cache" (OuterVolumeSpecName: "cache") pod "09cd9158-f279-4ac0-b8fe-0121e85a1b20" (UID: "09cd9158-f279-4ac0-b8fe-0121e85a1b20"). InnerVolumeSpecName "cache". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.571432 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "swift") pod "09cd9158-f279-4ac0-b8fe-0121e85a1b20" (UID: "09cd9158-f279-4ac0-b8fe-0121e85a1b20"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.571451 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09cd9158-f279-4ac0-b8fe-0121e85a1b20-kube-api-access-7fpss" (OuterVolumeSpecName: "kube-api-access-7fpss") pod "09cd9158-f279-4ac0-b8fe-0121e85a1b20" (UID: "09cd9158-f279-4ac0-b8fe-0121e85a1b20"). InnerVolumeSpecName "kube-api-access-7fpss". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.573176 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09cd9158-f279-4ac0-b8fe-0121e85a1b20-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "09cd9158-f279-4ac0-b8fe-0121e85a1b20" (UID: "09cd9158-f279-4ac0-b8fe-0121e85a1b20"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.669367 4998 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/09cd9158-f279-4ac0-b8fe-0121e85a1b20-cache\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.669406 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7fpss\" (UniqueName: \"kubernetes.io/projected/09cd9158-f279-4ac0-b8fe-0121e85a1b20-kube-api-access-7fpss\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.669446 4998 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/09cd9158-f279-4ac0-b8fe-0121e85a1b20-etc-swift\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.669456 4998 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/09cd9158-f279-4ac0-b8fe-0121e85a1b20-lock\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.669491 4998 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.690882 4998 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.770900 4998 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.773170 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-t4p58_5e71558d-268c-4680-b43c-9fb48f34b38f/ovs-vswitchd/0.log" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.774840 4998 generic.go:334] "Generic (PLEG): container finished" podID="5e71558d-268c-4680-b43c-9fb48f34b38f" 
containerID="5924834a1fe26ed73de03c1f7f47a03b21661cf375e099684ef40cf961a44b28" exitCode=137 Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.775000 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-t4p58" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.775524 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-t4p58" event={"ID":"5e71558d-268c-4680-b43c-9fb48f34b38f","Type":"ContainerDied","Data":"5924834a1fe26ed73de03c1f7f47a03b21661cf375e099684ef40cf961a44b28"} Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.775637 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-t4p58" event={"ID":"5e71558d-268c-4680-b43c-9fb48f34b38f","Type":"ContainerDied","Data":"9c4576de3e57b771dece917b27c46e4e33a5bf8685c225433d6735f65ab46790"} Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.775722 4998 scope.go:117] "RemoveContainer" containerID="5924834a1fe26ed73de03c1f7f47a03b21661cf375e099684ef40cf961a44b28" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.778823 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ecd25f56-731a-4b58-837d-7d81dc9f595e","Type":"ContainerDied","Data":"276eff2f6bb6b7fb95a43ddaf758b53f20b3bd86a8e91a55f102a2e3396c6b9e"} Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.778878 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.790618 4998 generic.go:334] "Generic (PLEG): container finished" podID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerID="8d491cb6f76bdb5ad3981bba0e2ccc67efd84f5bc5778d32aad45ae237cb920e" exitCode=137 Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.790672 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"09cd9158-f279-4ac0-b8fe-0121e85a1b20","Type":"ContainerDied","Data":"8d491cb6f76bdb5ad3981bba0e2ccc67efd84f5bc5778d32aad45ae237cb920e"} Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.790711 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"09cd9158-f279-4ac0-b8fe-0121e85a1b20","Type":"ContainerDied","Data":"d81c875fc5e7c5ea4b49f164f1c6719b557ebd7638ddc825f96d9a7ced3cb36b"} Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.790820 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.830215 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09cd9158-f279-4ac0-b8fe-0121e85a1b20-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "09cd9158-f279-4ac0-b8fe-0121e85a1b20" (UID: "09cd9158-f279-4ac0-b8fe-0121e85a1b20"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.853269 4998 scope.go:117] "RemoveContainer" containerID="f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.872096 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09cd9158-f279-4ac0-b8fe-0121e85a1b20-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.895835 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.907070 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.912832 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-t4p58"] Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.920361 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-ovs-t4p58"] Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.924813 4998 scope.go:117] "RemoveContainer" containerID="ebe92507b38a532d343256ce07ee02800377d7684eb15759ab4720c7695e1f77" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.986969 4998 scope.go:117] "RemoveContainer" containerID="5924834a1fe26ed73de03c1f7f47a03b21661cf375e099684ef40cf961a44b28" Feb 03 07:09:45 crc kubenswrapper[4998]: E0203 07:09:45.991976 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5924834a1fe26ed73de03c1f7f47a03b21661cf375e099684ef40cf961a44b28\": container with ID starting with 5924834a1fe26ed73de03c1f7f47a03b21661cf375e099684ef40cf961a44b28 not found: ID does not exist" containerID="5924834a1fe26ed73de03c1f7f47a03b21661cf375e099684ef40cf961a44b28" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.992027 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5924834a1fe26ed73de03c1f7f47a03b21661cf375e099684ef40cf961a44b28"} err="failed to get container status \"5924834a1fe26ed73de03c1f7f47a03b21661cf375e099684ef40cf961a44b28\": rpc error: code = NotFound desc = could not find container \"5924834a1fe26ed73de03c1f7f47a03b21661cf375e099684ef40cf961a44b28\": container with ID starting with 5924834a1fe26ed73de03c1f7f47a03b21661cf375e099684ef40cf961a44b28 not found: ID does not exist" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.992059 4998 scope.go:117] "RemoveContainer" containerID="f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba" Feb 03 07:09:45 crc kubenswrapper[4998]: E0203 07:09:45.995124 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba\": container with ID starting with f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba not found: ID does not exist" containerID="f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.995165 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba"} err="failed to get container status \"f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba\": rpc error: code = 
NotFound desc = could not find container \"f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba\": container with ID starting with f85f75149abeed849255a6d0f626f739397630ddacdcc737704a9bae30581fba not found: ID does not exist" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.995208 4998 scope.go:117] "RemoveContainer" containerID="ebe92507b38a532d343256ce07ee02800377d7684eb15759ab4720c7695e1f77" Feb 03 07:09:45 crc kubenswrapper[4998]: E0203 07:09:45.999118 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ebe92507b38a532d343256ce07ee02800377d7684eb15759ab4720c7695e1f77\": container with ID starting with ebe92507b38a532d343256ce07ee02800377d7684eb15759ab4720c7695e1f77 not found: ID does not exist" containerID="ebe92507b38a532d343256ce07ee02800377d7684eb15759ab4720c7695e1f77" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.999164 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ebe92507b38a532d343256ce07ee02800377d7684eb15759ab4720c7695e1f77"} err="failed to get container status \"ebe92507b38a532d343256ce07ee02800377d7684eb15759ab4720c7695e1f77\": rpc error: code = NotFound desc = could not find container \"ebe92507b38a532d343256ce07ee02800377d7684eb15759ab4720c7695e1f77\": container with ID starting with ebe92507b38a532d343256ce07ee02800377d7684eb15759ab4720c7695e1f77 not found: ID does not exist" Feb 03 07:09:45 crc kubenswrapper[4998]: I0203 07:09:45.999192 4998 scope.go:117] "RemoveContainer" containerID="232bbb0f6faa06716e49e70604c13f8b54cf7576fa7c9e4b6b3cd621731e2ac5" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.021014 4998 scope.go:117] "RemoveContainer" containerID="b9cf13c79eeb6224fa7a06ecf85b9c0950a6c413aa4a0ee378c2547496f98817" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.041228 4998 scope.go:117] "RemoveContainer" containerID="8d491cb6f76bdb5ad3981bba0e2ccc67efd84f5bc5778d32aad45ae237cb920e" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.056980 4998 scope.go:117] "RemoveContainer" containerID="91d35084ff382e39d07e6d23a955488bd8e1fc108bc54d221ff9c73527f12831" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.073721 4998 scope.go:117] "RemoveContainer" containerID="ede2cd76a85c73b54f858f8d327a0cbaf17ad5297a50c2492295ebff09d11252" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.089657 4998 scope.go:117] "RemoveContainer" containerID="81dac1e002ac3841ac510ce705948252966c00472415ebc65c67b828d80bdd5b" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.113687 4998 scope.go:117] "RemoveContainer" containerID="d71043657e67f511c84e1b80c92f0b68c1da1077f14a94c55b36f366b7a2f322" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.141461 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.142892 4998 scope.go:117] "RemoveContainer" containerID="10dab8bfcc6645bc889616f4f108cf19114ce18a2cadd19540c5dd170f5b6fcb" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.148939 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-storage-0"] Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.161945 4998 scope.go:117] "RemoveContainer" containerID="39817cf90fdb88cf824e056a9ffea636e16485fac2c9389858ca4ba9f02b85f5" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.181710 4998 scope.go:117] "RemoveContainer" 
containerID="a561394a411f9e9905f3eb76a7bc935495a1e0f71ad59bb7e60e389e4ccf2656" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.199739 4998 scope.go:117] "RemoveContainer" containerID="5c9ec6bd23284c2b390c4776da0627623c5642e7460b4cf117dc28bd400f8c57" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.215401 4998 scope.go:117] "RemoveContainer" containerID="e6e7540edfe4023348abde594012e510b3a4f31157f2972b831ea5fad3893831" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.233181 4998 scope.go:117] "RemoveContainer" containerID="ddd991a0dcb30ff0ae66164089cfc01b4cf70c05ee6132bec706f3086cbf8a29" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.260341 4998 scope.go:117] "RemoveContainer" containerID="1f16d4c637461caae324eec51a6c3d5da587838e38267ba1cef416d450d84f75" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.284859 4998 scope.go:117] "RemoveContainer" containerID="3eb1016c4bf7e03e9ac5f399eb54ed4c74b192d6e526a912f85055ec2a7696de" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.304713 4998 scope.go:117] "RemoveContainer" containerID="3a7581f975cd3425a6842b0b3e0cc3c6d6eb2078e3a460284ad126263371957f" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.324993 4998 scope.go:117] "RemoveContainer" containerID="0f784b826f4672ca3856308b4f90c8ceacf08cf471c4dba8d28ef78a91d7985f" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.340920 4998 scope.go:117] "RemoveContainer" containerID="8d491cb6f76bdb5ad3981bba0e2ccc67efd84f5bc5778d32aad45ae237cb920e" Feb 03 07:09:46 crc kubenswrapper[4998]: E0203 07:09:46.341375 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d491cb6f76bdb5ad3981bba0e2ccc67efd84f5bc5778d32aad45ae237cb920e\": container with ID starting with 8d491cb6f76bdb5ad3981bba0e2ccc67efd84f5bc5778d32aad45ae237cb920e not found: ID does not exist" containerID="8d491cb6f76bdb5ad3981bba0e2ccc67efd84f5bc5778d32aad45ae237cb920e" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.341420 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d491cb6f76bdb5ad3981bba0e2ccc67efd84f5bc5778d32aad45ae237cb920e"} err="failed to get container status \"8d491cb6f76bdb5ad3981bba0e2ccc67efd84f5bc5778d32aad45ae237cb920e\": rpc error: code = NotFound desc = could not find container \"8d491cb6f76bdb5ad3981bba0e2ccc67efd84f5bc5778d32aad45ae237cb920e\": container with ID starting with 8d491cb6f76bdb5ad3981bba0e2ccc67efd84f5bc5778d32aad45ae237cb920e not found: ID does not exist" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.341455 4998 scope.go:117] "RemoveContainer" containerID="91d35084ff382e39d07e6d23a955488bd8e1fc108bc54d221ff9c73527f12831" Feb 03 07:09:46 crc kubenswrapper[4998]: E0203 07:09:46.341757 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"91d35084ff382e39d07e6d23a955488bd8e1fc108bc54d221ff9c73527f12831\": container with ID starting with 91d35084ff382e39d07e6d23a955488bd8e1fc108bc54d221ff9c73527f12831 not found: ID does not exist" containerID="91d35084ff382e39d07e6d23a955488bd8e1fc108bc54d221ff9c73527f12831" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.341800 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91d35084ff382e39d07e6d23a955488bd8e1fc108bc54d221ff9c73527f12831"} err="failed to get container status \"91d35084ff382e39d07e6d23a955488bd8e1fc108bc54d221ff9c73527f12831\": rpc 
error: code = NotFound desc = could not find container \"91d35084ff382e39d07e6d23a955488bd8e1fc108bc54d221ff9c73527f12831\": container with ID starting with 91d35084ff382e39d07e6d23a955488bd8e1fc108bc54d221ff9c73527f12831 not found: ID does not exist" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.341824 4998 scope.go:117] "RemoveContainer" containerID="ede2cd76a85c73b54f858f8d327a0cbaf17ad5297a50c2492295ebff09d11252" Feb 03 07:09:46 crc kubenswrapper[4998]: E0203 07:09:46.342157 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ede2cd76a85c73b54f858f8d327a0cbaf17ad5297a50c2492295ebff09d11252\": container with ID starting with ede2cd76a85c73b54f858f8d327a0cbaf17ad5297a50c2492295ebff09d11252 not found: ID does not exist" containerID="ede2cd76a85c73b54f858f8d327a0cbaf17ad5297a50c2492295ebff09d11252" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.342187 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ede2cd76a85c73b54f858f8d327a0cbaf17ad5297a50c2492295ebff09d11252"} err="failed to get container status \"ede2cd76a85c73b54f858f8d327a0cbaf17ad5297a50c2492295ebff09d11252\": rpc error: code = NotFound desc = could not find container \"ede2cd76a85c73b54f858f8d327a0cbaf17ad5297a50c2492295ebff09d11252\": container with ID starting with ede2cd76a85c73b54f858f8d327a0cbaf17ad5297a50c2492295ebff09d11252 not found: ID does not exist" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.342206 4998 scope.go:117] "RemoveContainer" containerID="81dac1e002ac3841ac510ce705948252966c00472415ebc65c67b828d80bdd5b" Feb 03 07:09:46 crc kubenswrapper[4998]: E0203 07:09:46.342439 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"81dac1e002ac3841ac510ce705948252966c00472415ebc65c67b828d80bdd5b\": container with ID starting with 81dac1e002ac3841ac510ce705948252966c00472415ebc65c67b828d80bdd5b not found: ID does not exist" containerID="81dac1e002ac3841ac510ce705948252966c00472415ebc65c67b828d80bdd5b" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.342470 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81dac1e002ac3841ac510ce705948252966c00472415ebc65c67b828d80bdd5b"} err="failed to get container status \"81dac1e002ac3841ac510ce705948252966c00472415ebc65c67b828d80bdd5b\": rpc error: code = NotFound desc = could not find container \"81dac1e002ac3841ac510ce705948252966c00472415ebc65c67b828d80bdd5b\": container with ID starting with 81dac1e002ac3841ac510ce705948252966c00472415ebc65c67b828d80bdd5b not found: ID does not exist" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.342488 4998 scope.go:117] "RemoveContainer" containerID="d71043657e67f511c84e1b80c92f0b68c1da1077f14a94c55b36f366b7a2f322" Feb 03 07:09:46 crc kubenswrapper[4998]: E0203 07:09:46.342715 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d71043657e67f511c84e1b80c92f0b68c1da1077f14a94c55b36f366b7a2f322\": container with ID starting with d71043657e67f511c84e1b80c92f0b68c1da1077f14a94c55b36f366b7a2f322 not found: ID does not exist" containerID="d71043657e67f511c84e1b80c92f0b68c1da1077f14a94c55b36f366b7a2f322" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.342745 4998 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"d71043657e67f511c84e1b80c92f0b68c1da1077f14a94c55b36f366b7a2f322"} err="failed to get container status \"d71043657e67f511c84e1b80c92f0b68c1da1077f14a94c55b36f366b7a2f322\": rpc error: code = NotFound desc = could not find container \"d71043657e67f511c84e1b80c92f0b68c1da1077f14a94c55b36f366b7a2f322\": container with ID starting with d71043657e67f511c84e1b80c92f0b68c1da1077f14a94c55b36f366b7a2f322 not found: ID does not exist" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.342766 4998 scope.go:117] "RemoveContainer" containerID="10dab8bfcc6645bc889616f4f108cf19114ce18a2cadd19540c5dd170f5b6fcb" Feb 03 07:09:46 crc kubenswrapper[4998]: E0203 07:09:46.343059 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10dab8bfcc6645bc889616f4f108cf19114ce18a2cadd19540c5dd170f5b6fcb\": container with ID starting with 10dab8bfcc6645bc889616f4f108cf19114ce18a2cadd19540c5dd170f5b6fcb not found: ID does not exist" containerID="10dab8bfcc6645bc889616f4f108cf19114ce18a2cadd19540c5dd170f5b6fcb" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.343086 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10dab8bfcc6645bc889616f4f108cf19114ce18a2cadd19540c5dd170f5b6fcb"} err="failed to get container status \"10dab8bfcc6645bc889616f4f108cf19114ce18a2cadd19540c5dd170f5b6fcb\": rpc error: code = NotFound desc = could not find container \"10dab8bfcc6645bc889616f4f108cf19114ce18a2cadd19540c5dd170f5b6fcb\": container with ID starting with 10dab8bfcc6645bc889616f4f108cf19114ce18a2cadd19540c5dd170f5b6fcb not found: ID does not exist" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.343103 4998 scope.go:117] "RemoveContainer" containerID="39817cf90fdb88cf824e056a9ffea636e16485fac2c9389858ca4ba9f02b85f5" Feb 03 07:09:46 crc kubenswrapper[4998]: E0203 07:09:46.343338 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"39817cf90fdb88cf824e056a9ffea636e16485fac2c9389858ca4ba9f02b85f5\": container with ID starting with 39817cf90fdb88cf824e056a9ffea636e16485fac2c9389858ca4ba9f02b85f5 not found: ID does not exist" containerID="39817cf90fdb88cf824e056a9ffea636e16485fac2c9389858ca4ba9f02b85f5" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.343372 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39817cf90fdb88cf824e056a9ffea636e16485fac2c9389858ca4ba9f02b85f5"} err="failed to get container status \"39817cf90fdb88cf824e056a9ffea636e16485fac2c9389858ca4ba9f02b85f5\": rpc error: code = NotFound desc = could not find container \"39817cf90fdb88cf824e056a9ffea636e16485fac2c9389858ca4ba9f02b85f5\": container with ID starting with 39817cf90fdb88cf824e056a9ffea636e16485fac2c9389858ca4ba9f02b85f5 not found: ID does not exist" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.343395 4998 scope.go:117] "RemoveContainer" containerID="a561394a411f9e9905f3eb76a7bc935495a1e0f71ad59bb7e60e389e4ccf2656" Feb 03 07:09:46 crc kubenswrapper[4998]: E0203 07:09:46.343687 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a561394a411f9e9905f3eb76a7bc935495a1e0f71ad59bb7e60e389e4ccf2656\": container with ID starting with a561394a411f9e9905f3eb76a7bc935495a1e0f71ad59bb7e60e389e4ccf2656 not found: ID does not exist" 
containerID="a561394a411f9e9905f3eb76a7bc935495a1e0f71ad59bb7e60e389e4ccf2656" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.343711 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a561394a411f9e9905f3eb76a7bc935495a1e0f71ad59bb7e60e389e4ccf2656"} err="failed to get container status \"a561394a411f9e9905f3eb76a7bc935495a1e0f71ad59bb7e60e389e4ccf2656\": rpc error: code = NotFound desc = could not find container \"a561394a411f9e9905f3eb76a7bc935495a1e0f71ad59bb7e60e389e4ccf2656\": container with ID starting with a561394a411f9e9905f3eb76a7bc935495a1e0f71ad59bb7e60e389e4ccf2656 not found: ID does not exist" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.343726 4998 scope.go:117] "RemoveContainer" containerID="5c9ec6bd23284c2b390c4776da0627623c5642e7460b4cf117dc28bd400f8c57" Feb 03 07:09:46 crc kubenswrapper[4998]: E0203 07:09:46.343971 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c9ec6bd23284c2b390c4776da0627623c5642e7460b4cf117dc28bd400f8c57\": container with ID starting with 5c9ec6bd23284c2b390c4776da0627623c5642e7460b4cf117dc28bd400f8c57 not found: ID does not exist" containerID="5c9ec6bd23284c2b390c4776da0627623c5642e7460b4cf117dc28bd400f8c57" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.344000 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c9ec6bd23284c2b390c4776da0627623c5642e7460b4cf117dc28bd400f8c57"} err="failed to get container status \"5c9ec6bd23284c2b390c4776da0627623c5642e7460b4cf117dc28bd400f8c57\": rpc error: code = NotFound desc = could not find container \"5c9ec6bd23284c2b390c4776da0627623c5642e7460b4cf117dc28bd400f8c57\": container with ID starting with 5c9ec6bd23284c2b390c4776da0627623c5642e7460b4cf117dc28bd400f8c57 not found: ID does not exist" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.344013 4998 scope.go:117] "RemoveContainer" containerID="e6e7540edfe4023348abde594012e510b3a4f31157f2972b831ea5fad3893831" Feb 03 07:09:46 crc kubenswrapper[4998]: E0203 07:09:46.344215 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6e7540edfe4023348abde594012e510b3a4f31157f2972b831ea5fad3893831\": container with ID starting with e6e7540edfe4023348abde594012e510b3a4f31157f2972b831ea5fad3893831 not found: ID does not exist" containerID="e6e7540edfe4023348abde594012e510b3a4f31157f2972b831ea5fad3893831" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.344236 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6e7540edfe4023348abde594012e510b3a4f31157f2972b831ea5fad3893831"} err="failed to get container status \"e6e7540edfe4023348abde594012e510b3a4f31157f2972b831ea5fad3893831\": rpc error: code = NotFound desc = could not find container \"e6e7540edfe4023348abde594012e510b3a4f31157f2972b831ea5fad3893831\": container with ID starting with e6e7540edfe4023348abde594012e510b3a4f31157f2972b831ea5fad3893831 not found: ID does not exist" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.344249 4998 scope.go:117] "RemoveContainer" containerID="ddd991a0dcb30ff0ae66164089cfc01b4cf70c05ee6132bec706f3086cbf8a29" Feb 03 07:09:46 crc kubenswrapper[4998]: E0203 07:09:46.344552 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"ddd991a0dcb30ff0ae66164089cfc01b4cf70c05ee6132bec706f3086cbf8a29\": container with ID starting with ddd991a0dcb30ff0ae66164089cfc01b4cf70c05ee6132bec706f3086cbf8a29 not found: ID does not exist" containerID="ddd991a0dcb30ff0ae66164089cfc01b4cf70c05ee6132bec706f3086cbf8a29" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.344621 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ddd991a0dcb30ff0ae66164089cfc01b4cf70c05ee6132bec706f3086cbf8a29"} err="failed to get container status \"ddd991a0dcb30ff0ae66164089cfc01b4cf70c05ee6132bec706f3086cbf8a29\": rpc error: code = NotFound desc = could not find container \"ddd991a0dcb30ff0ae66164089cfc01b4cf70c05ee6132bec706f3086cbf8a29\": container with ID starting with ddd991a0dcb30ff0ae66164089cfc01b4cf70c05ee6132bec706f3086cbf8a29 not found: ID does not exist" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.344673 4998 scope.go:117] "RemoveContainer" containerID="1f16d4c637461caae324eec51a6c3d5da587838e38267ba1cef416d450d84f75" Feb 03 07:09:46 crc kubenswrapper[4998]: E0203 07:09:46.345141 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f16d4c637461caae324eec51a6c3d5da587838e38267ba1cef416d450d84f75\": container with ID starting with 1f16d4c637461caae324eec51a6c3d5da587838e38267ba1cef416d450d84f75 not found: ID does not exist" containerID="1f16d4c637461caae324eec51a6c3d5da587838e38267ba1cef416d450d84f75" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.345167 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f16d4c637461caae324eec51a6c3d5da587838e38267ba1cef416d450d84f75"} err="failed to get container status \"1f16d4c637461caae324eec51a6c3d5da587838e38267ba1cef416d450d84f75\": rpc error: code = NotFound desc = could not find container \"1f16d4c637461caae324eec51a6c3d5da587838e38267ba1cef416d450d84f75\": container with ID starting with 1f16d4c637461caae324eec51a6c3d5da587838e38267ba1cef416d450d84f75 not found: ID does not exist" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.345182 4998 scope.go:117] "RemoveContainer" containerID="3eb1016c4bf7e03e9ac5f399eb54ed4c74b192d6e526a912f85055ec2a7696de" Feb 03 07:09:46 crc kubenswrapper[4998]: E0203 07:09:46.345469 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3eb1016c4bf7e03e9ac5f399eb54ed4c74b192d6e526a912f85055ec2a7696de\": container with ID starting with 3eb1016c4bf7e03e9ac5f399eb54ed4c74b192d6e526a912f85055ec2a7696de not found: ID does not exist" containerID="3eb1016c4bf7e03e9ac5f399eb54ed4c74b192d6e526a912f85055ec2a7696de" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.345516 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3eb1016c4bf7e03e9ac5f399eb54ed4c74b192d6e526a912f85055ec2a7696de"} err="failed to get container status \"3eb1016c4bf7e03e9ac5f399eb54ed4c74b192d6e526a912f85055ec2a7696de\": rpc error: code = NotFound desc = could not find container \"3eb1016c4bf7e03e9ac5f399eb54ed4c74b192d6e526a912f85055ec2a7696de\": container with ID starting with 3eb1016c4bf7e03e9ac5f399eb54ed4c74b192d6e526a912f85055ec2a7696de not found: ID does not exist" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.345544 4998 scope.go:117] "RemoveContainer" containerID="3a7581f975cd3425a6842b0b3e0cc3c6d6eb2078e3a460284ad126263371957f" Feb 03 07:09:46 crc 
kubenswrapper[4998]: E0203 07:09:46.345878 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a7581f975cd3425a6842b0b3e0cc3c6d6eb2078e3a460284ad126263371957f\": container with ID starting with 3a7581f975cd3425a6842b0b3e0cc3c6d6eb2078e3a460284ad126263371957f not found: ID does not exist" containerID="3a7581f975cd3425a6842b0b3e0cc3c6d6eb2078e3a460284ad126263371957f" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.345907 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a7581f975cd3425a6842b0b3e0cc3c6d6eb2078e3a460284ad126263371957f"} err="failed to get container status \"3a7581f975cd3425a6842b0b3e0cc3c6d6eb2078e3a460284ad126263371957f\": rpc error: code = NotFound desc = could not find container \"3a7581f975cd3425a6842b0b3e0cc3c6d6eb2078e3a460284ad126263371957f\": container with ID starting with 3a7581f975cd3425a6842b0b3e0cc3c6d6eb2078e3a460284ad126263371957f not found: ID does not exist" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.345926 4998 scope.go:117] "RemoveContainer" containerID="0f784b826f4672ca3856308b4f90c8ceacf08cf471c4dba8d28ef78a91d7985f" Feb 03 07:09:46 crc kubenswrapper[4998]: E0203 07:09:46.346193 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f784b826f4672ca3856308b4f90c8ceacf08cf471c4dba8d28ef78a91d7985f\": container with ID starting with 0f784b826f4672ca3856308b4f90c8ceacf08cf471c4dba8d28ef78a91d7985f not found: ID does not exist" containerID="0f784b826f4672ca3856308b4f90c8ceacf08cf471c4dba8d28ef78a91d7985f" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.346234 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f784b826f4672ca3856308b4f90c8ceacf08cf471c4dba8d28ef78a91d7985f"} err="failed to get container status \"0f784b826f4672ca3856308b4f90c8ceacf08cf471c4dba8d28ef78a91d7985f\": rpc error: code = NotFound desc = could not find container \"0f784b826f4672ca3856308b4f90c8ceacf08cf471c4dba8d28ef78a91d7985f\": container with ID starting with 0f784b826f4672ca3856308b4f90c8ceacf08cf471c4dba8d28ef78a91d7985f not found: ID does not exist" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.443344 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" path="/var/lib/kubelet/pods/09cd9158-f279-4ac0-b8fe-0121e85a1b20/volumes" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.445772 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e71558d-268c-4680-b43c-9fb48f34b38f" path="/var/lib/kubelet/pods/5e71558d-268c-4680-b43c-9fb48f34b38f/volumes" Feb 03 07:09:46 crc kubenswrapper[4998]: I0203 07:09:46.446524 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ecd25f56-731a-4b58-837d-7d81dc9f595e" path="/var/lib/kubelet/pods/ecd25f56-731a-4b58-837d-7d81dc9f595e/volumes" Feb 03 07:11:04 crc kubenswrapper[4998]: I0203 07:11:04.339394 4998 scope.go:117] "RemoveContainer" containerID="ac9c68809e2d90ac2e9b04b6da1d48989afb04d2b09a5e840256a3d3c2cf1c3c" Feb 03 07:11:04 crc kubenswrapper[4998]: I0203 07:11:04.375663 4998 scope.go:117] "RemoveContainer" containerID="da40d3ae9e0fb4a730588154d5aae63f5fafc0fc036902d80ca808d54e2b4008" Feb 03 07:11:04 crc kubenswrapper[4998]: I0203 07:11:04.423224 4998 scope.go:117] "RemoveContainer" 
containerID="d1d6af522a783d7674ee61fcb657dee27565b766177bbd086e16a0647a84bd0d" Feb 03 07:11:04 crc kubenswrapper[4998]: I0203 07:11:04.447150 4998 scope.go:117] "RemoveContainer" containerID="d7fe5bc42fb67c153e920d4f68e75da5b8c4e36ef046433582efb38a391c6014" Feb 03 07:11:04 crc kubenswrapper[4998]: I0203 07:11:04.479129 4998 scope.go:117] "RemoveContainer" containerID="b0e3135bc61b1626cfe74dd7ffee1f8ebf4fa81d82f114e79010f590b3fe02ca" Feb 03 07:11:04 crc kubenswrapper[4998]: I0203 07:11:04.502994 4998 scope.go:117] "RemoveContainer" containerID="3a5719b9e841ae2459fb7dd3047f31ab6b4c642cf9f14d11a5ef0e3e58cedb5c" Feb 03 07:11:04 crc kubenswrapper[4998]: I0203 07:11:04.529917 4998 scope.go:117] "RemoveContainer" containerID="a491bbb6b2cb80c8238082a8dedea874440d41b3f00f624c337e1bbe06455465" Feb 03 07:11:04 crc kubenswrapper[4998]: I0203 07:11:04.568093 4998 scope.go:117] "RemoveContainer" containerID="061ac6733e0bb70acdd97b1a3c75dfe63d66d1c4c9298951ec4792658e9cdb4e" Feb 03 07:11:04 crc kubenswrapper[4998]: I0203 07:11:04.588431 4998 scope.go:117] "RemoveContainer" containerID="ccf47ac913228d1d870b36da0922a4284dd8fff26b107cf29c51bc32838a015b" Feb 03 07:11:04 crc kubenswrapper[4998]: I0203 07:11:04.608525 4998 scope.go:117] "RemoveContainer" containerID="e7a97b7a54e21576462742ef1d8dc1f7c269e1841dc59cd469d2f08c927fcbf6" Feb 03 07:11:04 crc kubenswrapper[4998]: I0203 07:11:04.639003 4998 scope.go:117] "RemoveContainer" containerID="1344285d68105646b73eef86c4c93645fecb1cf62976d14576dbc6aeeb280daf" Feb 03 07:11:04 crc kubenswrapper[4998]: I0203 07:11:04.661449 4998 scope.go:117] "RemoveContainer" containerID="3fe1b15374bfe70f0ac51f95cba99d06796b653c66388ab59d23f43ca14f0122" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.623929 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-w4tbg"] Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.624826 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18701d06-8e80-4822-9128-dd9ba0e5bf1c" containerName="glance-httpd" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.624842 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="18701d06-8e80-4822-9128-dd9ba0e5bf1c" containerName="glance-httpd" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.624861 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c122d5d6-c472-46c4-9baf-195893bff38a" containerName="placement-log" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.624869 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="c122d5d6-c472-46c4-9baf-195893bff38a" containerName="placement-log" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.624878 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e71558d-268c-4680-b43c-9fb48f34b38f" containerName="ovsdb-server-init" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.624886 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e71558d-268c-4680-b43c-9fb48f34b38f" containerName="ovsdb-server-init" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.624901 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07088226-5029-4477-a6e1-85fd28c08f4b" containerName="openstack-network-exporter" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.624908 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="07088226-5029-4477-a6e1-85fd28c08f4b" containerName="openstack-network-exporter" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.624921 4998 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="03de7e09-127d-4746-a01c-e5b6a3618014" containerName="extract-content" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.624929 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="03de7e09-127d-4746-a01c-e5b6a3618014" containerName="extract-content" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.624937 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c122d5d6-c472-46c4-9baf-195893bff38a" containerName="placement-api" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.624945 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="c122d5d6-c472-46c4-9baf-195893bff38a" containerName="placement-api" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.624959 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59f5a5d7-787a-4941-a2d3-2fe8db65cb31" containerName="rabbitmq" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.624966 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="59f5a5d7-787a-4941-a2d3-2fe8db65cb31" containerName="rabbitmq" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.624977 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20fec019-d2d1-4625-960c-c16004cfa5aa" containerName="ceilometer-notification-agent" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.624984 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="20fec019-d2d1-4625-960c-c16004cfa5aa" containerName="ceilometer-notification-agent" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.624996 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="account-auditor" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625004 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="account-auditor" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625012 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59f5a5d7-787a-4941-a2d3-2fe8db65cb31" containerName="setup-container" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625019 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="59f5a5d7-787a-4941-a2d3-2fe8db65cb31" containerName="setup-container" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625028 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc9d5160-2c51-474c-aca1-1af693753ee8" containerName="setup-container" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625036 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc9d5160-2c51-474c-aca1-1af693753ee8" containerName="setup-container" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625045 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e13372a-d92b-4928-9e27-c1422d685e05" containerName="keystone-api" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625052 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e13372a-d92b-4928-9e27-c1422d685e05" containerName="keystone-api" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625063 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="container-server" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625071 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="container-server" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625079 
4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e71558d-268c-4680-b43c-9fb48f34b38f" containerName="ovsdb-server" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625087 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e71558d-268c-4680-b43c-9fb48f34b38f" containerName="ovsdb-server" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625101 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1280d3d-d626-4af9-b262-93fea6a5bbc9" containerName="nova-api-log" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625108 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1280d3d-d626-4af9-b262-93fea6a5bbc9" containerName="nova-api-log" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625117 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a94b6e1-cdf7-4088-9f55-60457fa411f4" containerName="nova-scheduler-scheduler" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625126 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a94b6e1-cdf7-4088-9f55-60457fa411f4" containerName="nova-scheduler-scheduler" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625136 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59162297-8dd9-4ddd-a18b-8045d2f6c610" containerName="cinder-api-log" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625143 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="59162297-8dd9-4ddd-a18b-8045d2f6c610" containerName="cinder-api-log" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625155 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5714626-00c5-4b11-b056-40ff428fc017" containerName="mysql-bootstrap" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625162 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5714626-00c5-4b11-b056-40ff428fc017" containerName="mysql-bootstrap" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625176 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20fec019-d2d1-4625-960c-c16004cfa5aa" containerName="proxy-httpd" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625182 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="20fec019-d2d1-4625-960c-c16004cfa5aa" containerName="proxy-httpd" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625192 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e24d47fe-485b-4ceb-bdab-25b10ac92fa2" containerName="neutron-api" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625200 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="e24d47fe-485b-4ceb-bdab-25b10ac92fa2" containerName="neutron-api" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625212 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11bbac6a-fd7e-447b-af99-d0ebada848df" containerName="kube-state-metrics" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625219 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="11bbac6a-fd7e-447b-af99-d0ebada848df" containerName="kube-state-metrics" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625227 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13caad28-67cf-4251-9a98-e324e6f9722a" containerName="barbican-keystone-listener" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625234 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="13caad28-67cf-4251-9a98-e324e6f9722a" containerName="barbican-keystone-listener" Feb 03 07:11:39 crc 
kubenswrapper[4998]: E0203 07:11:39.625242 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecd25f56-731a-4b58-837d-7d81dc9f595e" containerName="cinder-scheduler" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625248 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecd25f56-731a-4b58-837d-7d81dc9f595e" containerName="cinder-scheduler" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625255 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e51da52-0dd9-4394-bb81-c4a1e534ad17" containerName="nova-metadata-metadata" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625263 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e51da52-0dd9-4394-bb81-c4a1e534ad17" containerName="nova-metadata-metadata" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625273 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecd25f56-731a-4b58-837d-7d81dc9f595e" containerName="probe" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625280 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecd25f56-731a-4b58-837d-7d81dc9f595e" containerName="probe" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625288 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e24d47fe-485b-4ceb-bdab-25b10ac92fa2" containerName="neutron-httpd" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625295 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="e24d47fe-485b-4ceb-bdab-25b10ac92fa2" containerName="neutron-httpd" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625308 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="account-reaper" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625315 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="account-reaper" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625324 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc9d5160-2c51-474c-aca1-1af693753ee8" containerName="rabbitmq" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625331 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc9d5160-2c51-474c-aca1-1af693753ee8" containerName="rabbitmq" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625339 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5714626-00c5-4b11-b056-40ff428fc017" containerName="galera" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625347 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5714626-00c5-4b11-b056-40ff428fc017" containerName="galera" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625359 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="container-auditor" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625366 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="container-auditor" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625377 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0df5b57a-e165-41ef-8e19-30b87b9566f3" containerName="barbican-worker-log" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625385 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="0df5b57a-e165-41ef-8e19-30b87b9566f3" containerName="barbican-worker-log" Feb 03 07:11:39 crc 
kubenswrapper[4998]: E0203 07:11:39.625397 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92b2a8f5-7091-4044-a057-3fc94b78439c" containerName="barbican-api-log" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625405 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="92b2a8f5-7091-4044-a057-3fc94b78439c" containerName="barbican-api-log" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625418 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="swift-recon-cron" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625425 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="swift-recon-cron" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625433 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4767a1f8-263b-4c49-8cb7-7a7f9b8271dc" containerName="glance-log" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625440 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="4767a1f8-263b-4c49-8cb7-7a7f9b8271dc" containerName="glance-log" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625448 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20fec019-d2d1-4625-960c-c16004cfa5aa" containerName="sg-core" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625454 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="20fec019-d2d1-4625-960c-c16004cfa5aa" containerName="sg-core" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625467 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03de7e09-127d-4746-a01c-e5b6a3618014" containerName="extract-utilities" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625474 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="03de7e09-127d-4746-a01c-e5b6a3618014" containerName="extract-utilities" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625484 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13caad28-67cf-4251-9a98-e324e6f9722a" containerName="barbican-keystone-listener-log" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625491 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="13caad28-67cf-4251-9a98-e324e6f9722a" containerName="barbican-keystone-listener-log" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625497 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92b2a8f5-7091-4044-a057-3fc94b78439c" containerName="barbican-api" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625503 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="92b2a8f5-7091-4044-a057-3fc94b78439c" containerName="barbican-api" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625512 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18701d06-8e80-4822-9128-dd9ba0e5bf1c" containerName="glance-log" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625517 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="18701d06-8e80-4822-9128-dd9ba0e5bf1c" containerName="glance-log" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625527 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4767a1f8-263b-4c49-8cb7-7a7f9b8271dc" containerName="glance-httpd" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625532 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="4767a1f8-263b-4c49-8cb7-7a7f9b8271dc" containerName="glance-httpd" Feb 03 
07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625541 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="rsync" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625547 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="rsync" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625557 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="account-server" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625563 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="account-server" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625573 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec1012bb-b11f-4248-aa77-f9076a2a1fc9" containerName="nova-cell0-conductor-conductor" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625578 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec1012bb-b11f-4248-aa77-f9076a2a1fc9" containerName="nova-cell0-conductor-conductor" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625585 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="object-expirer" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625593 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="object-expirer" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625604 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59162297-8dd9-4ddd-a18b-8045d2f6c610" containerName="cinder-api" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625611 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="59162297-8dd9-4ddd-a18b-8045d2f6c610" containerName="cinder-api" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625618 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="account-replicator" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625624 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="account-replicator" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625631 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="object-replicator" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625638 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="object-replicator" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625647 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07088226-5029-4477-a6e1-85fd28c08f4b" containerName="ovn-northd" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625653 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="07088226-5029-4477-a6e1-85fd28c08f4b" containerName="ovn-northd" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625660 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="container-updater" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625667 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" 
containerName="container-updater" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625674 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20fec019-d2d1-4625-960c-c16004cfa5aa" containerName="ceilometer-central-agent" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625680 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="20fec019-d2d1-4625-960c-c16004cfa5aa" containerName="ceilometer-central-agent" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625686 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="object-updater" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625693 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="object-updater" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625699 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0df5b57a-e165-41ef-8e19-30b87b9566f3" containerName="barbican-worker" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625706 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="0df5b57a-e165-41ef-8e19-30b87b9566f3" containerName="barbican-worker" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625713 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="object-server" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625719 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="object-server" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625726 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c2bbe1b-74b6-4e3c-8468-735ad0b00146" containerName="memcached" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625731 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c2bbe1b-74b6-4e3c-8468-735ad0b00146" containerName="memcached" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625739 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="object-auditor" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625746 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="object-auditor" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625754 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e71558d-268c-4680-b43c-9fb48f34b38f" containerName="ovs-vswitchd" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625760 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e71558d-268c-4680-b43c-9fb48f34b38f" containerName="ovs-vswitchd" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625767 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e51da52-0dd9-4394-bb81-c4a1e534ad17" containerName="nova-metadata-log" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625773 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e51da52-0dd9-4394-bb81-c4a1e534ad17" containerName="nova-metadata-log" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625796 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03de7e09-127d-4746-a01c-e5b6a3618014" containerName="registry-server" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625802 4998 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="03de7e09-127d-4746-a01c-e5b6a3618014" containerName="registry-server" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625811 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="container-replicator" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625816 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="container-replicator" Feb 03 07:11:39 crc kubenswrapper[4998]: E0203 07:11:39.625824 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1280d3d-d626-4af9-b262-93fea6a5bbc9" containerName="nova-api-api" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625830 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1280d3d-d626-4af9-b262-93fea6a5bbc9" containerName="nova-api-api" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625954 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="18701d06-8e80-4822-9128-dd9ba0e5bf1c" containerName="glance-httpd" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625968 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e71558d-268c-4680-b43c-9fb48f34b38f" containerName="ovsdb-server" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625978 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e71558d-268c-4680-b43c-9fb48f34b38f" containerName="ovs-vswitchd" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625986 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="20fec019-d2d1-4625-960c-c16004cfa5aa" containerName="sg-core" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.625996 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecd25f56-731a-4b58-837d-7d81dc9f595e" containerName="probe" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626004 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="container-auditor" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626012 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1280d3d-d626-4af9-b262-93fea6a5bbc9" containerName="nova-api-log" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626021 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="object-auditor" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626031 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5714626-00c5-4b11-b056-40ff428fc017" containerName="galera" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626037 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="e24d47fe-485b-4ceb-bdab-25b10ac92fa2" containerName="neutron-httpd" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626043 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="20fec019-d2d1-4625-960c-c16004cfa5aa" containerName="proxy-httpd" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626053 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="92b2a8f5-7091-4044-a057-3fc94b78439c" containerName="barbican-api" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626062 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c2bbe1b-74b6-4e3c-8468-735ad0b00146" containerName="memcached" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626071 4998 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="account-auditor" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626081 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="07088226-5029-4477-a6e1-85fd28c08f4b" containerName="ovn-northd" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626087 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="account-replicator" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626097 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="container-replicator" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626105 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="rsync" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626113 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="13caad28-67cf-4251-9a98-e324e6f9722a" containerName="barbican-keystone-listener" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626121 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e51da52-0dd9-4394-bb81-c4a1e534ad17" containerName="nova-metadata-metadata" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626127 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="container-updater" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626134 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="account-reaper" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626140 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="object-expirer" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626147 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="07088226-5029-4477-a6e1-85fd28c08f4b" containerName="openstack-network-exporter" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626156 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc9d5160-2c51-474c-aca1-1af693753ee8" containerName="rabbitmq" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626162 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="object-server" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626169 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="20fec019-d2d1-4625-960c-c16004cfa5aa" containerName="ceilometer-central-agent" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626177 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e13372a-d92b-4928-9e27-c1422d685e05" containerName="keystone-api" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626184 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="c122d5d6-c472-46c4-9baf-195893bff38a" containerName="placement-log" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626189 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecd25f56-731a-4b58-837d-7d81dc9f595e" containerName="cinder-scheduler" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626196 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="4767a1f8-263b-4c49-8cb7-7a7f9b8271dc" 
containerName="glance-httpd" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626205 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1280d3d-d626-4af9-b262-93fea6a5bbc9" containerName="nova-api-api" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626211 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="c122d5d6-c472-46c4-9baf-195893bff38a" containerName="placement-api" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626220 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="18701d06-8e80-4822-9128-dd9ba0e5bf1c" containerName="glance-log" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626226 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="e24d47fe-485b-4ceb-bdab-25b10ac92fa2" containerName="neutron-api" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626236 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="object-updater" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626245 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="swift-recon-cron" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626251 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="13caad28-67cf-4251-9a98-e324e6f9722a" containerName="barbican-keystone-listener-log" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626258 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="account-server" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626265 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="20fec019-d2d1-4625-960c-c16004cfa5aa" containerName="ceilometer-notification-agent" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626272 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="0df5b57a-e165-41ef-8e19-30b87b9566f3" containerName="barbican-worker-log" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626279 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="11bbac6a-fd7e-447b-af99-d0ebada848df" containerName="kube-state-metrics" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626287 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="59f5a5d7-787a-4941-a2d3-2fe8db65cb31" containerName="rabbitmq" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626295 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="4767a1f8-263b-4c49-8cb7-7a7f9b8271dc" containerName="glance-log" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626302 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec1012bb-b11f-4248-aa77-f9076a2a1fc9" containerName="nova-cell0-conductor-conductor" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626312 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="59162297-8dd9-4ddd-a18b-8045d2f6c610" containerName="cinder-api" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626321 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="0df5b57a-e165-41ef-8e19-30b87b9566f3" containerName="barbican-worker" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626329 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a94b6e1-cdf7-4088-9f55-60457fa411f4" containerName="nova-scheduler-scheduler" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 
07:11:39.626336 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e51da52-0dd9-4394-bb81-c4a1e534ad17" containerName="nova-metadata-log" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626343 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="object-replicator" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626353 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="92b2a8f5-7091-4044-a057-3fc94b78439c" containerName="barbican-api-log" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626362 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="03de7e09-127d-4746-a01c-e5b6a3618014" containerName="registry-server" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626370 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="09cd9158-f279-4ac0-b8fe-0121e85a1b20" containerName="container-server" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.626377 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="59162297-8dd9-4ddd-a18b-8045d2f6c610" containerName="cinder-api-log" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.627344 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-w4tbg" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.647316 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-w4tbg"] Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.774616 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1-catalog-content\") pod \"certified-operators-w4tbg\" (UID: \"0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1\") " pod="openshift-marketplace/certified-operators-w4tbg" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.774698 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1-utilities\") pod \"certified-operators-w4tbg\" (UID: \"0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1\") " pod="openshift-marketplace/certified-operators-w4tbg" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.774933 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b45fh\" (UniqueName: \"kubernetes.io/projected/0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1-kube-api-access-b45fh\") pod \"certified-operators-w4tbg\" (UID: \"0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1\") " pod="openshift-marketplace/certified-operators-w4tbg" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.876320 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1-catalog-content\") pod \"certified-operators-w4tbg\" (UID: \"0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1\") " pod="openshift-marketplace/certified-operators-w4tbg" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.876412 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1-utilities\") pod \"certified-operators-w4tbg\" (UID: \"0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1\") " 
pod="openshift-marketplace/certified-operators-w4tbg" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.876498 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b45fh\" (UniqueName: \"kubernetes.io/projected/0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1-kube-api-access-b45fh\") pod \"certified-operators-w4tbg\" (UID: \"0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1\") " pod="openshift-marketplace/certified-operators-w4tbg" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.876904 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1-catalog-content\") pod \"certified-operators-w4tbg\" (UID: \"0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1\") " pod="openshift-marketplace/certified-operators-w4tbg" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.876927 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1-utilities\") pod \"certified-operators-w4tbg\" (UID: \"0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1\") " pod="openshift-marketplace/certified-operators-w4tbg" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.896930 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b45fh\" (UniqueName: \"kubernetes.io/projected/0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1-kube-api-access-b45fh\") pod \"certified-operators-w4tbg\" (UID: \"0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1\") " pod="openshift-marketplace/certified-operators-w4tbg" Feb 03 07:11:39 crc kubenswrapper[4998]: I0203 07:11:39.946151 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-w4tbg" Feb 03 07:11:40 crc kubenswrapper[4998]: I0203 07:11:40.483154 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-w4tbg"] Feb 03 07:11:40 crc kubenswrapper[4998]: I0203 07:11:40.798651 4998 generic.go:334] "Generic (PLEG): container finished" podID="0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1" containerID="ff116df1ea489bc9687583d02a444e54f528da748d83409651975bef1ea5ea89" exitCode=0 Feb 03 07:11:40 crc kubenswrapper[4998]: I0203 07:11:40.798733 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w4tbg" event={"ID":"0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1","Type":"ContainerDied","Data":"ff116df1ea489bc9687583d02a444e54f528da748d83409651975bef1ea5ea89"} Feb 03 07:11:40 crc kubenswrapper[4998]: I0203 07:11:40.801437 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w4tbg" event={"ID":"0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1","Type":"ContainerStarted","Data":"425272879ef164fd22ea17ea5c5c01010910e720ecc1564bceb4640a9d1ee557"} Feb 03 07:11:41 crc kubenswrapper[4998]: I0203 07:11:41.814671 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w4tbg" event={"ID":"0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1","Type":"ContainerStarted","Data":"aca1df8642bb2fe30006b77635a4421fdb6af458f7e01a7833c3ad1d64780ef0"} Feb 03 07:11:42 crc kubenswrapper[4998]: I0203 07:11:42.823436 4998 generic.go:334] "Generic (PLEG): container finished" podID="0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1" containerID="aca1df8642bb2fe30006b77635a4421fdb6af458f7e01a7833c3ad1d64780ef0" exitCode=0 Feb 03 07:11:42 crc kubenswrapper[4998]: I0203 07:11:42.823490 4998 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w4tbg" event={"ID":"0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1","Type":"ContainerDied","Data":"aca1df8642bb2fe30006b77635a4421fdb6af458f7e01a7833c3ad1d64780ef0"} Feb 03 07:11:43 crc kubenswrapper[4998]: I0203 07:11:43.833630 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w4tbg" event={"ID":"0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1","Type":"ContainerStarted","Data":"6cff21974c69ed332a06501a745b9eff4d9daad002b3f0b08542eddba44fa658"} Feb 03 07:11:43 crc kubenswrapper[4998]: I0203 07:11:43.862679 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-w4tbg" podStartSLOduration=2.443175507 podStartE2EDuration="4.862659752s" podCreationTimestamp="2026-02-03 07:11:39 +0000 UTC" firstStartedPulling="2026-02-03 07:11:40.801451799 +0000 UTC m=+1539.088145625" lastFinishedPulling="2026-02-03 07:11:43.220936054 +0000 UTC m=+1541.507629870" observedRunningTime="2026-02-03 07:11:43.858959437 +0000 UTC m=+1542.145653253" watchObservedRunningTime="2026-02-03 07:11:43.862659752 +0000 UTC m=+1542.149353558" Feb 03 07:11:49 crc kubenswrapper[4998]: I0203 07:11:49.946941 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-w4tbg" Feb 03 07:11:49 crc kubenswrapper[4998]: I0203 07:11:49.947287 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-w4tbg" Feb 03 07:11:50 crc kubenswrapper[4998]: I0203 07:11:50.022058 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-w4tbg" Feb 03 07:11:50 crc kubenswrapper[4998]: I0203 07:11:50.921901 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-w4tbg" Feb 03 07:11:50 crc kubenswrapper[4998]: I0203 07:11:50.965144 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-w4tbg"] Feb 03 07:11:52 crc kubenswrapper[4998]: I0203 07:11:52.902008 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-w4tbg" podUID="0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1" containerName="registry-server" containerID="cri-o://6cff21974c69ed332a06501a745b9eff4d9daad002b3f0b08542eddba44fa658" gracePeriod=2 Feb 03 07:11:53 crc kubenswrapper[4998]: I0203 07:11:53.329468 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-w4tbg" Feb 03 07:11:53 crc kubenswrapper[4998]: I0203 07:11:53.480881 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1-utilities\") pod \"0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1\" (UID: \"0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1\") " Feb 03 07:11:53 crc kubenswrapper[4998]: I0203 07:11:53.481138 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1-catalog-content\") pod \"0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1\" (UID: \"0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1\") " Feb 03 07:11:53 crc kubenswrapper[4998]: I0203 07:11:53.481270 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b45fh\" (UniqueName: \"kubernetes.io/projected/0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1-kube-api-access-b45fh\") pod \"0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1\" (UID: \"0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1\") " Feb 03 07:11:53 crc kubenswrapper[4998]: I0203 07:11:53.482457 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1-utilities" (OuterVolumeSpecName: "utilities") pod "0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1" (UID: "0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:11:53 crc kubenswrapper[4998]: I0203 07:11:53.488054 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1-kube-api-access-b45fh" (OuterVolumeSpecName: "kube-api-access-b45fh") pod "0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1" (UID: "0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1"). InnerVolumeSpecName "kube-api-access-b45fh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:11:53 crc kubenswrapper[4998]: I0203 07:11:53.537917 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1" (UID: "0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:11:53 crc kubenswrapper[4998]: I0203 07:11:53.582658 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:11:53 crc kubenswrapper[4998]: I0203 07:11:53.582690 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:11:53 crc kubenswrapper[4998]: I0203 07:11:53.582702 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b45fh\" (UniqueName: \"kubernetes.io/projected/0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1-kube-api-access-b45fh\") on node \"crc\" DevicePath \"\"" Feb 03 07:11:53 crc kubenswrapper[4998]: I0203 07:11:53.916062 4998 generic.go:334] "Generic (PLEG): container finished" podID="0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1" containerID="6cff21974c69ed332a06501a745b9eff4d9daad002b3f0b08542eddba44fa658" exitCode=0 Feb 03 07:11:53 crc kubenswrapper[4998]: I0203 07:11:53.916124 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w4tbg" event={"ID":"0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1","Type":"ContainerDied","Data":"6cff21974c69ed332a06501a745b9eff4d9daad002b3f0b08542eddba44fa658"} Feb 03 07:11:53 crc kubenswrapper[4998]: I0203 07:11:53.916188 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w4tbg" event={"ID":"0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1","Type":"ContainerDied","Data":"425272879ef164fd22ea17ea5c5c01010910e720ecc1564bceb4640a9d1ee557"} Feb 03 07:11:53 crc kubenswrapper[4998]: I0203 07:11:53.916206 4998 scope.go:117] "RemoveContainer" containerID="6cff21974c69ed332a06501a745b9eff4d9daad002b3f0b08542eddba44fa658" Feb 03 07:11:53 crc kubenswrapper[4998]: I0203 07:11:53.916223 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-w4tbg" Feb 03 07:11:53 crc kubenswrapper[4998]: I0203 07:11:53.942881 4998 scope.go:117] "RemoveContainer" containerID="aca1df8642bb2fe30006b77635a4421fdb6af458f7e01a7833c3ad1d64780ef0" Feb 03 07:11:53 crc kubenswrapper[4998]: I0203 07:11:53.956699 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-w4tbg"] Feb 03 07:11:53 crc kubenswrapper[4998]: I0203 07:11:53.962060 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-w4tbg"] Feb 03 07:11:53 crc kubenswrapper[4998]: I0203 07:11:53.985285 4998 scope.go:117] "RemoveContainer" containerID="ff116df1ea489bc9687583d02a444e54f528da748d83409651975bef1ea5ea89" Feb 03 07:11:54 crc kubenswrapper[4998]: I0203 07:11:54.000407 4998 scope.go:117] "RemoveContainer" containerID="6cff21974c69ed332a06501a745b9eff4d9daad002b3f0b08542eddba44fa658" Feb 03 07:11:54 crc kubenswrapper[4998]: E0203 07:11:54.000934 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6cff21974c69ed332a06501a745b9eff4d9daad002b3f0b08542eddba44fa658\": container with ID starting with 6cff21974c69ed332a06501a745b9eff4d9daad002b3f0b08542eddba44fa658 not found: ID does not exist" containerID="6cff21974c69ed332a06501a745b9eff4d9daad002b3f0b08542eddba44fa658" Feb 03 07:11:54 crc kubenswrapper[4998]: I0203 07:11:54.000972 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6cff21974c69ed332a06501a745b9eff4d9daad002b3f0b08542eddba44fa658"} err="failed to get container status \"6cff21974c69ed332a06501a745b9eff4d9daad002b3f0b08542eddba44fa658\": rpc error: code = NotFound desc = could not find container \"6cff21974c69ed332a06501a745b9eff4d9daad002b3f0b08542eddba44fa658\": container with ID starting with 6cff21974c69ed332a06501a745b9eff4d9daad002b3f0b08542eddba44fa658 not found: ID does not exist" Feb 03 07:11:54 crc kubenswrapper[4998]: I0203 07:11:54.000998 4998 scope.go:117] "RemoveContainer" containerID="aca1df8642bb2fe30006b77635a4421fdb6af458f7e01a7833c3ad1d64780ef0" Feb 03 07:11:54 crc kubenswrapper[4998]: E0203 07:11:54.001284 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aca1df8642bb2fe30006b77635a4421fdb6af458f7e01a7833c3ad1d64780ef0\": container with ID starting with aca1df8642bb2fe30006b77635a4421fdb6af458f7e01a7833c3ad1d64780ef0 not found: ID does not exist" containerID="aca1df8642bb2fe30006b77635a4421fdb6af458f7e01a7833c3ad1d64780ef0" Feb 03 07:11:54 crc kubenswrapper[4998]: I0203 07:11:54.001316 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aca1df8642bb2fe30006b77635a4421fdb6af458f7e01a7833c3ad1d64780ef0"} err="failed to get container status \"aca1df8642bb2fe30006b77635a4421fdb6af458f7e01a7833c3ad1d64780ef0\": rpc error: code = NotFound desc = could not find container \"aca1df8642bb2fe30006b77635a4421fdb6af458f7e01a7833c3ad1d64780ef0\": container with ID starting with aca1df8642bb2fe30006b77635a4421fdb6af458f7e01a7833c3ad1d64780ef0 not found: ID does not exist" Feb 03 07:11:54 crc kubenswrapper[4998]: I0203 07:11:54.001337 4998 scope.go:117] "RemoveContainer" containerID="ff116df1ea489bc9687583d02a444e54f528da748d83409651975bef1ea5ea89" Feb 03 07:11:54 crc kubenswrapper[4998]: E0203 07:11:54.001630 4998 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"ff116df1ea489bc9687583d02a444e54f528da748d83409651975bef1ea5ea89\": container with ID starting with ff116df1ea489bc9687583d02a444e54f528da748d83409651975bef1ea5ea89 not found: ID does not exist" containerID="ff116df1ea489bc9687583d02a444e54f528da748d83409651975bef1ea5ea89" Feb 03 07:11:54 crc kubenswrapper[4998]: I0203 07:11:54.001687 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff116df1ea489bc9687583d02a444e54f528da748d83409651975bef1ea5ea89"} err="failed to get container status \"ff116df1ea489bc9687583d02a444e54f528da748d83409651975bef1ea5ea89\": rpc error: code = NotFound desc = could not find container \"ff116df1ea489bc9687583d02a444e54f528da748d83409651975bef1ea5ea89\": container with ID starting with ff116df1ea489bc9687583d02a444e54f528da748d83409651975bef1ea5ea89 not found: ID does not exist" Feb 03 07:11:54 crc kubenswrapper[4998]: I0203 07:11:54.445702 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1" path="/var/lib/kubelet/pods/0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1/volumes" Feb 03 07:12:04 crc kubenswrapper[4998]: I0203 07:12:04.831815 4998 scope.go:117] "RemoveContainer" containerID="682e5a4bbf584c44a4069c9d5968a1ba452e90f8f5c4261070ad2e41451a7311" Feb 03 07:12:04 crc kubenswrapper[4998]: I0203 07:12:04.861299 4998 scope.go:117] "RemoveContainer" containerID="15969ca8a4285b78e3bba616374d40ab46d9a072768d82eb0eb8b367ef8200ab" Feb 03 07:12:04 crc kubenswrapper[4998]: I0203 07:12:04.894774 4998 scope.go:117] "RemoveContainer" containerID="9820beae08e9bb31b81429dd4b22978f152df4c77526ab762d9b068a4fb5c3cd" Feb 03 07:12:04 crc kubenswrapper[4998]: I0203 07:12:04.938087 4998 scope.go:117] "RemoveContainer" containerID="e061b29717ef5f300f95b7d0a0463d74793d4d1ac725755bcc7f09fcec627d2a" Feb 03 07:12:04 crc kubenswrapper[4998]: I0203 07:12:04.960006 4998 scope.go:117] "RemoveContainer" containerID="7b34f3b359b15d4babf364e4c5ad6f27c9f05f151f44d5e2d29ae10254e3e288" Feb 03 07:12:04 crc kubenswrapper[4998]: I0203 07:12:04.981706 4998 scope.go:117] "RemoveContainer" containerID="1ff4307050afd5c78538e5168c27ca50b3d89c99e4973a0736340d10a37a60f9" Feb 03 07:12:05 crc kubenswrapper[4998]: I0203 07:12:05.009170 4998 scope.go:117] "RemoveContainer" containerID="dddd8ed5fabdd6e303ec79235c5f2936c1faede50d32945f56a100075cd79738" Feb 03 07:12:05 crc kubenswrapper[4998]: I0203 07:12:05.024423 4998 scope.go:117] "RemoveContainer" containerID="4a1fdbdde27c80bfba169f3982695b5dbb450d92f7615a63f9245cfcb91e6ac8" Feb 03 07:12:05 crc kubenswrapper[4998]: I0203 07:12:05.055699 4998 scope.go:117] "RemoveContainer" containerID="ed451cc8d54143288975cbbac680ed008712b7ec6322d8711d81f0f4eeb8175d" Feb 03 07:12:05 crc kubenswrapper[4998]: I0203 07:12:05.076833 4998 scope.go:117] "RemoveContainer" containerID="ef295711b2e046d68735b381acaf0905d33919b33ca611ce4e5c1b3c06dbea10" Feb 03 07:12:05 crc kubenswrapper[4998]: I0203 07:12:05.093334 4998 scope.go:117] "RemoveContainer" containerID="9839f10fdf00322e85b9f0ff6cf51326745d8c4dee0f46e8b387490e0332e53e" Feb 03 07:12:05 crc kubenswrapper[4998]: I0203 07:12:05.109749 4998 scope.go:117] "RemoveContainer" containerID="98e79ca3fe1ad81caf41a89c459a6d4ecbc7ebae3b0750029e6181c1ec7c5830" Feb 03 07:12:05 crc kubenswrapper[4998]: I0203 07:12:05.125849 4998 scope.go:117] "RemoveContainer" containerID="838afd8286ed26e4ad4d792c8d67e2582c1b7b33d6d12d95ec79703f142f40cc" Feb 03 07:12:12 crc 
kubenswrapper[4998]: I0203 07:12:12.754245 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:12:12 crc kubenswrapper[4998]: I0203 07:12:12.754723 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:12:42 crc kubenswrapper[4998]: I0203 07:12:42.754555 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:12:42 crc kubenswrapper[4998]: I0203 07:12:42.755151 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:13:05 crc kubenswrapper[4998]: I0203 07:13:05.347361 4998 scope.go:117] "RemoveContainer" containerID="55db44c6ca814e57400d077a9a3a1ad5d68bd4413af30e653e178427453327c3" Feb 03 07:13:05 crc kubenswrapper[4998]: I0203 07:13:05.371797 4998 scope.go:117] "RemoveContainer" containerID="f564ff5de6b936819397c766913066c33690833aafd594b13d539dbbfd6cdeff" Feb 03 07:13:05 crc kubenswrapper[4998]: I0203 07:13:05.402253 4998 scope.go:117] "RemoveContainer" containerID="d8080e0f2ff14ade20f4e6cadd0a135007e045e6b2a43803d336fb15c52053a1" Feb 03 07:13:05 crc kubenswrapper[4998]: I0203 07:13:05.423242 4998 scope.go:117] "RemoveContainer" containerID="6a8eba1308322b6d1ba927de343d503c99e8ffd925e44e0eeea6bef5b588cd4e" Feb 03 07:13:05 crc kubenswrapper[4998]: I0203 07:13:05.456067 4998 scope.go:117] "RemoveContainer" containerID="97552a93fea48430bfcc329b6698e838a5b3ebfaa2c584142d4f3931b752d8a6" Feb 03 07:13:05 crc kubenswrapper[4998]: I0203 07:13:05.488943 4998 scope.go:117] "RemoveContainer" containerID="edcf28cbacee4673a08a4cd9b9c43c8f3f2b423ab921a62e22cbcc434d14d66c" Feb 03 07:13:05 crc kubenswrapper[4998]: I0203 07:13:05.509142 4998 scope.go:117] "RemoveContainer" containerID="f6359e2d805c5da84deb6d70b3a2cf1b151546db9cb67ce6157717737983ed9d" Feb 03 07:13:05 crc kubenswrapper[4998]: I0203 07:13:05.527959 4998 scope.go:117] "RemoveContainer" containerID="9ced9e4baa28785964c8c8ba5e7e0a08637bc094e3f3ac77a9a28b2e25f37501" Feb 03 07:13:05 crc kubenswrapper[4998]: I0203 07:13:05.563686 4998 scope.go:117] "RemoveContainer" containerID="8a11e53e794f1fad621650795f023987d0a74334697f2a88640796077ad63bc4" Feb 03 07:13:05 crc kubenswrapper[4998]: I0203 07:13:05.601169 4998 scope.go:117] "RemoveContainer" containerID="ac85fb57d382b091b0ec3df49b55d69c4fe4553fabf64a2bf4ffa916b408f777" Feb 03 07:13:12 crc kubenswrapper[4998]: I0203 07:13:12.754503 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial 
tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:13:12 crc kubenswrapper[4998]: I0203 07:13:12.755089 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:13:12 crc kubenswrapper[4998]: I0203 07:13:12.755138 4998 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" Feb 03 07:13:12 crc kubenswrapper[4998]: I0203 07:13:12.755753 4998 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b1a8e64af6c0323b631a5d0e804eb508d44c7d65569e25637152c708c50e98a8"} pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 03 07:13:12 crc kubenswrapper[4998]: I0203 07:13:12.755846 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" containerID="cri-o://b1a8e64af6c0323b631a5d0e804eb508d44c7d65569e25637152c708c50e98a8" gracePeriod=600 Feb 03 07:13:12 crc kubenswrapper[4998]: E0203 07:13:12.875607 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:13:13 crc kubenswrapper[4998]: I0203 07:13:13.526278 4998 generic.go:334] "Generic (PLEG): container finished" podID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerID="b1a8e64af6c0323b631a5d0e804eb508d44c7d65569e25637152c708c50e98a8" exitCode=0 Feb 03 07:13:13 crc kubenswrapper[4998]: I0203 07:13:13.526362 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerDied","Data":"b1a8e64af6c0323b631a5d0e804eb508d44c7d65569e25637152c708c50e98a8"} Feb 03 07:13:13 crc kubenswrapper[4998]: I0203 07:13:13.527055 4998 scope.go:117] "RemoveContainer" containerID="9ab352b70406d28ffdcc38744a8dd4b0980c54ba576664d2d40fb907223c52c4" Feb 03 07:13:13 crc kubenswrapper[4998]: I0203 07:13:13.527600 4998 scope.go:117] "RemoveContainer" containerID="b1a8e64af6c0323b631a5d0e804eb508d44c7d65569e25637152c708c50e98a8" Feb 03 07:13:13 crc kubenswrapper[4998]: E0203 07:13:13.527849 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:13:24 crc kubenswrapper[4998]: I0203 07:13:24.427609 4998 scope.go:117] "RemoveContainer" 
containerID="b1a8e64af6c0323b631a5d0e804eb508d44c7d65569e25637152c708c50e98a8" Feb 03 07:13:24 crc kubenswrapper[4998]: E0203 07:13:24.428132 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:13:36 crc kubenswrapper[4998]: I0203 07:13:36.427860 4998 scope.go:117] "RemoveContainer" containerID="b1a8e64af6c0323b631a5d0e804eb508d44c7d65569e25637152c708c50e98a8" Feb 03 07:13:36 crc kubenswrapper[4998]: E0203 07:13:36.429116 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:13:48 crc kubenswrapper[4998]: I0203 07:13:48.427942 4998 scope.go:117] "RemoveContainer" containerID="b1a8e64af6c0323b631a5d0e804eb508d44c7d65569e25637152c708c50e98a8" Feb 03 07:13:48 crc kubenswrapper[4998]: E0203 07:13:48.429324 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:13:59 crc kubenswrapper[4998]: I0203 07:13:59.428291 4998 scope.go:117] "RemoveContainer" containerID="b1a8e64af6c0323b631a5d0e804eb508d44c7d65569e25637152c708c50e98a8" Feb 03 07:13:59 crc kubenswrapper[4998]: E0203 07:13:59.429205 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:14:05 crc kubenswrapper[4998]: I0203 07:14:05.721012 4998 scope.go:117] "RemoveContainer" containerID="5d7d41cca39d4807855aca639882004c6a0546d3132beb684028e22eee232e64" Feb 03 07:14:05 crc kubenswrapper[4998]: I0203 07:14:05.743466 4998 scope.go:117] "RemoveContainer" containerID="33df0fedaa8a6ba9ced2dc1af5357329eb9b9365b1144e9121248a8b0882f833" Feb 03 07:14:05 crc kubenswrapper[4998]: I0203 07:14:05.759347 4998 scope.go:117] "RemoveContainer" containerID="52f356947435219b0263bbbd5ce037049ed715085f6b511a973666bee8f1da89" Feb 03 07:14:05 crc kubenswrapper[4998]: I0203 07:14:05.790228 4998 scope.go:117] "RemoveContainer" containerID="29722612d0982233b88843f2faf98b88c223434bc9d5d4ecba495fb7bf4b1ab7" Feb 03 07:14:05 crc kubenswrapper[4998]: I0203 07:14:05.827057 4998 scope.go:117] "RemoveContainer" containerID="e1ca138319732aaeed7b3eca0957a664db0ff4d7e9e21ac834109166de0a53de" Feb 03 07:14:05 crc kubenswrapper[4998]: I0203 07:14:05.846941 4998 
scope.go:117] "RemoveContainer" containerID="505e445e1d61f5314d4840da4a3de1b3067fcf492e8165992044b9893ba78241" Feb 03 07:14:05 crc kubenswrapper[4998]: I0203 07:14:05.863558 4998 scope.go:117] "RemoveContainer" containerID="5b97509f0940b2ff84d32ba41eb37019e5a72aa74c7cd8d04ce4177519975f8f" Feb 03 07:14:05 crc kubenswrapper[4998]: I0203 07:14:05.879147 4998 scope.go:117] "RemoveContainer" containerID="bcba74e1e0494391ede410b57660f2f6f4c653650d3a0894b654391063d77965" Feb 03 07:14:05 crc kubenswrapper[4998]: I0203 07:14:05.894883 4998 scope.go:117] "RemoveContainer" containerID="54dbd2cecba320b2fc9213ed3e654126b77a791838b0470017988ad2d984d176" Feb 03 07:14:05 crc kubenswrapper[4998]: I0203 07:14:05.912854 4998 scope.go:117] "RemoveContainer" containerID="67f41d70a452f5e2318d430f8ca6dbbc9d678bac6ed6dbeb680ad9f6fab077e5" Feb 03 07:14:05 crc kubenswrapper[4998]: I0203 07:14:05.931520 4998 scope.go:117] "RemoveContainer" containerID="68b523af09a7e35d7d9fbb9ab1b0fab68d289b1089cd6f955bee40a1839a9c65" Feb 03 07:14:05 crc kubenswrapper[4998]: I0203 07:14:05.946610 4998 scope.go:117] "RemoveContainer" containerID="bdfed3c637d5a7e646aafe270bfdd002d1e9e70fc02fb8311d733fa26a2bd587" Feb 03 07:14:05 crc kubenswrapper[4998]: I0203 07:14:05.965392 4998 scope.go:117] "RemoveContainer" containerID="4a88c17630989ed8239da39c28bf5da3949e9fc5efa6d80626f8a6b62e6fe5df" Feb 03 07:14:05 crc kubenswrapper[4998]: I0203 07:14:05.984711 4998 scope.go:117] "RemoveContainer" containerID="399d45913cb9eab8eb1e4267fcd7e6d1cfdce55f450959649fbc69c1df47bced" Feb 03 07:14:14 crc kubenswrapper[4998]: I0203 07:14:14.428018 4998 scope.go:117] "RemoveContainer" containerID="b1a8e64af6c0323b631a5d0e804eb508d44c7d65569e25637152c708c50e98a8" Feb 03 07:14:14 crc kubenswrapper[4998]: E0203 07:14:14.428755 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:14:25 crc kubenswrapper[4998]: I0203 07:14:25.427728 4998 scope.go:117] "RemoveContainer" containerID="b1a8e64af6c0323b631a5d0e804eb508d44c7d65569e25637152c708c50e98a8" Feb 03 07:14:25 crc kubenswrapper[4998]: E0203 07:14:25.429509 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:14:37 crc kubenswrapper[4998]: I0203 07:14:37.428017 4998 scope.go:117] "RemoveContainer" containerID="b1a8e64af6c0323b631a5d0e804eb508d44c7d65569e25637152c708c50e98a8" Feb 03 07:14:37 crc kubenswrapper[4998]: E0203 07:14:37.429708 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" 
podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:14:49 crc kubenswrapper[4998]: I0203 07:14:49.428065 4998 scope.go:117] "RemoveContainer" containerID="b1a8e64af6c0323b631a5d0e804eb508d44c7d65569e25637152c708c50e98a8" Feb 03 07:14:49 crc kubenswrapper[4998]: E0203 07:14:49.428894 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:14:57 crc kubenswrapper[4998]: I0203 07:14:57.150860 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6l9qj"] Feb 03 07:14:57 crc kubenswrapper[4998]: E0203 07:14:57.153000 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1" containerName="extract-utilities" Feb 03 07:14:57 crc kubenswrapper[4998]: I0203 07:14:57.153119 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1" containerName="extract-utilities" Feb 03 07:14:57 crc kubenswrapper[4998]: E0203 07:14:57.153228 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1" containerName="extract-content" Feb 03 07:14:57 crc kubenswrapper[4998]: I0203 07:14:57.153355 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1" containerName="extract-content" Feb 03 07:14:57 crc kubenswrapper[4998]: E0203 07:14:57.153465 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1" containerName="registry-server" Feb 03 07:14:57 crc kubenswrapper[4998]: I0203 07:14:57.153546 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1" containerName="registry-server" Feb 03 07:14:57 crc kubenswrapper[4998]: I0203 07:14:57.153900 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="0992d0f5-d6b8-4ce6-b384-beb5b5a8d0f1" containerName="registry-server" Feb 03 07:14:57 crc kubenswrapper[4998]: I0203 07:14:57.155173 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6l9qj" Feb 03 07:14:57 crc kubenswrapper[4998]: I0203 07:14:57.170691 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6l9qj"] Feb 03 07:14:57 crc kubenswrapper[4998]: I0203 07:14:57.235576 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3103bb3e-fb5b-4479-b90a-1cb9bc86976a-catalog-content\") pod \"community-operators-6l9qj\" (UID: \"3103bb3e-fb5b-4479-b90a-1cb9bc86976a\") " pod="openshift-marketplace/community-operators-6l9qj" Feb 03 07:14:57 crc kubenswrapper[4998]: I0203 07:14:57.235668 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2s5t6\" (UniqueName: \"kubernetes.io/projected/3103bb3e-fb5b-4479-b90a-1cb9bc86976a-kube-api-access-2s5t6\") pod \"community-operators-6l9qj\" (UID: \"3103bb3e-fb5b-4479-b90a-1cb9bc86976a\") " pod="openshift-marketplace/community-operators-6l9qj" Feb 03 07:14:57 crc kubenswrapper[4998]: I0203 07:14:57.235935 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3103bb3e-fb5b-4479-b90a-1cb9bc86976a-utilities\") pod \"community-operators-6l9qj\" (UID: \"3103bb3e-fb5b-4479-b90a-1cb9bc86976a\") " pod="openshift-marketplace/community-operators-6l9qj" Feb 03 07:14:57 crc kubenswrapper[4998]: I0203 07:14:57.337742 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3103bb3e-fb5b-4479-b90a-1cb9bc86976a-utilities\") pod \"community-operators-6l9qj\" (UID: \"3103bb3e-fb5b-4479-b90a-1cb9bc86976a\") " pod="openshift-marketplace/community-operators-6l9qj" Feb 03 07:14:57 crc kubenswrapper[4998]: I0203 07:14:57.337845 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3103bb3e-fb5b-4479-b90a-1cb9bc86976a-catalog-content\") pod \"community-operators-6l9qj\" (UID: \"3103bb3e-fb5b-4479-b90a-1cb9bc86976a\") " pod="openshift-marketplace/community-operators-6l9qj" Feb 03 07:14:57 crc kubenswrapper[4998]: I0203 07:14:57.337898 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2s5t6\" (UniqueName: \"kubernetes.io/projected/3103bb3e-fb5b-4479-b90a-1cb9bc86976a-kube-api-access-2s5t6\") pod \"community-operators-6l9qj\" (UID: \"3103bb3e-fb5b-4479-b90a-1cb9bc86976a\") " pod="openshift-marketplace/community-operators-6l9qj" Feb 03 07:14:57 crc kubenswrapper[4998]: I0203 07:14:57.338286 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3103bb3e-fb5b-4479-b90a-1cb9bc86976a-utilities\") pod \"community-operators-6l9qj\" (UID: \"3103bb3e-fb5b-4479-b90a-1cb9bc86976a\") " pod="openshift-marketplace/community-operators-6l9qj" Feb 03 07:14:57 crc kubenswrapper[4998]: I0203 07:14:57.338367 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3103bb3e-fb5b-4479-b90a-1cb9bc86976a-catalog-content\") pod \"community-operators-6l9qj\" (UID: \"3103bb3e-fb5b-4479-b90a-1cb9bc86976a\") " pod="openshift-marketplace/community-operators-6l9qj" Feb 03 07:14:57 crc kubenswrapper[4998]: I0203 07:14:57.357319 4998 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-2s5t6\" (UniqueName: \"kubernetes.io/projected/3103bb3e-fb5b-4479-b90a-1cb9bc86976a-kube-api-access-2s5t6\") pod \"community-operators-6l9qj\" (UID: \"3103bb3e-fb5b-4479-b90a-1cb9bc86976a\") " pod="openshift-marketplace/community-operators-6l9qj" Feb 03 07:14:57 crc kubenswrapper[4998]: I0203 07:14:57.473108 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6l9qj" Feb 03 07:14:57 crc kubenswrapper[4998]: I0203 07:14:57.775092 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7dv85"] Feb 03 07:14:57 crc kubenswrapper[4998]: I0203 07:14:57.779561 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7dv85" Feb 03 07:14:57 crc kubenswrapper[4998]: I0203 07:14:57.802396 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7dv85"] Feb 03 07:14:57 crc kubenswrapper[4998]: I0203 07:14:57.808998 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6l9qj"] Feb 03 07:14:57 crc kubenswrapper[4998]: I0203 07:14:57.957951 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/250a06cd-a850-45d6-aaf5-daa0f567c706-utilities\") pod \"redhat-marketplace-7dv85\" (UID: \"250a06cd-a850-45d6-aaf5-daa0f567c706\") " pod="openshift-marketplace/redhat-marketplace-7dv85" Feb 03 07:14:57 crc kubenswrapper[4998]: I0203 07:14:57.958310 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kffsb\" (UniqueName: \"kubernetes.io/projected/250a06cd-a850-45d6-aaf5-daa0f567c706-kube-api-access-kffsb\") pod \"redhat-marketplace-7dv85\" (UID: \"250a06cd-a850-45d6-aaf5-daa0f567c706\") " pod="openshift-marketplace/redhat-marketplace-7dv85" Feb 03 07:14:57 crc kubenswrapper[4998]: I0203 07:14:57.958348 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/250a06cd-a850-45d6-aaf5-daa0f567c706-catalog-content\") pod \"redhat-marketplace-7dv85\" (UID: \"250a06cd-a850-45d6-aaf5-daa0f567c706\") " pod="openshift-marketplace/redhat-marketplace-7dv85" Feb 03 07:14:58 crc kubenswrapper[4998]: I0203 07:14:58.059620 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/250a06cd-a850-45d6-aaf5-daa0f567c706-catalog-content\") pod \"redhat-marketplace-7dv85\" (UID: \"250a06cd-a850-45d6-aaf5-daa0f567c706\") " pod="openshift-marketplace/redhat-marketplace-7dv85" Feb 03 07:14:58 crc kubenswrapper[4998]: I0203 07:14:58.059791 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/250a06cd-a850-45d6-aaf5-daa0f567c706-utilities\") pod \"redhat-marketplace-7dv85\" (UID: \"250a06cd-a850-45d6-aaf5-daa0f567c706\") " pod="openshift-marketplace/redhat-marketplace-7dv85" Feb 03 07:14:58 crc kubenswrapper[4998]: I0203 07:14:58.059844 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kffsb\" (UniqueName: \"kubernetes.io/projected/250a06cd-a850-45d6-aaf5-daa0f567c706-kube-api-access-kffsb\") pod \"redhat-marketplace-7dv85\" (UID: 
\"250a06cd-a850-45d6-aaf5-daa0f567c706\") " pod="openshift-marketplace/redhat-marketplace-7dv85" Feb 03 07:14:58 crc kubenswrapper[4998]: I0203 07:14:58.060321 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/250a06cd-a850-45d6-aaf5-daa0f567c706-catalog-content\") pod \"redhat-marketplace-7dv85\" (UID: \"250a06cd-a850-45d6-aaf5-daa0f567c706\") " pod="openshift-marketplace/redhat-marketplace-7dv85" Feb 03 07:14:58 crc kubenswrapper[4998]: I0203 07:14:58.060415 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/250a06cd-a850-45d6-aaf5-daa0f567c706-utilities\") pod \"redhat-marketplace-7dv85\" (UID: \"250a06cd-a850-45d6-aaf5-daa0f567c706\") " pod="openshift-marketplace/redhat-marketplace-7dv85" Feb 03 07:14:58 crc kubenswrapper[4998]: I0203 07:14:58.082298 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kffsb\" (UniqueName: \"kubernetes.io/projected/250a06cd-a850-45d6-aaf5-daa0f567c706-kube-api-access-kffsb\") pod \"redhat-marketplace-7dv85\" (UID: \"250a06cd-a850-45d6-aaf5-daa0f567c706\") " pod="openshift-marketplace/redhat-marketplace-7dv85" Feb 03 07:14:58 crc kubenswrapper[4998]: I0203 07:14:58.108272 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7dv85" Feb 03 07:14:58 crc kubenswrapper[4998]: I0203 07:14:58.289825 4998 generic.go:334] "Generic (PLEG): container finished" podID="3103bb3e-fb5b-4479-b90a-1cb9bc86976a" containerID="9bea0f53558e766ccb498ba52a82dac25c7da119a39f2f181fcc55d3285f5b7c" exitCode=0 Feb 03 07:14:58 crc kubenswrapper[4998]: I0203 07:14:58.289880 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6l9qj" event={"ID":"3103bb3e-fb5b-4479-b90a-1cb9bc86976a","Type":"ContainerDied","Data":"9bea0f53558e766ccb498ba52a82dac25c7da119a39f2f181fcc55d3285f5b7c"} Feb 03 07:14:58 crc kubenswrapper[4998]: I0203 07:14:58.289915 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6l9qj" event={"ID":"3103bb3e-fb5b-4479-b90a-1cb9bc86976a","Type":"ContainerStarted","Data":"0f2ebb95c4b1cd414163ee57edb7f450261eca84ff0a086a3cf267d241c0e956"} Feb 03 07:14:58 crc kubenswrapper[4998]: I0203 07:14:58.292088 4998 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 03 07:14:58 crc kubenswrapper[4998]: I0203 07:14:58.530930 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7dv85"] Feb 03 07:14:58 crc kubenswrapper[4998]: W0203 07:14:58.541720 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod250a06cd_a850_45d6_aaf5_daa0f567c706.slice/crio-43b8ccf45ca29d8d0952dab78b8a310f0c9d27c3626cf907caf32b6bac63a180 WatchSource:0}: Error finding container 43b8ccf45ca29d8d0952dab78b8a310f0c9d27c3626cf907caf32b6bac63a180: Status 404 returned error can't find the container with id 43b8ccf45ca29d8d0952dab78b8a310f0c9d27c3626cf907caf32b6bac63a180 Feb 03 07:14:59 crc kubenswrapper[4998]: I0203 07:14:59.300835 4998 generic.go:334] "Generic (PLEG): container finished" podID="3103bb3e-fb5b-4479-b90a-1cb9bc86976a" containerID="1b833b82188d11284d153ba06dcfa206738eaee17e55aafd1f6038a363554e94" exitCode=0 Feb 03 07:14:59 crc kubenswrapper[4998]: I0203 07:14:59.300921 4998 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6l9qj" event={"ID":"3103bb3e-fb5b-4479-b90a-1cb9bc86976a","Type":"ContainerDied","Data":"1b833b82188d11284d153ba06dcfa206738eaee17e55aafd1f6038a363554e94"} Feb 03 07:14:59 crc kubenswrapper[4998]: I0203 07:14:59.303983 4998 generic.go:334] "Generic (PLEG): container finished" podID="250a06cd-a850-45d6-aaf5-daa0f567c706" containerID="2c0edbc691736aa825abf73108dac3f302d132935499b3b902e15171c3819ea9" exitCode=0 Feb 03 07:14:59 crc kubenswrapper[4998]: I0203 07:14:59.304031 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7dv85" event={"ID":"250a06cd-a850-45d6-aaf5-daa0f567c706","Type":"ContainerDied","Data":"2c0edbc691736aa825abf73108dac3f302d132935499b3b902e15171c3819ea9"} Feb 03 07:14:59 crc kubenswrapper[4998]: I0203 07:14:59.304059 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7dv85" event={"ID":"250a06cd-a850-45d6-aaf5-daa0f567c706","Type":"ContainerStarted","Data":"43b8ccf45ca29d8d0952dab78b8a310f0c9d27c3626cf907caf32b6bac63a180"} Feb 03 07:15:00 crc kubenswrapper[4998]: I0203 07:15:00.146256 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501715-8cnh4"] Feb 03 07:15:00 crc kubenswrapper[4998]: I0203 07:15:00.147404 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501715-8cnh4" Feb 03 07:15:00 crc kubenswrapper[4998]: I0203 07:15:00.156640 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501715-8cnh4"] Feb 03 07:15:00 crc kubenswrapper[4998]: I0203 07:15:00.158766 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 03 07:15:00 crc kubenswrapper[4998]: I0203 07:15:00.159998 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 03 07:15:00 crc kubenswrapper[4998]: I0203 07:15:00.290830 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9e2485eb-fa88-4a87-b195-1b3dffc5075d-config-volume\") pod \"collect-profiles-29501715-8cnh4\" (UID: \"9e2485eb-fa88-4a87-b195-1b3dffc5075d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501715-8cnh4" Feb 03 07:15:00 crc kubenswrapper[4998]: I0203 07:15:00.291071 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52gn5\" (UniqueName: \"kubernetes.io/projected/9e2485eb-fa88-4a87-b195-1b3dffc5075d-kube-api-access-52gn5\") pod \"collect-profiles-29501715-8cnh4\" (UID: \"9e2485eb-fa88-4a87-b195-1b3dffc5075d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501715-8cnh4" Feb 03 07:15:00 crc kubenswrapper[4998]: I0203 07:15:00.291263 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9e2485eb-fa88-4a87-b195-1b3dffc5075d-secret-volume\") pod \"collect-profiles-29501715-8cnh4\" (UID: \"9e2485eb-fa88-4a87-b195-1b3dffc5075d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501715-8cnh4" Feb 03 07:15:00 crc kubenswrapper[4998]: I0203 07:15:00.311752 4998 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7dv85" event={"ID":"250a06cd-a850-45d6-aaf5-daa0f567c706","Type":"ContainerStarted","Data":"6e2bf1ee00fd17b5d6e4d95d3104e92cea9d27f382ab1debe88491c68b4f9066"} Feb 03 07:15:00 crc kubenswrapper[4998]: I0203 07:15:00.314530 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6l9qj" event={"ID":"3103bb3e-fb5b-4479-b90a-1cb9bc86976a","Type":"ContainerStarted","Data":"db55347d24a7239a2c4412cff4b1cde74136d056a667469efff8d34729639930"} Feb 03 07:15:00 crc kubenswrapper[4998]: I0203 07:15:00.369270 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6l9qj" podStartSLOduration=1.9411061950000001 podStartE2EDuration="3.369228525s" podCreationTimestamp="2026-02-03 07:14:57 +0000 UTC" firstStartedPulling="2026-02-03 07:14:58.291796964 +0000 UTC m=+1736.578490770" lastFinishedPulling="2026-02-03 07:14:59.719919294 +0000 UTC m=+1738.006613100" observedRunningTime="2026-02-03 07:15:00.361174168 +0000 UTC m=+1738.647867974" watchObservedRunningTime="2026-02-03 07:15:00.369228525 +0000 UTC m=+1738.655922341" Feb 03 07:15:00 crc kubenswrapper[4998]: I0203 07:15:00.393151 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9e2485eb-fa88-4a87-b195-1b3dffc5075d-secret-volume\") pod \"collect-profiles-29501715-8cnh4\" (UID: \"9e2485eb-fa88-4a87-b195-1b3dffc5075d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501715-8cnh4" Feb 03 07:15:00 crc kubenswrapper[4998]: I0203 07:15:00.393532 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9e2485eb-fa88-4a87-b195-1b3dffc5075d-config-volume\") pod \"collect-profiles-29501715-8cnh4\" (UID: \"9e2485eb-fa88-4a87-b195-1b3dffc5075d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501715-8cnh4" Feb 03 07:15:00 crc kubenswrapper[4998]: I0203 07:15:00.393646 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52gn5\" (UniqueName: \"kubernetes.io/projected/9e2485eb-fa88-4a87-b195-1b3dffc5075d-kube-api-access-52gn5\") pod \"collect-profiles-29501715-8cnh4\" (UID: \"9e2485eb-fa88-4a87-b195-1b3dffc5075d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501715-8cnh4" Feb 03 07:15:00 crc kubenswrapper[4998]: I0203 07:15:00.394511 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9e2485eb-fa88-4a87-b195-1b3dffc5075d-config-volume\") pod \"collect-profiles-29501715-8cnh4\" (UID: \"9e2485eb-fa88-4a87-b195-1b3dffc5075d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501715-8cnh4" Feb 03 07:15:00 crc kubenswrapper[4998]: I0203 07:15:00.401228 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9e2485eb-fa88-4a87-b195-1b3dffc5075d-secret-volume\") pod \"collect-profiles-29501715-8cnh4\" (UID: \"9e2485eb-fa88-4a87-b195-1b3dffc5075d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501715-8cnh4" Feb 03 07:15:00 crc kubenswrapper[4998]: I0203 07:15:00.410995 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52gn5\" (UniqueName: 
\"kubernetes.io/projected/9e2485eb-fa88-4a87-b195-1b3dffc5075d-kube-api-access-52gn5\") pod \"collect-profiles-29501715-8cnh4\" (UID: \"9e2485eb-fa88-4a87-b195-1b3dffc5075d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501715-8cnh4" Feb 03 07:15:00 crc kubenswrapper[4998]: I0203 07:15:00.468027 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501715-8cnh4" Feb 03 07:15:00 crc kubenswrapper[4998]: I0203 07:15:00.941148 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501715-8cnh4"] Feb 03 07:15:01 crc kubenswrapper[4998]: I0203 07:15:01.321905 4998 generic.go:334] "Generic (PLEG): container finished" podID="9e2485eb-fa88-4a87-b195-1b3dffc5075d" containerID="9af62ff8931479f3b4eabeb56f5dab8491a57e44911d3154f347fc10709e499e" exitCode=0 Feb 03 07:15:01 crc kubenswrapper[4998]: I0203 07:15:01.321992 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501715-8cnh4" event={"ID":"9e2485eb-fa88-4a87-b195-1b3dffc5075d","Type":"ContainerDied","Data":"9af62ff8931479f3b4eabeb56f5dab8491a57e44911d3154f347fc10709e499e"} Feb 03 07:15:01 crc kubenswrapper[4998]: I0203 07:15:01.322021 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501715-8cnh4" event={"ID":"9e2485eb-fa88-4a87-b195-1b3dffc5075d","Type":"ContainerStarted","Data":"0ed3f5195d8f540c0e0b578841966e2f933914084442ae5b2629522f81f2c012"} Feb 03 07:15:01 crc kubenswrapper[4998]: I0203 07:15:01.324235 4998 generic.go:334] "Generic (PLEG): container finished" podID="250a06cd-a850-45d6-aaf5-daa0f567c706" containerID="6e2bf1ee00fd17b5d6e4d95d3104e92cea9d27f382ab1debe88491c68b4f9066" exitCode=0 Feb 03 07:15:01 crc kubenswrapper[4998]: I0203 07:15:01.324326 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7dv85" event={"ID":"250a06cd-a850-45d6-aaf5-daa0f567c706","Type":"ContainerDied","Data":"6e2bf1ee00fd17b5d6e4d95d3104e92cea9d27f382ab1debe88491c68b4f9066"} Feb 03 07:15:02 crc kubenswrapper[4998]: I0203 07:15:02.334441 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7dv85" event={"ID":"250a06cd-a850-45d6-aaf5-daa0f567c706","Type":"ContainerStarted","Data":"bb72b709a2d714544c6f5330cc9dc2d36603cbf3ed99b46d355383ddcfe7c0d0"} Feb 03 07:15:02 crc kubenswrapper[4998]: I0203 07:15:02.371276 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7dv85" podStartSLOduration=2.769037097 podStartE2EDuration="5.37125085s" podCreationTimestamp="2026-02-03 07:14:57 +0000 UTC" firstStartedPulling="2026-02-03 07:14:59.311117378 +0000 UTC m=+1737.597811194" lastFinishedPulling="2026-02-03 07:15:01.913331141 +0000 UTC m=+1740.200024947" observedRunningTime="2026-02-03 07:15:02.356221187 +0000 UTC m=+1740.642915053" watchObservedRunningTime="2026-02-03 07:15:02.37125085 +0000 UTC m=+1740.657944686" Feb 03 07:15:02 crc kubenswrapper[4998]: I0203 07:15:02.441996 4998 scope.go:117] "RemoveContainer" containerID="b1a8e64af6c0323b631a5d0e804eb508d44c7d65569e25637152c708c50e98a8" Feb 03 07:15:02 crc kubenswrapper[4998]: E0203 07:15:02.442474 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:15:02 crc kubenswrapper[4998]: I0203 07:15:02.615701 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501715-8cnh4" Feb 03 07:15:02 crc kubenswrapper[4998]: I0203 07:15:02.726734 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9e2485eb-fa88-4a87-b195-1b3dffc5075d-config-volume\") pod \"9e2485eb-fa88-4a87-b195-1b3dffc5075d\" (UID: \"9e2485eb-fa88-4a87-b195-1b3dffc5075d\") " Feb 03 07:15:02 crc kubenswrapper[4998]: I0203 07:15:02.726909 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9e2485eb-fa88-4a87-b195-1b3dffc5075d-secret-volume\") pod \"9e2485eb-fa88-4a87-b195-1b3dffc5075d\" (UID: \"9e2485eb-fa88-4a87-b195-1b3dffc5075d\") " Feb 03 07:15:02 crc kubenswrapper[4998]: I0203 07:15:02.726929 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-52gn5\" (UniqueName: \"kubernetes.io/projected/9e2485eb-fa88-4a87-b195-1b3dffc5075d-kube-api-access-52gn5\") pod \"9e2485eb-fa88-4a87-b195-1b3dffc5075d\" (UID: \"9e2485eb-fa88-4a87-b195-1b3dffc5075d\") " Feb 03 07:15:02 crc kubenswrapper[4998]: I0203 07:15:02.727620 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e2485eb-fa88-4a87-b195-1b3dffc5075d-config-volume" (OuterVolumeSpecName: "config-volume") pod "9e2485eb-fa88-4a87-b195-1b3dffc5075d" (UID: "9e2485eb-fa88-4a87-b195-1b3dffc5075d"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:15:02 crc kubenswrapper[4998]: I0203 07:15:02.732252 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e2485eb-fa88-4a87-b195-1b3dffc5075d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "9e2485eb-fa88-4a87-b195-1b3dffc5075d" (UID: "9e2485eb-fa88-4a87-b195-1b3dffc5075d"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:15:02 crc kubenswrapper[4998]: I0203 07:15:02.732443 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e2485eb-fa88-4a87-b195-1b3dffc5075d-kube-api-access-52gn5" (OuterVolumeSpecName: "kube-api-access-52gn5") pod "9e2485eb-fa88-4a87-b195-1b3dffc5075d" (UID: "9e2485eb-fa88-4a87-b195-1b3dffc5075d"). InnerVolumeSpecName "kube-api-access-52gn5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:15:02 crc kubenswrapper[4998]: I0203 07:15:02.828377 4998 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9e2485eb-fa88-4a87-b195-1b3dffc5075d-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 03 07:15:02 crc kubenswrapper[4998]: I0203 07:15:02.828422 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-52gn5\" (UniqueName: \"kubernetes.io/projected/9e2485eb-fa88-4a87-b195-1b3dffc5075d-kube-api-access-52gn5\") on node \"crc\" DevicePath \"\"" Feb 03 07:15:02 crc kubenswrapper[4998]: I0203 07:15:02.828432 4998 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9e2485eb-fa88-4a87-b195-1b3dffc5075d-config-volume\") on node \"crc\" DevicePath \"\"" Feb 03 07:15:03 crc kubenswrapper[4998]: I0203 07:15:03.342015 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501715-8cnh4" event={"ID":"9e2485eb-fa88-4a87-b195-1b3dffc5075d","Type":"ContainerDied","Data":"0ed3f5195d8f540c0e0b578841966e2f933914084442ae5b2629522f81f2c012"} Feb 03 07:15:03 crc kubenswrapper[4998]: I0203 07:15:03.342052 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0ed3f5195d8f540c0e0b578841966e2f933914084442ae5b2629522f81f2c012" Feb 03 07:15:03 crc kubenswrapper[4998]: I0203 07:15:03.342057 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501715-8cnh4" Feb 03 07:15:06 crc kubenswrapper[4998]: I0203 07:15:06.066317 4998 scope.go:117] "RemoveContainer" containerID="7a850fda6a5668f7f1d0f032de0d6df1c4a6d33f4f9ca91cea233a9b53646025" Feb 03 07:15:07 crc kubenswrapper[4998]: I0203 07:15:07.473394 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6l9qj" Feb 03 07:15:07 crc kubenswrapper[4998]: I0203 07:15:07.473699 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6l9qj" Feb 03 07:15:07 crc kubenswrapper[4998]: I0203 07:15:07.512514 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6l9qj" Feb 03 07:15:08 crc kubenswrapper[4998]: I0203 07:15:08.108932 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7dv85" Feb 03 07:15:08 crc kubenswrapper[4998]: I0203 07:15:08.109850 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7dv85" Feb 03 07:15:08 crc kubenswrapper[4998]: I0203 07:15:08.149689 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7dv85" Feb 03 07:15:08 crc kubenswrapper[4998]: I0203 07:15:08.415124 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6l9qj" Feb 03 07:15:08 crc kubenswrapper[4998]: I0203 07:15:08.415372 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7dv85" Feb 03 07:15:08 crc kubenswrapper[4998]: I0203 07:15:08.955060 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6l9qj"] Feb 03 07:15:10 crc 
kubenswrapper[4998]: I0203 07:15:10.395960 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-6l9qj" podUID="3103bb3e-fb5b-4479-b90a-1cb9bc86976a" containerName="registry-server" containerID="cri-o://db55347d24a7239a2c4412cff4b1cde74136d056a667469efff8d34729639930" gracePeriod=2 Feb 03 07:15:10 crc kubenswrapper[4998]: I0203 07:15:10.750971 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7dv85"] Feb 03 07:15:10 crc kubenswrapper[4998]: I0203 07:15:10.784116 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6l9qj" Feb 03 07:15:10 crc kubenswrapper[4998]: I0203 07:15:10.947613 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3103bb3e-fb5b-4479-b90a-1cb9bc86976a-utilities\") pod \"3103bb3e-fb5b-4479-b90a-1cb9bc86976a\" (UID: \"3103bb3e-fb5b-4479-b90a-1cb9bc86976a\") " Feb 03 07:15:10 crc kubenswrapper[4998]: I0203 07:15:10.947768 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2s5t6\" (UniqueName: \"kubernetes.io/projected/3103bb3e-fb5b-4479-b90a-1cb9bc86976a-kube-api-access-2s5t6\") pod \"3103bb3e-fb5b-4479-b90a-1cb9bc86976a\" (UID: \"3103bb3e-fb5b-4479-b90a-1cb9bc86976a\") " Feb 03 07:15:10 crc kubenswrapper[4998]: I0203 07:15:10.947864 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3103bb3e-fb5b-4479-b90a-1cb9bc86976a-catalog-content\") pod \"3103bb3e-fb5b-4479-b90a-1cb9bc86976a\" (UID: \"3103bb3e-fb5b-4479-b90a-1cb9bc86976a\") " Feb 03 07:15:10 crc kubenswrapper[4998]: I0203 07:15:10.948585 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3103bb3e-fb5b-4479-b90a-1cb9bc86976a-utilities" (OuterVolumeSpecName: "utilities") pod "3103bb3e-fb5b-4479-b90a-1cb9bc86976a" (UID: "3103bb3e-fb5b-4479-b90a-1cb9bc86976a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:15:10 crc kubenswrapper[4998]: I0203 07:15:10.953498 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3103bb3e-fb5b-4479-b90a-1cb9bc86976a-kube-api-access-2s5t6" (OuterVolumeSpecName: "kube-api-access-2s5t6") pod "3103bb3e-fb5b-4479-b90a-1cb9bc86976a" (UID: "3103bb3e-fb5b-4479-b90a-1cb9bc86976a"). InnerVolumeSpecName "kube-api-access-2s5t6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:15:11 crc kubenswrapper[4998]: I0203 07:15:11.010610 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3103bb3e-fb5b-4479-b90a-1cb9bc86976a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3103bb3e-fb5b-4479-b90a-1cb9bc86976a" (UID: "3103bb3e-fb5b-4479-b90a-1cb9bc86976a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:15:11 crc kubenswrapper[4998]: I0203 07:15:11.050206 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3103bb3e-fb5b-4479-b90a-1cb9bc86976a-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:15:11 crc kubenswrapper[4998]: I0203 07:15:11.050252 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3103bb3e-fb5b-4479-b90a-1cb9bc86976a-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:15:11 crc kubenswrapper[4998]: I0203 07:15:11.050266 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2s5t6\" (UniqueName: \"kubernetes.io/projected/3103bb3e-fb5b-4479-b90a-1cb9bc86976a-kube-api-access-2s5t6\") on node \"crc\" DevicePath \"\"" Feb 03 07:15:11 crc kubenswrapper[4998]: I0203 07:15:11.405174 4998 generic.go:334] "Generic (PLEG): container finished" podID="3103bb3e-fb5b-4479-b90a-1cb9bc86976a" containerID="db55347d24a7239a2c4412cff4b1cde74136d056a667469efff8d34729639930" exitCode=0 Feb 03 07:15:11 crc kubenswrapper[4998]: I0203 07:15:11.405227 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6l9qj" event={"ID":"3103bb3e-fb5b-4479-b90a-1cb9bc86976a","Type":"ContainerDied","Data":"db55347d24a7239a2c4412cff4b1cde74136d056a667469efff8d34729639930"} Feb 03 07:15:11 crc kubenswrapper[4998]: I0203 07:15:11.405295 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6l9qj" event={"ID":"3103bb3e-fb5b-4479-b90a-1cb9bc86976a","Type":"ContainerDied","Data":"0f2ebb95c4b1cd414163ee57edb7f450261eca84ff0a086a3cf267d241c0e956"} Feb 03 07:15:11 crc kubenswrapper[4998]: I0203 07:15:11.405327 4998 scope.go:117] "RemoveContainer" containerID="db55347d24a7239a2c4412cff4b1cde74136d056a667469efff8d34729639930" Feb 03 07:15:11 crc kubenswrapper[4998]: I0203 07:15:11.405320 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6l9qj" Feb 03 07:15:11 crc kubenswrapper[4998]: I0203 07:15:11.405382 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-7dv85" podUID="250a06cd-a850-45d6-aaf5-daa0f567c706" containerName="registry-server" containerID="cri-o://bb72b709a2d714544c6f5330cc9dc2d36603cbf3ed99b46d355383ddcfe7c0d0" gracePeriod=2 Feb 03 07:15:11 crc kubenswrapper[4998]: I0203 07:15:11.427690 4998 scope.go:117] "RemoveContainer" containerID="1b833b82188d11284d153ba06dcfa206738eaee17e55aafd1f6038a363554e94" Feb 03 07:15:11 crc kubenswrapper[4998]: I0203 07:15:11.439884 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6l9qj"] Feb 03 07:15:11 crc kubenswrapper[4998]: I0203 07:15:11.446218 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-6l9qj"] Feb 03 07:15:11 crc kubenswrapper[4998]: I0203 07:15:11.456093 4998 scope.go:117] "RemoveContainer" containerID="9bea0f53558e766ccb498ba52a82dac25c7da119a39f2f181fcc55d3285f5b7c" Feb 03 07:15:11 crc kubenswrapper[4998]: I0203 07:15:11.559730 4998 scope.go:117] "RemoveContainer" containerID="db55347d24a7239a2c4412cff4b1cde74136d056a667469efff8d34729639930" Feb 03 07:15:11 crc kubenswrapper[4998]: E0203 07:15:11.562893 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db55347d24a7239a2c4412cff4b1cde74136d056a667469efff8d34729639930\": container with ID starting with db55347d24a7239a2c4412cff4b1cde74136d056a667469efff8d34729639930 not found: ID does not exist" containerID="db55347d24a7239a2c4412cff4b1cde74136d056a667469efff8d34729639930" Feb 03 07:15:11 crc kubenswrapper[4998]: I0203 07:15:11.562957 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db55347d24a7239a2c4412cff4b1cde74136d056a667469efff8d34729639930"} err="failed to get container status \"db55347d24a7239a2c4412cff4b1cde74136d056a667469efff8d34729639930\": rpc error: code = NotFound desc = could not find container \"db55347d24a7239a2c4412cff4b1cde74136d056a667469efff8d34729639930\": container with ID starting with db55347d24a7239a2c4412cff4b1cde74136d056a667469efff8d34729639930 not found: ID does not exist" Feb 03 07:15:11 crc kubenswrapper[4998]: I0203 07:15:11.562990 4998 scope.go:117] "RemoveContainer" containerID="1b833b82188d11284d153ba06dcfa206738eaee17e55aafd1f6038a363554e94" Feb 03 07:15:11 crc kubenswrapper[4998]: E0203 07:15:11.563336 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b833b82188d11284d153ba06dcfa206738eaee17e55aafd1f6038a363554e94\": container with ID starting with 1b833b82188d11284d153ba06dcfa206738eaee17e55aafd1f6038a363554e94 not found: ID does not exist" containerID="1b833b82188d11284d153ba06dcfa206738eaee17e55aafd1f6038a363554e94" Feb 03 07:15:11 crc kubenswrapper[4998]: I0203 07:15:11.563371 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b833b82188d11284d153ba06dcfa206738eaee17e55aafd1f6038a363554e94"} err="failed to get container status \"1b833b82188d11284d153ba06dcfa206738eaee17e55aafd1f6038a363554e94\": rpc error: code = NotFound desc = could not find container \"1b833b82188d11284d153ba06dcfa206738eaee17e55aafd1f6038a363554e94\": container with ID starting with 
1b833b82188d11284d153ba06dcfa206738eaee17e55aafd1f6038a363554e94 not found: ID does not exist" Feb 03 07:15:11 crc kubenswrapper[4998]: I0203 07:15:11.563401 4998 scope.go:117] "RemoveContainer" containerID="9bea0f53558e766ccb498ba52a82dac25c7da119a39f2f181fcc55d3285f5b7c" Feb 03 07:15:11 crc kubenswrapper[4998]: E0203 07:15:11.563660 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9bea0f53558e766ccb498ba52a82dac25c7da119a39f2f181fcc55d3285f5b7c\": container with ID starting with 9bea0f53558e766ccb498ba52a82dac25c7da119a39f2f181fcc55d3285f5b7c not found: ID does not exist" containerID="9bea0f53558e766ccb498ba52a82dac25c7da119a39f2f181fcc55d3285f5b7c" Feb 03 07:15:11 crc kubenswrapper[4998]: I0203 07:15:11.563688 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9bea0f53558e766ccb498ba52a82dac25c7da119a39f2f181fcc55d3285f5b7c"} err="failed to get container status \"9bea0f53558e766ccb498ba52a82dac25c7da119a39f2f181fcc55d3285f5b7c\": rpc error: code = NotFound desc = could not find container \"9bea0f53558e766ccb498ba52a82dac25c7da119a39f2f181fcc55d3285f5b7c\": container with ID starting with 9bea0f53558e766ccb498ba52a82dac25c7da119a39f2f181fcc55d3285f5b7c not found: ID does not exist" Feb 03 07:15:11 crc kubenswrapper[4998]: I0203 07:15:11.775739 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7dv85" Feb 03 07:15:11 crc kubenswrapper[4998]: I0203 07:15:11.859580 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kffsb\" (UniqueName: \"kubernetes.io/projected/250a06cd-a850-45d6-aaf5-daa0f567c706-kube-api-access-kffsb\") pod \"250a06cd-a850-45d6-aaf5-daa0f567c706\" (UID: \"250a06cd-a850-45d6-aaf5-daa0f567c706\") " Feb 03 07:15:11 crc kubenswrapper[4998]: I0203 07:15:11.859640 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/250a06cd-a850-45d6-aaf5-daa0f567c706-utilities\") pod \"250a06cd-a850-45d6-aaf5-daa0f567c706\" (UID: \"250a06cd-a850-45d6-aaf5-daa0f567c706\") " Feb 03 07:15:11 crc kubenswrapper[4998]: I0203 07:15:11.859737 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/250a06cd-a850-45d6-aaf5-daa0f567c706-catalog-content\") pod \"250a06cd-a850-45d6-aaf5-daa0f567c706\" (UID: \"250a06cd-a850-45d6-aaf5-daa0f567c706\") " Feb 03 07:15:11 crc kubenswrapper[4998]: I0203 07:15:11.861644 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/250a06cd-a850-45d6-aaf5-daa0f567c706-utilities" (OuterVolumeSpecName: "utilities") pod "250a06cd-a850-45d6-aaf5-daa0f567c706" (UID: "250a06cd-a850-45d6-aaf5-daa0f567c706"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:15:11 crc kubenswrapper[4998]: I0203 07:15:11.865202 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/250a06cd-a850-45d6-aaf5-daa0f567c706-kube-api-access-kffsb" (OuterVolumeSpecName: "kube-api-access-kffsb") pod "250a06cd-a850-45d6-aaf5-daa0f567c706" (UID: "250a06cd-a850-45d6-aaf5-daa0f567c706"). InnerVolumeSpecName "kube-api-access-kffsb". 
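
The E/I pairs above are benign: by the time the kubelet asks for the dead container's status, CRI-O has already pruned it, so the lookup returns gRPC NotFound and pod_container_deletor logs the error and moves on. Treating NotFound as "already deleted" keeps removal idempotent; a sketch of that check using the real google.golang.org/grpc status and codes packages (the injected deleteByID function is hypothetical):

    package kubeletsketch

    import (
        "google.golang.org/grpc/codes"
        "google.golang.org/grpc/status"
    )

    // removeContainer deletes a container by ID, treating "already gone" as
    // success so that repeated RemoveContainer passes, like those in the
    // log above, stay idempotent.
    func removeContainer(deleteByID func(id string) error, id string) error {
        if err := deleteByID(id); status.Code(err) != codes.NotFound {
            return err // nil on success, or a real failure
        }
        // The runtime pruned the container first; nothing left to do.
        return nil
    }
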
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:15:11 crc kubenswrapper[4998]: I0203 07:15:11.888999 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/250a06cd-a850-45d6-aaf5-daa0f567c706-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "250a06cd-a850-45d6-aaf5-daa0f567c706" (UID: "250a06cd-a850-45d6-aaf5-daa0f567c706"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:15:11 crc kubenswrapper[4998]: I0203 07:15:11.960957 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kffsb\" (UniqueName: \"kubernetes.io/projected/250a06cd-a850-45d6-aaf5-daa0f567c706-kube-api-access-kffsb\") on node \"crc\" DevicePath \"\"" Feb 03 07:15:11 crc kubenswrapper[4998]: I0203 07:15:11.960998 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/250a06cd-a850-45d6-aaf5-daa0f567c706-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:15:11 crc kubenswrapper[4998]: I0203 07:15:11.961010 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/250a06cd-a850-45d6-aaf5-daa0f567c706-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:15:12 crc kubenswrapper[4998]: I0203 07:15:12.419909 4998 generic.go:334] "Generic (PLEG): container finished" podID="250a06cd-a850-45d6-aaf5-daa0f567c706" containerID="bb72b709a2d714544c6f5330cc9dc2d36603cbf3ed99b46d355383ddcfe7c0d0" exitCode=0 Feb 03 07:15:12 crc kubenswrapper[4998]: I0203 07:15:12.419968 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7dv85" event={"ID":"250a06cd-a850-45d6-aaf5-daa0f567c706","Type":"ContainerDied","Data":"bb72b709a2d714544c6f5330cc9dc2d36603cbf3ed99b46d355383ddcfe7c0d0"} Feb 03 07:15:12 crc kubenswrapper[4998]: I0203 07:15:12.420010 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7dv85" event={"ID":"250a06cd-a850-45d6-aaf5-daa0f567c706","Type":"ContainerDied","Data":"43b8ccf45ca29d8d0952dab78b8a310f0c9d27c3626cf907caf32b6bac63a180"} Feb 03 07:15:12 crc kubenswrapper[4998]: I0203 07:15:12.420032 4998 scope.go:117] "RemoveContainer" containerID="bb72b709a2d714544c6f5330cc9dc2d36603cbf3ed99b46d355383ddcfe7c0d0" Feb 03 07:15:12 crc kubenswrapper[4998]: I0203 07:15:12.420855 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7dv85" Feb 03 07:15:12 crc kubenswrapper[4998]: I0203 07:15:12.438427 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3103bb3e-fb5b-4479-b90a-1cb9bc86976a" path="/var/lib/kubelet/pods/3103bb3e-fb5b-4479-b90a-1cb9bc86976a/volumes" Feb 03 07:15:12 crc kubenswrapper[4998]: I0203 07:15:12.446393 4998 scope.go:117] "RemoveContainer" containerID="6e2bf1ee00fd17b5d6e4d95d3104e92cea9d27f382ab1debe88491c68b4f9066" Feb 03 07:15:12 crc kubenswrapper[4998]: I0203 07:15:12.462299 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7dv85"] Feb 03 07:15:12 crc kubenswrapper[4998]: I0203 07:15:12.468109 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-7dv85"] Feb 03 07:15:12 crc kubenswrapper[4998]: I0203 07:15:12.491455 4998 scope.go:117] "RemoveContainer" containerID="2c0edbc691736aa825abf73108dac3f302d132935499b3b902e15171c3819ea9" Feb 03 07:15:12 crc kubenswrapper[4998]: I0203 07:15:12.514951 4998 scope.go:117] "RemoveContainer" containerID="bb72b709a2d714544c6f5330cc9dc2d36603cbf3ed99b46d355383ddcfe7c0d0" Feb 03 07:15:12 crc kubenswrapper[4998]: E0203 07:15:12.516593 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb72b709a2d714544c6f5330cc9dc2d36603cbf3ed99b46d355383ddcfe7c0d0\": container with ID starting with bb72b709a2d714544c6f5330cc9dc2d36603cbf3ed99b46d355383ddcfe7c0d0 not found: ID does not exist" containerID="bb72b709a2d714544c6f5330cc9dc2d36603cbf3ed99b46d355383ddcfe7c0d0" Feb 03 07:15:12 crc kubenswrapper[4998]: I0203 07:15:12.516681 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb72b709a2d714544c6f5330cc9dc2d36603cbf3ed99b46d355383ddcfe7c0d0"} err="failed to get container status \"bb72b709a2d714544c6f5330cc9dc2d36603cbf3ed99b46d355383ddcfe7c0d0\": rpc error: code = NotFound desc = could not find container \"bb72b709a2d714544c6f5330cc9dc2d36603cbf3ed99b46d355383ddcfe7c0d0\": container with ID starting with bb72b709a2d714544c6f5330cc9dc2d36603cbf3ed99b46d355383ddcfe7c0d0 not found: ID does not exist" Feb 03 07:15:12 crc kubenswrapper[4998]: I0203 07:15:12.516707 4998 scope.go:117] "RemoveContainer" containerID="6e2bf1ee00fd17b5d6e4d95d3104e92cea9d27f382ab1debe88491c68b4f9066" Feb 03 07:15:12 crc kubenswrapper[4998]: E0203 07:15:12.516944 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e2bf1ee00fd17b5d6e4d95d3104e92cea9d27f382ab1debe88491c68b4f9066\": container with ID starting with 6e2bf1ee00fd17b5d6e4d95d3104e92cea9d27f382ab1debe88491c68b4f9066 not found: ID does not exist" containerID="6e2bf1ee00fd17b5d6e4d95d3104e92cea9d27f382ab1debe88491c68b4f9066" Feb 03 07:15:12 crc kubenswrapper[4998]: I0203 07:15:12.516968 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e2bf1ee00fd17b5d6e4d95d3104e92cea9d27f382ab1debe88491c68b4f9066"} err="failed to get container status \"6e2bf1ee00fd17b5d6e4d95d3104e92cea9d27f382ab1debe88491c68b4f9066\": rpc error: code = NotFound desc = could not find container \"6e2bf1ee00fd17b5d6e4d95d3104e92cea9d27f382ab1debe88491c68b4f9066\": container with ID starting with 6e2bf1ee00fd17b5d6e4d95d3104e92cea9d27f382ab1debe88491c68b4f9066 not found: ID does not exist" Feb 03 07:15:12 crc kubenswrapper[4998]: I0203 
07:15:12.516982 4998 scope.go:117] "RemoveContainer" containerID="2c0edbc691736aa825abf73108dac3f302d132935499b3b902e15171c3819ea9" Feb 03 07:15:12 crc kubenswrapper[4998]: E0203 07:15:12.517254 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c0edbc691736aa825abf73108dac3f302d132935499b3b902e15171c3819ea9\": container with ID starting with 2c0edbc691736aa825abf73108dac3f302d132935499b3b902e15171c3819ea9 not found: ID does not exist" containerID="2c0edbc691736aa825abf73108dac3f302d132935499b3b902e15171c3819ea9" Feb 03 07:15:12 crc kubenswrapper[4998]: I0203 07:15:12.517279 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c0edbc691736aa825abf73108dac3f302d132935499b3b902e15171c3819ea9"} err="failed to get container status \"2c0edbc691736aa825abf73108dac3f302d132935499b3b902e15171c3819ea9\": rpc error: code = NotFound desc = could not find container \"2c0edbc691736aa825abf73108dac3f302d132935499b3b902e15171c3819ea9\": container with ID starting with 2c0edbc691736aa825abf73108dac3f302d132935499b3b902e15171c3819ea9 not found: ID does not exist" Feb 03 07:15:14 crc kubenswrapper[4998]: I0203 07:15:14.428543 4998 scope.go:117] "RemoveContainer" containerID="b1a8e64af6c0323b631a5d0e804eb508d44c7d65569e25637152c708c50e98a8" Feb 03 07:15:14 crc kubenswrapper[4998]: E0203 07:15:14.428976 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:15:14 crc kubenswrapper[4998]: I0203 07:15:14.439962 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="250a06cd-a850-45d6-aaf5-daa0f567c706" path="/var/lib/kubelet/pods/250a06cd-a850-45d6-aaf5-daa0f567c706/volumes" Feb 03 07:15:29 crc kubenswrapper[4998]: I0203 07:15:29.427412 4998 scope.go:117] "RemoveContainer" containerID="b1a8e64af6c0323b631a5d0e804eb508d44c7d65569e25637152c708c50e98a8" Feb 03 07:15:29 crc kubenswrapper[4998]: E0203 07:15:29.428365 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:15:40 crc kubenswrapper[4998]: I0203 07:15:40.428284 4998 scope.go:117] "RemoveContainer" containerID="b1a8e64af6c0323b631a5d0e804eb508d44c7d65569e25637152c708c50e98a8" Feb 03 07:15:40 crc kubenswrapper[4998]: E0203 07:15:40.429479 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:15:52 crc kubenswrapper[4998]: I0203 07:15:52.433168 4998 scope.go:117] 
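
From here machine-config-daemon sits in CrashLoopBackOff: each sync the kubelet marks the dead container for removal, then refuses to start a replacement because the back-off window is still open, and the message shows the back-off has grown to its 5m0s cap. The kubelet backs off exponentially per container, doubling from an initial delay (10s is the usual default, an assumption here) up to the cap; a sketch of that schedule:

    package kubeletsketch

    import "time"

    // nextBackoff returns the delay before the next restart attempt:
    // exponential doubling up to the ceiling, which is the "back-off 5m0s"
    // seen in the messages above once the cap is reached.
    func nextBackoff(prev, initial, ceiling time.Duration) time.Duration {
        if prev == 0 {
            return initial // first failure, e.g. 10s
        }
        next := 2 * prev
        if next > ceiling {
            return ceiling // 10s, 20s, 40s, 1m20s, 2m40s, then pinned at 5m
        }
        return next
    }
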
"RemoveContainer" containerID="b1a8e64af6c0323b631a5d0e804eb508d44c7d65569e25637152c708c50e98a8" Feb 03 07:15:52 crc kubenswrapper[4998]: E0203 07:15:52.433672 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:16:06 crc kubenswrapper[4998]: I0203 07:16:06.427809 4998 scope.go:117] "RemoveContainer" containerID="b1a8e64af6c0323b631a5d0e804eb508d44c7d65569e25637152c708c50e98a8" Feb 03 07:16:06 crc kubenswrapper[4998]: E0203 07:16:06.428587 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:16:21 crc kubenswrapper[4998]: I0203 07:16:21.427759 4998 scope.go:117] "RemoveContainer" containerID="b1a8e64af6c0323b631a5d0e804eb508d44c7d65569e25637152c708c50e98a8" Feb 03 07:16:21 crc kubenswrapper[4998]: E0203 07:16:21.428584 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:16:34 crc kubenswrapper[4998]: I0203 07:16:34.427332 4998 scope.go:117] "RemoveContainer" containerID="b1a8e64af6c0323b631a5d0e804eb508d44c7d65569e25637152c708c50e98a8" Feb 03 07:16:34 crc kubenswrapper[4998]: E0203 07:16:34.428287 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:16:46 crc kubenswrapper[4998]: I0203 07:16:46.427756 4998 scope.go:117] "RemoveContainer" containerID="b1a8e64af6c0323b631a5d0e804eb508d44c7d65569e25637152c708c50e98a8" Feb 03 07:16:46 crc kubenswrapper[4998]: E0203 07:16:46.428434 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:16:58 crc kubenswrapper[4998]: I0203 07:16:58.427440 4998 scope.go:117] "RemoveContainer" containerID="b1a8e64af6c0323b631a5d0e804eb508d44c7d65569e25637152c708c50e98a8" Feb 03 07:16:58 crc kubenswrapper[4998]: E0203 07:16:58.427962 4998 pod_workers.go:1301] "Error 
syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:17:11 crc kubenswrapper[4998]: I0203 07:17:11.427318 4998 scope.go:117] "RemoveContainer" containerID="b1a8e64af6c0323b631a5d0e804eb508d44c7d65569e25637152c708c50e98a8" Feb 03 07:17:11 crc kubenswrapper[4998]: E0203 07:17:11.428102 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:17:25 crc kubenswrapper[4998]: I0203 07:17:25.428377 4998 scope.go:117] "RemoveContainer" containerID="b1a8e64af6c0323b631a5d0e804eb508d44c7d65569e25637152c708c50e98a8" Feb 03 07:17:25 crc kubenswrapper[4998]: E0203 07:17:25.429261 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:17:38 crc kubenswrapper[4998]: I0203 07:17:38.427799 4998 scope.go:117] "RemoveContainer" containerID="b1a8e64af6c0323b631a5d0e804eb508d44c7d65569e25637152c708c50e98a8" Feb 03 07:17:38 crc kubenswrapper[4998]: E0203 07:17:38.428505 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:17:51 crc kubenswrapper[4998]: I0203 07:17:51.427589 4998 scope.go:117] "RemoveContainer" containerID="b1a8e64af6c0323b631a5d0e804eb508d44c7d65569e25637152c708c50e98a8" Feb 03 07:17:51 crc kubenswrapper[4998]: E0203 07:17:51.428413 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:18:04 crc kubenswrapper[4998]: I0203 07:18:04.427603 4998 scope.go:117] "RemoveContainer" containerID="b1a8e64af6c0323b631a5d0e804eb508d44c7d65569e25637152c708c50e98a8" Feb 03 07:18:04 crc kubenswrapper[4998]: E0203 07:18:04.428328 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:18:17 crc kubenswrapper[4998]: I0203 07:18:17.427706 4998 scope.go:117] "RemoveContainer" containerID="b1a8e64af6c0323b631a5d0e804eb508d44c7d65569e25637152c708c50e98a8" Feb 03 07:18:17 crc kubenswrapper[4998]: I0203 07:18:17.669947 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerStarted","Data":"e17c1a4f147ea8f91238e8293d1424da4134dd9cd2d66aba31ea7c85fcceea85"} Feb 03 07:19:53 crc kubenswrapper[4998]: I0203 07:19:53.204892 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vvzxn"] Feb 03 07:19:53 crc kubenswrapper[4998]: E0203 07:19:53.206908 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="250a06cd-a850-45d6-aaf5-daa0f567c706" containerName="extract-utilities" Feb 03 07:19:53 crc kubenswrapper[4998]: I0203 07:19:53.207014 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="250a06cd-a850-45d6-aaf5-daa0f567c706" containerName="extract-utilities" Feb 03 07:19:53 crc kubenswrapper[4998]: E0203 07:19:53.207115 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3103bb3e-fb5b-4479-b90a-1cb9bc86976a" containerName="registry-server" Feb 03 07:19:53 crc kubenswrapper[4998]: I0203 07:19:53.207197 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="3103bb3e-fb5b-4479-b90a-1cb9bc86976a" containerName="registry-server" Feb 03 07:19:53 crc kubenswrapper[4998]: E0203 07:19:53.207293 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3103bb3e-fb5b-4479-b90a-1cb9bc86976a" containerName="extract-content" Feb 03 07:19:53 crc kubenswrapper[4998]: I0203 07:19:53.207377 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="3103bb3e-fb5b-4479-b90a-1cb9bc86976a" containerName="extract-content" Feb 03 07:19:53 crc kubenswrapper[4998]: E0203 07:19:53.207469 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e2485eb-fa88-4a87-b195-1b3dffc5075d" containerName="collect-profiles" Feb 03 07:19:53 crc kubenswrapper[4998]: I0203 07:19:53.207545 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e2485eb-fa88-4a87-b195-1b3dffc5075d" containerName="collect-profiles" Feb 03 07:19:53 crc kubenswrapper[4998]: E0203 07:19:53.207639 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="250a06cd-a850-45d6-aaf5-daa0f567c706" containerName="extract-content" Feb 03 07:19:53 crc kubenswrapper[4998]: I0203 07:19:53.207654 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="250a06cd-a850-45d6-aaf5-daa0f567c706" containerName="extract-content" Feb 03 07:19:53 crc kubenswrapper[4998]: E0203 07:19:53.207669 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="250a06cd-a850-45d6-aaf5-daa0f567c706" containerName="registry-server" Feb 03 07:19:53 crc kubenswrapper[4998]: I0203 07:19:53.207676 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="250a06cd-a850-45d6-aaf5-daa0f567c706" containerName="registry-server" Feb 03 07:19:53 crc kubenswrapper[4998]: E0203 07:19:53.207701 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3103bb3e-fb5b-4479-b90a-1cb9bc86976a" containerName="extract-utilities" Feb 03 07:19:53 crc kubenswrapper[4998]: I0203 
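
Before admitting redhat-operators-vvzxn, the CPU and memory managers sweep their checkpointed per-container assignments and drop entries belonging to pods that no longer exist (the two catalog pods and the collect-profiles job torn down earlier). A sketch of that reconciliation, with a plain map standing in for the managers' checkpoint state:

    package kubeletsketch

    // removeStaleState mirrors the cpu_manager/memory_manager
    // RemoveStaleState sweep above: any assignment whose pod is no longer
    // active is deleted. assignments maps podUID -> containerName ->
    // opaque resource assignment; one "removing container" / "Deleted
    // CPUSet assignment" pair is logged per container in the real sweep.
    func removeStaleState(assignments map[string]map[string]any, activePods map[string]bool) {
        for podUID := range assignments {
            if !activePods[podUID] {
                delete(assignments, podUID) // deleting during range is safe in Go
            }
        }
    }
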
Feb 03 07:19:53 crc kubenswrapper[4998]: I0203 07:19:53.207881 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="250a06cd-a850-45d6-aaf5-daa0f567c706" containerName="registry-server"
Feb 03 07:19:53 crc kubenswrapper[4998]: I0203 07:19:53.207898 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="3103bb3e-fb5b-4479-b90a-1cb9bc86976a" containerName="registry-server"
Feb 03 07:19:53 crc kubenswrapper[4998]: I0203 07:19:53.207916 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e2485eb-fa88-4a87-b195-1b3dffc5075d" containerName="collect-profiles"
Feb 03 07:19:53 crc kubenswrapper[4998]: I0203 07:19:53.209002 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vvzxn"
Feb 03 07:19:53 crc kubenswrapper[4998]: I0203 07:19:53.211142 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vvzxn"]
Feb 03 07:19:53 crc kubenswrapper[4998]: I0203 07:19:53.351686 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aabf6cc6-b6a4-4ca2-90c2-7de577bc6696-catalog-content\") pod \"redhat-operators-vvzxn\" (UID: \"aabf6cc6-b6a4-4ca2-90c2-7de577bc6696\") " pod="openshift-marketplace/redhat-operators-vvzxn"
Feb 03 07:19:53 crc kubenswrapper[4998]: I0203 07:19:53.351742 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aabf6cc6-b6a4-4ca2-90c2-7de577bc6696-utilities\") pod \"redhat-operators-vvzxn\" (UID: \"aabf6cc6-b6a4-4ca2-90c2-7de577bc6696\") " pod="openshift-marketplace/redhat-operators-vvzxn"
Feb 03 07:19:53 crc kubenswrapper[4998]: I0203 07:19:53.352321 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l57c7\" (UniqueName: \"kubernetes.io/projected/aabf6cc6-b6a4-4ca2-90c2-7de577bc6696-kube-api-access-l57c7\") pod \"redhat-operators-vvzxn\" (UID: \"aabf6cc6-b6a4-4ca2-90c2-7de577bc6696\") " pod="openshift-marketplace/redhat-operators-vvzxn"
Feb 03 07:19:53 crc kubenswrapper[4998]: I0203 07:19:53.454183 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l57c7\" (UniqueName: \"kubernetes.io/projected/aabf6cc6-b6a4-4ca2-90c2-7de577bc6696-kube-api-access-l57c7\") pod \"redhat-operators-vvzxn\" (UID: \"aabf6cc6-b6a4-4ca2-90c2-7de577bc6696\") " pod="openshift-marketplace/redhat-operators-vvzxn"
Feb 03 07:19:53 crc kubenswrapper[4998]: I0203 07:19:53.454338 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aabf6cc6-b6a4-4ca2-90c2-7de577bc6696-catalog-content\") pod \"redhat-operators-vvzxn\" (UID: \"aabf6cc6-b6a4-4ca2-90c2-7de577bc6696\") " pod="openshift-marketplace/redhat-operators-vvzxn"
Feb 03 07:19:53 crc kubenswrapper[4998]: I0203 07:19:53.454372 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aabf6cc6-b6a4-4ca2-90c2-7de577bc6696-utilities\") pod \"redhat-operators-vvzxn\" (UID: \"aabf6cc6-b6a4-4ca2-90c2-7de577bc6696\") " pod="openshift-marketplace/redhat-operators-vvzxn"
Feb 03 07:19:53 crc kubenswrapper[4998]: I0203 07:19:53.456036 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aabf6cc6-b6a4-4ca2-90c2-7de577bc6696-catalog-content\") pod \"redhat-operators-vvzxn\" (UID: \"aabf6cc6-b6a4-4ca2-90c2-7de577bc6696\") " pod="openshift-marketplace/redhat-operators-vvzxn"
Feb 03 07:19:53 crc kubenswrapper[4998]: I0203 07:19:53.456054 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aabf6cc6-b6a4-4ca2-90c2-7de577bc6696-utilities\") pod \"redhat-operators-vvzxn\" (UID: \"aabf6cc6-b6a4-4ca2-90c2-7de577bc6696\") " pod="openshift-marketplace/redhat-operators-vvzxn"
Feb 03 07:19:53 crc kubenswrapper[4998]: I0203 07:19:53.485184 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l57c7\" (UniqueName: \"kubernetes.io/projected/aabf6cc6-b6a4-4ca2-90c2-7de577bc6696-kube-api-access-l57c7\") pod \"redhat-operators-vvzxn\" (UID: \"aabf6cc6-b6a4-4ca2-90c2-7de577bc6696\") " pod="openshift-marketplace/redhat-operators-vvzxn"
Feb 03 07:19:53 crc kubenswrapper[4998]: I0203 07:19:53.528388 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vvzxn"
Feb 03 07:19:53 crc kubenswrapper[4998]: I0203 07:19:53.968729 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vvzxn"]
Feb 03 07:19:54 crc kubenswrapper[4998]: I0203 07:19:54.294176 4998 generic.go:334] "Generic (PLEG): container finished" podID="aabf6cc6-b6a4-4ca2-90c2-7de577bc6696" containerID="75cf49eefe140d510f186326b4d7e363eb7a7661a8df44ebe5f89ff309dfb168" exitCode=0
Feb 03 07:19:54 crc kubenswrapper[4998]: I0203 07:19:54.294234 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vvzxn" event={"ID":"aabf6cc6-b6a4-4ca2-90c2-7de577bc6696","Type":"ContainerDied","Data":"75cf49eefe140d510f186326b4d7e363eb7a7661a8df44ebe5f89ff309dfb168"}
Feb 03 07:19:54 crc kubenswrapper[4998]: I0203 07:19:54.294269 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vvzxn" event={"ID":"aabf6cc6-b6a4-4ca2-90c2-7de577bc6696","Type":"ContainerStarted","Data":"099eb3dd37826d8da2433454976d8a7ff903af7da9c5aff466c14be3270d42e6"}
Feb 03 07:19:56 crc kubenswrapper[4998]: I0203 07:19:56.309403 4998 generic.go:334] "Generic (PLEG): container finished" podID="aabf6cc6-b6a4-4ca2-90c2-7de577bc6696" containerID="824b68728a9ad9d08317a8b72bf6f4fa6bf8a8677f7a6b42a447d53af755d32c" exitCode=0
Feb 03 07:19:56 crc kubenswrapper[4998]: I0203 07:19:56.309534 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vvzxn" event={"ID":"aabf6cc6-b6a4-4ca2-90c2-7de577bc6696","Type":"ContainerDied","Data":"824b68728a9ad9d08317a8b72bf6f4fa6bf8a8677f7a6b42a447d53af755d32c"}
Feb 03 07:19:58 crc kubenswrapper[4998]: I0203 07:19:58.325241 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vvzxn" event={"ID":"aabf6cc6-b6a4-4ca2-90c2-7de577bc6696","Type":"ContainerStarted","Data":"cba108de09481747f02db2a6f55dd12390e3c99e9feb355cdb59a0671aacb86f"}
Feb 03 07:19:58 crc kubenswrapper[4998]: I0203 07:19:58.351232 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vvzxn" podStartSLOduration=2.452228442 podStartE2EDuration="5.351216145s" podCreationTimestamp="2026-02-03 07:19:53 +0000 UTC" firstStartedPulling="2026-02-03 07:19:54.296916861 +0000 UTC m=+2032.583610667" lastFinishedPulling="2026-02-03 07:19:57.195904554 +0000 UTC m=+2035.482598370" observedRunningTime="2026-02-03 07:19:58.350956488 +0000 UTC m=+2036.637650314" watchObservedRunningTime="2026-02-03 07:19:58.351216145 +0000 UTC m=+2036.637909951"
Feb 03 07:20:03 crc kubenswrapper[4998]: I0203 07:20:03.529655 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vvzxn"
Feb 03 07:20:03 crc kubenswrapper[4998]: I0203 07:20:03.530038 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vvzxn"
Feb 03 07:20:03 crc kubenswrapper[4998]: I0203 07:20:03.573140 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vvzxn"
Feb 03 07:20:04 crc kubenswrapper[4998]: I0203 07:20:04.413370 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vvzxn"
Feb 03 07:20:04 crc kubenswrapper[4998]: I0203 07:20:04.452935 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vvzxn"]
Feb 03 07:20:06 crc kubenswrapper[4998]: I0203 07:20:06.383252 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-vvzxn" podUID="aabf6cc6-b6a4-4ca2-90c2-7de577bc6696" containerName="registry-server" containerID="cri-o://cba108de09481747f02db2a6f55dd12390e3c99e9feb355cdb59a0671aacb86f" gracePeriod=2
Feb 03 07:20:08 crc kubenswrapper[4998]: I0203 07:20:08.403845 4998 generic.go:334] "Generic (PLEG): container finished" podID="aabf6cc6-b6a4-4ca2-90c2-7de577bc6696" containerID="cba108de09481747f02db2a6f55dd12390e3c99e9feb355cdb59a0671aacb86f" exitCode=0
Feb 03 07:20:08 crc kubenswrapper[4998]: I0203 07:20:08.403944 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vvzxn" event={"ID":"aabf6cc6-b6a4-4ca2-90c2-7de577bc6696","Type":"ContainerDied","Data":"cba108de09481747f02db2a6f55dd12390e3c99e9feb355cdb59a0671aacb86f"}
Feb 03 07:20:08 crc kubenswrapper[4998]: I0203 07:20:08.638098 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vvzxn"
Need to start a new one" pod="openshift-marketplace/redhat-operators-vvzxn" Feb 03 07:20:08 crc kubenswrapper[4998]: I0203 07:20:08.770689 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l57c7\" (UniqueName: \"kubernetes.io/projected/aabf6cc6-b6a4-4ca2-90c2-7de577bc6696-kube-api-access-l57c7\") pod \"aabf6cc6-b6a4-4ca2-90c2-7de577bc6696\" (UID: \"aabf6cc6-b6a4-4ca2-90c2-7de577bc6696\") " Feb 03 07:20:08 crc kubenswrapper[4998]: I0203 07:20:08.771069 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aabf6cc6-b6a4-4ca2-90c2-7de577bc6696-catalog-content\") pod \"aabf6cc6-b6a4-4ca2-90c2-7de577bc6696\" (UID: \"aabf6cc6-b6a4-4ca2-90c2-7de577bc6696\") " Feb 03 07:20:08 crc kubenswrapper[4998]: I0203 07:20:08.771108 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aabf6cc6-b6a4-4ca2-90c2-7de577bc6696-utilities\") pod \"aabf6cc6-b6a4-4ca2-90c2-7de577bc6696\" (UID: \"aabf6cc6-b6a4-4ca2-90c2-7de577bc6696\") " Feb 03 07:20:08 crc kubenswrapper[4998]: I0203 07:20:08.772367 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aabf6cc6-b6a4-4ca2-90c2-7de577bc6696-utilities" (OuterVolumeSpecName: "utilities") pod "aabf6cc6-b6a4-4ca2-90c2-7de577bc6696" (UID: "aabf6cc6-b6a4-4ca2-90c2-7de577bc6696"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:20:08 crc kubenswrapper[4998]: I0203 07:20:08.779812 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aabf6cc6-b6a4-4ca2-90c2-7de577bc6696-kube-api-access-l57c7" (OuterVolumeSpecName: "kube-api-access-l57c7") pod "aabf6cc6-b6a4-4ca2-90c2-7de577bc6696" (UID: "aabf6cc6-b6a4-4ca2-90c2-7de577bc6696"). InnerVolumeSpecName "kube-api-access-l57c7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:20:08 crc kubenswrapper[4998]: I0203 07:20:08.872580 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aabf6cc6-b6a4-4ca2-90c2-7de577bc6696-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:20:08 crc kubenswrapper[4998]: I0203 07:20:08.872616 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l57c7\" (UniqueName: \"kubernetes.io/projected/aabf6cc6-b6a4-4ca2-90c2-7de577bc6696-kube-api-access-l57c7\") on node \"crc\" DevicePath \"\"" Feb 03 07:20:08 crc kubenswrapper[4998]: I0203 07:20:08.901411 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aabf6cc6-b6a4-4ca2-90c2-7de577bc6696-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "aabf6cc6-b6a4-4ca2-90c2-7de577bc6696" (UID: "aabf6cc6-b6a4-4ca2-90c2-7de577bc6696"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:20:08 crc kubenswrapper[4998]: I0203 07:20:08.974036 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aabf6cc6-b6a4-4ca2-90c2-7de577bc6696-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:20:09 crc kubenswrapper[4998]: I0203 07:20:09.412768 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vvzxn" event={"ID":"aabf6cc6-b6a4-4ca2-90c2-7de577bc6696","Type":"ContainerDied","Data":"099eb3dd37826d8da2433454976d8a7ff903af7da9c5aff466c14be3270d42e6"} Feb 03 07:20:09 crc kubenswrapper[4998]: I0203 07:20:09.412856 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vvzxn" Feb 03 07:20:09 crc kubenswrapper[4998]: I0203 07:20:09.412890 4998 scope.go:117] "RemoveContainer" containerID="cba108de09481747f02db2a6f55dd12390e3c99e9feb355cdb59a0671aacb86f" Feb 03 07:20:09 crc kubenswrapper[4998]: I0203 07:20:09.431878 4998 scope.go:117] "RemoveContainer" containerID="824b68728a9ad9d08317a8b72bf6f4fa6bf8a8677f7a6b42a447d53af755d32c" Feb 03 07:20:09 crc kubenswrapper[4998]: I0203 07:20:09.457651 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vvzxn"] Feb 03 07:20:09 crc kubenswrapper[4998]: I0203 07:20:09.464275 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-vvzxn"] Feb 03 07:20:09 crc kubenswrapper[4998]: I0203 07:20:09.468683 4998 scope.go:117] "RemoveContainer" containerID="75cf49eefe140d510f186326b4d7e363eb7a7661a8df44ebe5f89ff309dfb168" Feb 03 07:20:10 crc kubenswrapper[4998]: I0203 07:20:10.441430 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aabf6cc6-b6a4-4ca2-90c2-7de577bc6696" path="/var/lib/kubelet/pods/aabf6cc6-b6a4-4ca2-90c2-7de577bc6696/volumes" Feb 03 07:20:42 crc kubenswrapper[4998]: I0203 07:20:42.754077 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:20:42 crc kubenswrapper[4998]: I0203 07:20:42.754674 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:21:12 crc kubenswrapper[4998]: I0203 07:21:12.754505 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:21:12 crc kubenswrapper[4998]: I0203 07:21:12.755093 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:21:42 crc kubenswrapper[4998]: I0203 07:21:42.754544 4998 patch_prober.go:28] 
Feb 03 07:21:42 crc kubenswrapper[4998]: I0203 07:21:42.755181 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 03 07:21:42 crc kubenswrapper[4998]: I0203 07:21:42.755238 4998 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x"
Feb 03 07:21:42 crc kubenswrapper[4998]: I0203 07:21:42.756008 4998 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e17c1a4f147ea8f91238e8293d1424da4134dd9cd2d66aba31ea7c85fcceea85"} pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Feb 03 07:21:42 crc kubenswrapper[4998]: I0203 07:21:42.756103 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" containerID="cri-o://e17c1a4f147ea8f91238e8293d1424da4134dd9cd2d66aba31ea7c85fcceea85" gracePeriod=600
Feb 03 07:21:43 crc kubenswrapper[4998]: I0203 07:21:43.128205 4998 generic.go:334] "Generic (PLEG): container finished" podID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerID="e17c1a4f147ea8f91238e8293d1424da4134dd9cd2d66aba31ea7c85fcceea85" exitCode=0
Feb 03 07:21:43 crc kubenswrapper[4998]: I0203 07:21:43.128258 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerDied","Data":"e17c1a4f147ea8f91238e8293d1424da4134dd9cd2d66aba31ea7c85fcceea85"}
Feb 03 07:21:43 crc kubenswrapper[4998]: I0203 07:21:43.128594 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerStarted","Data":"6b6bba6097c868cfa9bf394a6a455eeb55142d4b1ef2e0b1da75c62a9daa057e"}
Feb 03 07:21:43 crc kubenswrapper[4998]: I0203 07:21:43.128615 4998 scope.go:117] "RemoveContainer" containerID="b1a8e64af6c0323b631a5d0e804eb508d44c7d65569e25637152c708c50e98a8"
Feb 03 07:22:15 crc kubenswrapper[4998]: I0203 07:22:15.799339 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4xhw8"]
Feb 03 07:22:15 crc kubenswrapper[4998]: E0203 07:22:15.800290 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aabf6cc6-b6a4-4ca2-90c2-7de577bc6696" containerName="extract-content"
Feb 03 07:22:15 crc kubenswrapper[4998]: I0203 07:22:15.800307 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="aabf6cc6-b6a4-4ca2-90c2-7de577bc6696" containerName="extract-content"
Feb 03 07:22:15 crc kubenswrapper[4998]: E0203 07:22:15.800331 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aabf6cc6-b6a4-4ca2-90c2-7de577bc6696" containerName="registry-server"
Feb 03 07:22:15 crc kubenswrapper[4998]: I0203 07:22:15.800340 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="aabf6cc6-b6a4-4ca2-90c2-7de577bc6696" containerName="registry-server"
Feb 03 07:22:15 crc kubenswrapper[4998]: E0203 07:22:15.800356 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aabf6cc6-b6a4-4ca2-90c2-7de577bc6696" containerName="extract-utilities"
Feb 03 07:22:15 crc kubenswrapper[4998]: I0203 07:22:15.800366 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="aabf6cc6-b6a4-4ca2-90c2-7de577bc6696" containerName="extract-utilities"
Feb 03 07:22:15 crc kubenswrapper[4998]: I0203 07:22:15.800557 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="aabf6cc6-b6a4-4ca2-90c2-7de577bc6696" containerName="registry-server"
Feb 03 07:22:15 crc kubenswrapper[4998]: I0203 07:22:15.801948 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4xhw8"
Feb 03 07:22:15 crc kubenswrapper[4998]: I0203 07:22:15.806502 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4xhw8"]
Feb 03 07:22:15 crc kubenswrapper[4998]: I0203 07:22:15.906481 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83b59597-8f90-4b4d-af75-4f563c56176f-catalog-content\") pod \"certified-operators-4xhw8\" (UID: \"83b59597-8f90-4b4d-af75-4f563c56176f\") " pod="openshift-marketplace/certified-operators-4xhw8"
Feb 03 07:22:15 crc kubenswrapper[4998]: I0203 07:22:15.906542 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83b59597-8f90-4b4d-af75-4f563c56176f-utilities\") pod \"certified-operators-4xhw8\" (UID: \"83b59597-8f90-4b4d-af75-4f563c56176f\") " pod="openshift-marketplace/certified-operators-4xhw8"
Feb 03 07:22:15 crc kubenswrapper[4998]: I0203 07:22:15.906575 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbq6b\" (UniqueName: \"kubernetes.io/projected/83b59597-8f90-4b4d-af75-4f563c56176f-kube-api-access-dbq6b\") pod \"certified-operators-4xhw8\" (UID: \"83b59597-8f90-4b4d-af75-4f563c56176f\") " pod="openshift-marketplace/certified-operators-4xhw8"
Feb 03 07:22:16 crc kubenswrapper[4998]: I0203 07:22:16.008033 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83b59597-8f90-4b4d-af75-4f563c56176f-catalog-content\") pod \"certified-operators-4xhw8\" (UID: \"83b59597-8f90-4b4d-af75-4f563c56176f\") " pod="openshift-marketplace/certified-operators-4xhw8"
Feb 03 07:22:16 crc kubenswrapper[4998]: I0203 07:22:16.008120 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83b59597-8f90-4b4d-af75-4f563c56176f-utilities\") pod \"certified-operators-4xhw8\" (UID: \"83b59597-8f90-4b4d-af75-4f563c56176f\") " pod="openshift-marketplace/certified-operators-4xhw8"
Feb 03 07:22:16 crc kubenswrapper[4998]: I0203 07:22:16.008150 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbq6b\" (UniqueName: \"kubernetes.io/projected/83b59597-8f90-4b4d-af75-4f563c56176f-kube-api-access-dbq6b\") pod \"certified-operators-4xhw8\" (UID: \"83b59597-8f90-4b4d-af75-4f563c56176f\") " pod="openshift-marketplace/certified-operators-4xhw8"
Feb 03 07:22:16 crc kubenswrapper[4998]: I0203 07:22:16.009121 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83b59597-8f90-4b4d-af75-4f563c56176f-catalog-content\") pod \"certified-operators-4xhw8\" (UID: \"83b59597-8f90-4b4d-af75-4f563c56176f\") " pod="openshift-marketplace/certified-operators-4xhw8"
Feb 03 07:22:16 crc kubenswrapper[4998]: I0203 07:22:16.009379 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83b59597-8f90-4b4d-af75-4f563c56176f-utilities\") pod \"certified-operators-4xhw8\" (UID: \"83b59597-8f90-4b4d-af75-4f563c56176f\") " pod="openshift-marketplace/certified-operators-4xhw8"
Feb 03 07:22:16 crc kubenswrapper[4998]: I0203 07:22:16.026463 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbq6b\" (UniqueName: \"kubernetes.io/projected/83b59597-8f90-4b4d-af75-4f563c56176f-kube-api-access-dbq6b\") pod \"certified-operators-4xhw8\" (UID: \"83b59597-8f90-4b4d-af75-4f563c56176f\") " pod="openshift-marketplace/certified-operators-4xhw8"
Feb 03 07:22:16 crc kubenswrapper[4998]: I0203 07:22:16.134530 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4xhw8"
Feb 03 07:22:16 crc kubenswrapper[4998]: I0203 07:22:16.623889 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4xhw8"]
Feb 03 07:22:17 crc kubenswrapper[4998]: I0203 07:22:17.368264 4998 generic.go:334] "Generic (PLEG): container finished" podID="83b59597-8f90-4b4d-af75-4f563c56176f" containerID="a4abbd568ac2483f4cf2f221c7e9eeafdeb61bd3d6ed1382022f3b31c17aea8e" exitCode=0
Feb 03 07:22:17 crc kubenswrapper[4998]: I0203 07:22:17.368308 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4xhw8" event={"ID":"83b59597-8f90-4b4d-af75-4f563c56176f","Type":"ContainerDied","Data":"a4abbd568ac2483f4cf2f221c7e9eeafdeb61bd3d6ed1382022f3b31c17aea8e"}
Feb 03 07:22:17 crc kubenswrapper[4998]: I0203 07:22:17.368543 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4xhw8" event={"ID":"83b59597-8f90-4b4d-af75-4f563c56176f","Type":"ContainerStarted","Data":"174a7307e55b5694dbc1b030ed6c27da0f0c11bc8976c7a6f4f8abd222844ac1"}
Feb 03 07:22:17 crc kubenswrapper[4998]: I0203 07:22:17.370711 4998 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Feb 03 07:22:18 crc kubenswrapper[4998]: I0203 07:22:18.376368 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4xhw8" event={"ID":"83b59597-8f90-4b4d-af75-4f563c56176f","Type":"ContainerStarted","Data":"a37edf322266d0a1a21a972cca3eaf50b94d5775ade0123095af09862241d20c"}
Feb 03 07:22:19 crc kubenswrapper[4998]: I0203 07:22:19.385219 4998 generic.go:334] "Generic (PLEG): container finished" podID="83b59597-8f90-4b4d-af75-4f563c56176f" containerID="a37edf322266d0a1a21a972cca3eaf50b94d5775ade0123095af09862241d20c" exitCode=0
Feb 03 07:22:19 crc kubenswrapper[4998]: I0203 07:22:19.385276 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4xhw8" event={"ID":"83b59597-8f90-4b4d-af75-4f563c56176f","Type":"ContainerDied","Data":"a37edf322266d0a1a21a972cca3eaf50b94d5775ade0123095af09862241d20c"}
event={"ID":"83b59597-8f90-4b4d-af75-4f563c56176f","Type":"ContainerDied","Data":"a37edf322266d0a1a21a972cca3eaf50b94d5775ade0123095af09862241d20c"} Feb 03 07:22:20 crc kubenswrapper[4998]: I0203 07:22:20.397840 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4xhw8" event={"ID":"83b59597-8f90-4b4d-af75-4f563c56176f","Type":"ContainerStarted","Data":"8d1e87be82ea9667bceb762fca83a43f6a7f058ba170cc867a411549b49e6ecd"} Feb 03 07:22:20 crc kubenswrapper[4998]: I0203 07:22:20.423694 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4xhw8" podStartSLOduration=2.944509822 podStartE2EDuration="5.423670426s" podCreationTimestamp="2026-02-03 07:22:15 +0000 UTC" firstStartedPulling="2026-02-03 07:22:17.370360208 +0000 UTC m=+2175.657054014" lastFinishedPulling="2026-02-03 07:22:19.849520812 +0000 UTC m=+2178.136214618" observedRunningTime="2026-02-03 07:22:20.414127385 +0000 UTC m=+2178.700821191" watchObservedRunningTime="2026-02-03 07:22:20.423670426 +0000 UTC m=+2178.710364232" Feb 03 07:22:26 crc kubenswrapper[4998]: I0203 07:22:26.136331 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4xhw8" Feb 03 07:22:26 crc kubenswrapper[4998]: I0203 07:22:26.136734 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4xhw8" Feb 03 07:22:26 crc kubenswrapper[4998]: I0203 07:22:26.177491 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4xhw8" Feb 03 07:22:26 crc kubenswrapper[4998]: I0203 07:22:26.483724 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4xhw8" Feb 03 07:22:26 crc kubenswrapper[4998]: I0203 07:22:26.538205 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4xhw8"] Feb 03 07:22:28 crc kubenswrapper[4998]: I0203 07:22:28.458100 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4xhw8" podUID="83b59597-8f90-4b4d-af75-4f563c56176f" containerName="registry-server" containerID="cri-o://8d1e87be82ea9667bceb762fca83a43f6a7f058ba170cc867a411549b49e6ecd" gracePeriod=2 Feb 03 07:22:28 crc kubenswrapper[4998]: I0203 07:22:28.824272 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4xhw8" Feb 03 07:22:28 crc kubenswrapper[4998]: I0203 07:22:28.985425 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83b59597-8f90-4b4d-af75-4f563c56176f-utilities\") pod \"83b59597-8f90-4b4d-af75-4f563c56176f\" (UID: \"83b59597-8f90-4b4d-af75-4f563c56176f\") " Feb 03 07:22:28 crc kubenswrapper[4998]: I0203 07:22:28.985876 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbq6b\" (UniqueName: \"kubernetes.io/projected/83b59597-8f90-4b4d-af75-4f563c56176f-kube-api-access-dbq6b\") pod \"83b59597-8f90-4b4d-af75-4f563c56176f\" (UID: \"83b59597-8f90-4b4d-af75-4f563c56176f\") " Feb 03 07:22:28 crc kubenswrapper[4998]: I0203 07:22:28.985974 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83b59597-8f90-4b4d-af75-4f563c56176f-catalog-content\") pod \"83b59597-8f90-4b4d-af75-4f563c56176f\" (UID: \"83b59597-8f90-4b4d-af75-4f563c56176f\") " Feb 03 07:22:28 crc kubenswrapper[4998]: I0203 07:22:28.987835 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83b59597-8f90-4b4d-af75-4f563c56176f-utilities" (OuterVolumeSpecName: "utilities") pod "83b59597-8f90-4b4d-af75-4f563c56176f" (UID: "83b59597-8f90-4b4d-af75-4f563c56176f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:22:28 crc kubenswrapper[4998]: I0203 07:22:28.993073 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83b59597-8f90-4b4d-af75-4f563c56176f-kube-api-access-dbq6b" (OuterVolumeSpecName: "kube-api-access-dbq6b") pod "83b59597-8f90-4b4d-af75-4f563c56176f" (UID: "83b59597-8f90-4b4d-af75-4f563c56176f"). InnerVolumeSpecName "kube-api-access-dbq6b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:22:29 crc kubenswrapper[4998]: I0203 07:22:29.088002 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83b59597-8f90-4b4d-af75-4f563c56176f-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:22:29 crc kubenswrapper[4998]: I0203 07:22:29.088434 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbq6b\" (UniqueName: \"kubernetes.io/projected/83b59597-8f90-4b4d-af75-4f563c56176f-kube-api-access-dbq6b\") on node \"crc\" DevicePath \"\"" Feb 03 07:22:29 crc kubenswrapper[4998]: I0203 07:22:29.465885 4998 generic.go:334] "Generic (PLEG): container finished" podID="83b59597-8f90-4b4d-af75-4f563c56176f" containerID="8d1e87be82ea9667bceb762fca83a43f6a7f058ba170cc867a411549b49e6ecd" exitCode=0 Feb 03 07:22:29 crc kubenswrapper[4998]: I0203 07:22:29.465930 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4xhw8" event={"ID":"83b59597-8f90-4b4d-af75-4f563c56176f","Type":"ContainerDied","Data":"8d1e87be82ea9667bceb762fca83a43f6a7f058ba170cc867a411549b49e6ecd"} Feb 03 07:22:29 crc kubenswrapper[4998]: I0203 07:22:29.465965 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4xhw8" event={"ID":"83b59597-8f90-4b4d-af75-4f563c56176f","Type":"ContainerDied","Data":"174a7307e55b5694dbc1b030ed6c27da0f0c11bc8976c7a6f4f8abd222844ac1"} Feb 03 07:22:29 crc kubenswrapper[4998]: I0203 07:22:29.465986 4998 scope.go:117] "RemoveContainer" containerID="8d1e87be82ea9667bceb762fca83a43f6a7f058ba170cc867a411549b49e6ecd" Feb 03 07:22:29 crc kubenswrapper[4998]: I0203 07:22:29.465934 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4xhw8" Feb 03 07:22:29 crc kubenswrapper[4998]: I0203 07:22:29.481268 4998 scope.go:117] "RemoveContainer" containerID="a37edf322266d0a1a21a972cca3eaf50b94d5775ade0123095af09862241d20c" Feb 03 07:22:29 crc kubenswrapper[4998]: I0203 07:22:29.481862 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83b59597-8f90-4b4d-af75-4f563c56176f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "83b59597-8f90-4b4d-af75-4f563c56176f" (UID: "83b59597-8f90-4b4d-af75-4f563c56176f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:22:29 crc kubenswrapper[4998]: I0203 07:22:29.494562 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83b59597-8f90-4b4d-af75-4f563c56176f-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:22:29 crc kubenswrapper[4998]: I0203 07:22:29.500060 4998 scope.go:117] "RemoveContainer" containerID="a4abbd568ac2483f4cf2f221c7e9eeafdeb61bd3d6ed1382022f3b31c17aea8e" Feb 03 07:22:29 crc kubenswrapper[4998]: I0203 07:22:29.517801 4998 scope.go:117] "RemoveContainer" containerID="8d1e87be82ea9667bceb762fca83a43f6a7f058ba170cc867a411549b49e6ecd" Feb 03 07:22:29 crc kubenswrapper[4998]: E0203 07:22:29.518240 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d1e87be82ea9667bceb762fca83a43f6a7f058ba170cc867a411549b49e6ecd\": container with ID starting with 8d1e87be82ea9667bceb762fca83a43f6a7f058ba170cc867a411549b49e6ecd not found: ID does not exist" containerID="8d1e87be82ea9667bceb762fca83a43f6a7f058ba170cc867a411549b49e6ecd" Feb 03 07:22:29 crc kubenswrapper[4998]: I0203 07:22:29.518295 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d1e87be82ea9667bceb762fca83a43f6a7f058ba170cc867a411549b49e6ecd"} err="failed to get container status \"8d1e87be82ea9667bceb762fca83a43f6a7f058ba170cc867a411549b49e6ecd\": rpc error: code = NotFound desc = could not find container \"8d1e87be82ea9667bceb762fca83a43f6a7f058ba170cc867a411549b49e6ecd\": container with ID starting with 8d1e87be82ea9667bceb762fca83a43f6a7f058ba170cc867a411549b49e6ecd not found: ID does not exist" Feb 03 07:22:29 crc kubenswrapper[4998]: I0203 07:22:29.518334 4998 scope.go:117] "RemoveContainer" containerID="a37edf322266d0a1a21a972cca3eaf50b94d5775ade0123095af09862241d20c" Feb 03 07:22:29 crc kubenswrapper[4998]: E0203 07:22:29.518621 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a37edf322266d0a1a21a972cca3eaf50b94d5775ade0123095af09862241d20c\": container with ID starting with a37edf322266d0a1a21a972cca3eaf50b94d5775ade0123095af09862241d20c not found: ID does not exist" containerID="a37edf322266d0a1a21a972cca3eaf50b94d5775ade0123095af09862241d20c" Feb 03 07:22:29 crc kubenswrapper[4998]: I0203 07:22:29.518653 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a37edf322266d0a1a21a972cca3eaf50b94d5775ade0123095af09862241d20c"} err="failed to get container status \"a37edf322266d0a1a21a972cca3eaf50b94d5775ade0123095af09862241d20c\": rpc error: code = NotFound desc = could not find container \"a37edf322266d0a1a21a972cca3eaf50b94d5775ade0123095af09862241d20c\": container with ID starting with a37edf322266d0a1a21a972cca3eaf50b94d5775ade0123095af09862241d20c not found: ID does not exist" Feb 03 07:22:29 crc kubenswrapper[4998]: I0203 07:22:29.518673 4998 scope.go:117] "RemoveContainer" containerID="a4abbd568ac2483f4cf2f221c7e9eeafdeb61bd3d6ed1382022f3b31c17aea8e" Feb 03 07:22:29 crc kubenswrapper[4998]: E0203 07:22:29.518946 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a4abbd568ac2483f4cf2f221c7e9eeafdeb61bd3d6ed1382022f3b31c17aea8e\": container with ID starting with a4abbd568ac2483f4cf2f221c7e9eeafdeb61bd3d6ed1382022f3b31c17aea8e not found: ID does not exist" 
containerID="a4abbd568ac2483f4cf2f221c7e9eeafdeb61bd3d6ed1382022f3b31c17aea8e" Feb 03 07:22:29 crc kubenswrapper[4998]: I0203 07:22:29.518987 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a4abbd568ac2483f4cf2f221c7e9eeafdeb61bd3d6ed1382022f3b31c17aea8e"} err="failed to get container status \"a4abbd568ac2483f4cf2f221c7e9eeafdeb61bd3d6ed1382022f3b31c17aea8e\": rpc error: code = NotFound desc = could not find container \"a4abbd568ac2483f4cf2f221c7e9eeafdeb61bd3d6ed1382022f3b31c17aea8e\": container with ID starting with a4abbd568ac2483f4cf2f221c7e9eeafdeb61bd3d6ed1382022f3b31c17aea8e not found: ID does not exist" Feb 03 07:22:29 crc kubenswrapper[4998]: I0203 07:22:29.801457 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4xhw8"] Feb 03 07:22:29 crc kubenswrapper[4998]: I0203 07:22:29.822325 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4xhw8"] Feb 03 07:22:30 crc kubenswrapper[4998]: I0203 07:22:30.435941 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83b59597-8f90-4b4d-af75-4f563c56176f" path="/var/lib/kubelet/pods/83b59597-8f90-4b4d-af75-4f563c56176f/volumes" Feb 03 07:24:12 crc kubenswrapper[4998]: I0203 07:24:12.754851 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:24:12 crc kubenswrapper[4998]: I0203 07:24:12.755391 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:24:42 crc kubenswrapper[4998]: I0203 07:24:42.754179 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:24:42 crc kubenswrapper[4998]: I0203 07:24:42.754886 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:24:59 crc kubenswrapper[4998]: I0203 07:24:59.312585 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-zffz4"] Feb 03 07:24:59 crc kubenswrapper[4998]: E0203 07:24:59.313468 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83b59597-8f90-4b4d-af75-4f563c56176f" containerName="registry-server" Feb 03 07:24:59 crc kubenswrapper[4998]: I0203 07:24:59.313483 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="83b59597-8f90-4b4d-af75-4f563c56176f" containerName="registry-server" Feb 03 07:24:59 crc kubenswrapper[4998]: E0203 07:24:59.313515 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83b59597-8f90-4b4d-af75-4f563c56176f" containerName="extract-utilities" Feb 03 07:24:59 crc 
kubenswrapper[4998]: I0203 07:24:59.313524 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="83b59597-8f90-4b4d-af75-4f563c56176f" containerName="extract-utilities" Feb 03 07:24:59 crc kubenswrapper[4998]: E0203 07:24:59.313537 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83b59597-8f90-4b4d-af75-4f563c56176f" containerName="extract-content" Feb 03 07:24:59 crc kubenswrapper[4998]: I0203 07:24:59.313547 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="83b59597-8f90-4b4d-af75-4f563c56176f" containerName="extract-content" Feb 03 07:24:59 crc kubenswrapper[4998]: I0203 07:24:59.313713 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="83b59597-8f90-4b4d-af75-4f563c56176f" containerName="registry-server" Feb 03 07:24:59 crc kubenswrapper[4998]: I0203 07:24:59.315186 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zffz4" Feb 03 07:24:59 crc kubenswrapper[4998]: I0203 07:24:59.327030 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zffz4"] Feb 03 07:24:59 crc kubenswrapper[4998]: I0203 07:24:59.504048 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e512f526-1785-4b1e-b180-312a7cb5b7b8-utilities\") pod \"redhat-marketplace-zffz4\" (UID: \"e512f526-1785-4b1e-b180-312a7cb5b7b8\") " pod="openshift-marketplace/redhat-marketplace-zffz4" Feb 03 07:24:59 crc kubenswrapper[4998]: I0203 07:24:59.504320 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e512f526-1785-4b1e-b180-312a7cb5b7b8-catalog-content\") pod \"redhat-marketplace-zffz4\" (UID: \"e512f526-1785-4b1e-b180-312a7cb5b7b8\") " pod="openshift-marketplace/redhat-marketplace-zffz4" Feb 03 07:24:59 crc kubenswrapper[4998]: I0203 07:24:59.504438 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7xkt\" (UniqueName: \"kubernetes.io/projected/e512f526-1785-4b1e-b180-312a7cb5b7b8-kube-api-access-h7xkt\") pod \"redhat-marketplace-zffz4\" (UID: \"e512f526-1785-4b1e-b180-312a7cb5b7b8\") " pod="openshift-marketplace/redhat-marketplace-zffz4" Feb 03 07:24:59 crc kubenswrapper[4998]: I0203 07:24:59.605709 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e512f526-1785-4b1e-b180-312a7cb5b7b8-catalog-content\") pod \"redhat-marketplace-zffz4\" (UID: \"e512f526-1785-4b1e-b180-312a7cb5b7b8\") " pod="openshift-marketplace/redhat-marketplace-zffz4" Feb 03 07:24:59 crc kubenswrapper[4998]: I0203 07:24:59.605808 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7xkt\" (UniqueName: \"kubernetes.io/projected/e512f526-1785-4b1e-b180-312a7cb5b7b8-kube-api-access-h7xkt\") pod \"redhat-marketplace-zffz4\" (UID: \"e512f526-1785-4b1e-b180-312a7cb5b7b8\") " pod="openshift-marketplace/redhat-marketplace-zffz4" Feb 03 07:24:59 crc kubenswrapper[4998]: I0203 07:24:59.605859 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e512f526-1785-4b1e-b180-312a7cb5b7b8-utilities\") pod \"redhat-marketplace-zffz4\" (UID: \"e512f526-1785-4b1e-b180-312a7cb5b7b8\") " 
pod="openshift-marketplace/redhat-marketplace-zffz4" Feb 03 07:24:59 crc kubenswrapper[4998]: I0203 07:24:59.606408 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e512f526-1785-4b1e-b180-312a7cb5b7b8-catalog-content\") pod \"redhat-marketplace-zffz4\" (UID: \"e512f526-1785-4b1e-b180-312a7cb5b7b8\") " pod="openshift-marketplace/redhat-marketplace-zffz4" Feb 03 07:24:59 crc kubenswrapper[4998]: I0203 07:24:59.606478 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e512f526-1785-4b1e-b180-312a7cb5b7b8-utilities\") pod \"redhat-marketplace-zffz4\" (UID: \"e512f526-1785-4b1e-b180-312a7cb5b7b8\") " pod="openshift-marketplace/redhat-marketplace-zffz4" Feb 03 07:24:59 crc kubenswrapper[4998]: I0203 07:24:59.628547 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7xkt\" (UniqueName: \"kubernetes.io/projected/e512f526-1785-4b1e-b180-312a7cb5b7b8-kube-api-access-h7xkt\") pod \"redhat-marketplace-zffz4\" (UID: \"e512f526-1785-4b1e-b180-312a7cb5b7b8\") " pod="openshift-marketplace/redhat-marketplace-zffz4" Feb 03 07:24:59 crc kubenswrapper[4998]: I0203 07:24:59.634864 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zffz4" Feb 03 07:24:59 crc kubenswrapper[4998]: I0203 07:24:59.914559 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zffz4"] Feb 03 07:25:00 crc kubenswrapper[4998]: I0203 07:25:00.580626 4998 generic.go:334] "Generic (PLEG): container finished" podID="e512f526-1785-4b1e-b180-312a7cb5b7b8" containerID="33b0f5c928763fd5b1c88a8e11cf9ad69c11742d82fb578f7ecdccd02e1f3eac" exitCode=0 Feb 03 07:25:00 crc kubenswrapper[4998]: I0203 07:25:00.580694 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zffz4" event={"ID":"e512f526-1785-4b1e-b180-312a7cb5b7b8","Type":"ContainerDied","Data":"33b0f5c928763fd5b1c88a8e11cf9ad69c11742d82fb578f7ecdccd02e1f3eac"} Feb 03 07:25:00 crc kubenswrapper[4998]: I0203 07:25:00.580963 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zffz4" event={"ID":"e512f526-1785-4b1e-b180-312a7cb5b7b8","Type":"ContainerStarted","Data":"93a6ffe3de631f6cf59d61e43e302af694a2b44dcb9a34f999eeea1c75ca0615"} Feb 03 07:25:01 crc kubenswrapper[4998]: I0203 07:25:01.589273 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zffz4" event={"ID":"e512f526-1785-4b1e-b180-312a7cb5b7b8","Type":"ContainerStarted","Data":"70184f8ee9b319c8d1c6dcbdd5342ad08de210d2ad4a0556c1bc500a13011fb1"} Feb 03 07:25:02 crc kubenswrapper[4998]: I0203 07:25:02.598194 4998 generic.go:334] "Generic (PLEG): container finished" podID="e512f526-1785-4b1e-b180-312a7cb5b7b8" containerID="70184f8ee9b319c8d1c6dcbdd5342ad08de210d2ad4a0556c1bc500a13011fb1" exitCode=0 Feb 03 07:25:02 crc kubenswrapper[4998]: I0203 07:25:02.598309 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zffz4" event={"ID":"e512f526-1785-4b1e-b180-312a7cb5b7b8","Type":"ContainerDied","Data":"70184f8ee9b319c8d1c6dcbdd5342ad08de210d2ad4a0556c1bc500a13011fb1"} Feb 03 07:25:03 crc kubenswrapper[4998]: I0203 07:25:03.606542 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zffz4" 
event={"ID":"e512f526-1785-4b1e-b180-312a7cb5b7b8","Type":"ContainerStarted","Data":"20953756096b39f37ab71a6dd5b595f8de037ed52bc20557f41a8271643f532c"} Feb 03 07:25:03 crc kubenswrapper[4998]: I0203 07:25:03.626559 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-zffz4" podStartSLOduration=2.196530884 podStartE2EDuration="4.626537101s" podCreationTimestamp="2026-02-03 07:24:59 +0000 UTC" firstStartedPulling="2026-02-03 07:25:00.582850496 +0000 UTC m=+2338.869544302" lastFinishedPulling="2026-02-03 07:25:03.012856713 +0000 UTC m=+2341.299550519" observedRunningTime="2026-02-03 07:25:03.622958609 +0000 UTC m=+2341.909652435" watchObservedRunningTime="2026-02-03 07:25:03.626537101 +0000 UTC m=+2341.913230917" Feb 03 07:25:09 crc kubenswrapper[4998]: I0203 07:25:09.635744 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-zffz4" Feb 03 07:25:09 crc kubenswrapper[4998]: I0203 07:25:09.636236 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-zffz4" Feb 03 07:25:09 crc kubenswrapper[4998]: I0203 07:25:09.676944 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-zffz4" Feb 03 07:25:09 crc kubenswrapper[4998]: I0203 07:25:09.718909 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-zffz4" Feb 03 07:25:09 crc kubenswrapper[4998]: I0203 07:25:09.912025 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zffz4"] Feb 03 07:25:11 crc kubenswrapper[4998]: I0203 07:25:11.660402 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-zffz4" podUID="e512f526-1785-4b1e-b180-312a7cb5b7b8" containerName="registry-server" containerID="cri-o://20953756096b39f37ab71a6dd5b595f8de037ed52bc20557f41a8271643f532c" gracePeriod=2 Feb 03 07:25:12 crc kubenswrapper[4998]: I0203 07:25:12.049839 4998 util.go:48] "No ready sandbox for pod can be found. 
Feb 03 07:25:12 crc kubenswrapper[4998]: I0203 07:25:12.085558 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e512f526-1785-4b1e-b180-312a7cb5b7b8-utilities\") pod \"e512f526-1785-4b1e-b180-312a7cb5b7b8\" (UID: \"e512f526-1785-4b1e-b180-312a7cb5b7b8\") "
Feb 03 07:25:12 crc kubenswrapper[4998]: I0203 07:25:12.085605 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e512f526-1785-4b1e-b180-312a7cb5b7b8-catalog-content\") pod \"e512f526-1785-4b1e-b180-312a7cb5b7b8\" (UID: \"e512f526-1785-4b1e-b180-312a7cb5b7b8\") "
Feb 03 07:25:12 crc kubenswrapper[4998]: I0203 07:25:12.085728 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h7xkt\" (UniqueName: \"kubernetes.io/projected/e512f526-1785-4b1e-b180-312a7cb5b7b8-kube-api-access-h7xkt\") pod \"e512f526-1785-4b1e-b180-312a7cb5b7b8\" (UID: \"e512f526-1785-4b1e-b180-312a7cb5b7b8\") "
Feb 03 07:25:12 crc kubenswrapper[4998]: I0203 07:25:12.087318 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e512f526-1785-4b1e-b180-312a7cb5b7b8-utilities" (OuterVolumeSpecName: "utilities") pod "e512f526-1785-4b1e-b180-312a7cb5b7b8" (UID: "e512f526-1785-4b1e-b180-312a7cb5b7b8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 07:25:12 crc kubenswrapper[4998]: I0203 07:25:12.091990 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e512f526-1785-4b1e-b180-312a7cb5b7b8-kube-api-access-h7xkt" (OuterVolumeSpecName: "kube-api-access-h7xkt") pod "e512f526-1785-4b1e-b180-312a7cb5b7b8" (UID: "e512f526-1785-4b1e-b180-312a7cb5b7b8"). InnerVolumeSpecName "kube-api-access-h7xkt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 07:25:12 crc kubenswrapper[4998]: I0203 07:25:12.110631 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e512f526-1785-4b1e-b180-312a7cb5b7b8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e512f526-1785-4b1e-b180-312a7cb5b7b8" (UID: "e512f526-1785-4b1e-b180-312a7cb5b7b8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 07:25:12 crc kubenswrapper[4998]: I0203 07:25:12.186920 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h7xkt\" (UniqueName: \"kubernetes.io/projected/e512f526-1785-4b1e-b180-312a7cb5b7b8-kube-api-access-h7xkt\") on node \"crc\" DevicePath \"\""
Feb 03 07:25:12 crc kubenswrapper[4998]: I0203 07:25:12.186961 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e512f526-1785-4b1e-b180-312a7cb5b7b8-utilities\") on node \"crc\" DevicePath \"\""
Feb 03 07:25:12 crc kubenswrapper[4998]: I0203 07:25:12.186976 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e512f526-1785-4b1e-b180-312a7cb5b7b8-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 03 07:25:12 crc kubenswrapper[4998]: I0203 07:25:12.670994 4998 generic.go:334] "Generic (PLEG): container finished" podID="e512f526-1785-4b1e-b180-312a7cb5b7b8" containerID="20953756096b39f37ab71a6dd5b595f8de037ed52bc20557f41a8271643f532c" exitCode=0
Feb 03 07:25:12 crc kubenswrapper[4998]: I0203 07:25:12.671061 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zffz4" event={"ID":"e512f526-1785-4b1e-b180-312a7cb5b7b8","Type":"ContainerDied","Data":"20953756096b39f37ab71a6dd5b595f8de037ed52bc20557f41a8271643f532c"}
Feb 03 07:25:12 crc kubenswrapper[4998]: I0203 07:25:12.671102 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zffz4" event={"ID":"e512f526-1785-4b1e-b180-312a7cb5b7b8","Type":"ContainerDied","Data":"93a6ffe3de631f6cf59d61e43e302af694a2b44dcb9a34f999eeea1c75ca0615"}
Feb 03 07:25:12 crc kubenswrapper[4998]: I0203 07:25:12.671129 4998 scope.go:117] "RemoveContainer" containerID="20953756096b39f37ab71a6dd5b595f8de037ed52bc20557f41a8271643f532c"
Feb 03 07:25:12 crc kubenswrapper[4998]: I0203 07:25:12.671129 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zffz4"
Feb 03 07:25:12 crc kubenswrapper[4998]: I0203 07:25:12.693868 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zffz4"]
Feb 03 07:25:12 crc kubenswrapper[4998]: I0203 07:25:12.699200 4998 scope.go:117] "RemoveContainer" containerID="70184f8ee9b319c8d1c6dcbdd5342ad08de210d2ad4a0556c1bc500a13011fb1"
Feb 03 07:25:12 crc kubenswrapper[4998]: I0203 07:25:12.700891 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-zffz4"]
Feb 03 07:25:12 crc kubenswrapper[4998]: I0203 07:25:12.726163 4998 scope.go:117] "RemoveContainer" containerID="33b0f5c928763fd5b1c88a8e11cf9ad69c11742d82fb578f7ecdccd02e1f3eac"
Feb 03 07:25:12 crc kubenswrapper[4998]: I0203 07:25:12.745132 4998 scope.go:117] "RemoveContainer" containerID="20953756096b39f37ab71a6dd5b595f8de037ed52bc20557f41a8271643f532c"
Feb 03 07:25:12 crc kubenswrapper[4998]: E0203 07:25:12.745433 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"20953756096b39f37ab71a6dd5b595f8de037ed52bc20557f41a8271643f532c\": container with ID starting with 20953756096b39f37ab71a6dd5b595f8de037ed52bc20557f41a8271643f532c not found: ID does not exist" containerID="20953756096b39f37ab71a6dd5b595f8de037ed52bc20557f41a8271643f532c"
Feb 03 07:25:12 crc kubenswrapper[4998]: I0203 07:25:12.745497 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"20953756096b39f37ab71a6dd5b595f8de037ed52bc20557f41a8271643f532c"} err="failed to get container status \"20953756096b39f37ab71a6dd5b595f8de037ed52bc20557f41a8271643f532c\": rpc error: code = NotFound desc = could not find container \"20953756096b39f37ab71a6dd5b595f8de037ed52bc20557f41a8271643f532c\": container with ID starting with 20953756096b39f37ab71a6dd5b595f8de037ed52bc20557f41a8271643f532c not found: ID does not exist"
Feb 03 07:25:12 crc kubenswrapper[4998]: I0203 07:25:12.745530 4998 scope.go:117] "RemoveContainer" containerID="70184f8ee9b319c8d1c6dcbdd5342ad08de210d2ad4a0556c1bc500a13011fb1"
Feb 03 07:25:12 crc kubenswrapper[4998]: E0203 07:25:12.745880 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70184f8ee9b319c8d1c6dcbdd5342ad08de210d2ad4a0556c1bc500a13011fb1\": container with ID starting with 70184f8ee9b319c8d1c6dcbdd5342ad08de210d2ad4a0556c1bc500a13011fb1 not found: ID does not exist" containerID="70184f8ee9b319c8d1c6dcbdd5342ad08de210d2ad4a0556c1bc500a13011fb1"
Feb 03 07:25:12 crc kubenswrapper[4998]: I0203 07:25:12.745982 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70184f8ee9b319c8d1c6dcbdd5342ad08de210d2ad4a0556c1bc500a13011fb1"} err="failed to get container status \"70184f8ee9b319c8d1c6dcbdd5342ad08de210d2ad4a0556c1bc500a13011fb1\": rpc error: code = NotFound desc = could not find container \"70184f8ee9b319c8d1c6dcbdd5342ad08de210d2ad4a0556c1bc500a13011fb1\": container with ID starting with 70184f8ee9b319c8d1c6dcbdd5342ad08de210d2ad4a0556c1bc500a13011fb1 not found: ID does not exist"
Feb 03 07:25:12 crc kubenswrapper[4998]: I0203 07:25:12.746069 4998 scope.go:117] "RemoveContainer" containerID="33b0f5c928763fd5b1c88a8e11cf9ad69c11742d82fb578f7ecdccd02e1f3eac"
Feb 03 07:25:12 crc kubenswrapper[4998]: E0203 07:25:12.746406 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33b0f5c928763fd5b1c88a8e11cf9ad69c11742d82fb578f7ecdccd02e1f3eac\": container with ID starting with 33b0f5c928763fd5b1c88a8e11cf9ad69c11742d82fb578f7ecdccd02e1f3eac not found: ID does not exist" containerID="33b0f5c928763fd5b1c88a8e11cf9ad69c11742d82fb578f7ecdccd02e1f3eac"
Feb 03 07:25:12 crc kubenswrapper[4998]: I0203 07:25:12.746432 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33b0f5c928763fd5b1c88a8e11cf9ad69c11742d82fb578f7ecdccd02e1f3eac"} err="failed to get container status \"33b0f5c928763fd5b1c88a8e11cf9ad69c11742d82fb578f7ecdccd02e1f3eac\": rpc error: code = NotFound desc = could not find container \"33b0f5c928763fd5b1c88a8e11cf9ad69c11742d82fb578f7ecdccd02e1f3eac\": container with ID starting with 33b0f5c928763fd5b1c88a8e11cf9ad69c11742d82fb578f7ecdccd02e1f3eac not found: ID does not exist"
Feb 03 07:25:12 crc kubenswrapper[4998]: I0203 07:25:12.754145 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 03 07:25:12 crc kubenswrapper[4998]: I0203 07:25:12.754208 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 03 07:25:12 crc kubenswrapper[4998]: I0203 07:25:12.754252 4998 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x"
Feb 03 07:25:12 crc kubenswrapper[4998]: I0203 07:25:12.755037 4998 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6b6bba6097c868cfa9bf394a6a455eeb55142d4b1ef2e0b1da75c62a9daa057e"} pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Feb 03 07:25:12 crc kubenswrapper[4998]: I0203 07:25:12.755124 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" containerID="cri-o://6b6bba6097c868cfa9bf394a6a455eeb55142d4b1ef2e0b1da75c62a9daa057e" gracePeriod=600
Feb 03 07:25:12 crc kubenswrapper[4998]: E0203 07:25:12.872846 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 07:25:13 crc kubenswrapper[4998]: I0203 07:25:13.682025 4998 generic.go:334] "Generic (PLEG): container finished" podID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerID="6b6bba6097c868cfa9bf394a6a455eeb55142d4b1ef2e0b1da75c62a9daa057e" exitCode=0
Feb 03 07:25:13 crc kubenswrapper[4998]: I0203 07:25:13.682089 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerDied","Data":"6b6bba6097c868cfa9bf394a6a455eeb55142d4b1ef2e0b1da75c62a9daa057e"}
Feb 03 07:25:13 crc kubenswrapper[4998]: I0203 07:25:13.682121 4998 scope.go:117] "RemoveContainer" containerID="e17c1a4f147ea8f91238e8293d1424da4134dd9cd2d66aba31ea7c85fcceea85"
Feb 03 07:25:13 crc kubenswrapper[4998]: I0203 07:25:13.682568 4998 scope.go:117] "RemoveContainer" containerID="6b6bba6097c868cfa9bf394a6a455eeb55142d4b1ef2e0b1da75c62a9daa057e"
Feb 03 07:25:13 crc kubenswrapper[4998]: E0203 07:25:13.682759 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 07:25:14 crc kubenswrapper[4998]: I0203 07:25:14.437161 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e512f526-1785-4b1e-b180-312a7cb5b7b8" path="/var/lib/kubelet/pods/e512f526-1785-4b1e-b180-312a7cb5b7b8/volumes"
Feb 03 07:25:19 crc kubenswrapper[4998]: I0203 07:25:19.761122 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-ldzb6"]
Feb 03 07:25:19 crc kubenswrapper[4998]: E0203 07:25:19.766684 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e512f526-1785-4b1e-b180-312a7cb5b7b8" containerName="extract-utilities"
Feb 03 07:25:19 crc kubenswrapper[4998]: I0203 07:25:19.766734 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="e512f526-1785-4b1e-b180-312a7cb5b7b8" containerName="extract-utilities"
Feb 03 07:25:19 crc kubenswrapper[4998]: E0203 07:25:19.766763 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e512f526-1785-4b1e-b180-312a7cb5b7b8" containerName="registry-server"
Feb 03 07:25:19 crc kubenswrapper[4998]: I0203 07:25:19.766772 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="e512f526-1785-4b1e-b180-312a7cb5b7b8" containerName="registry-server"
Feb 03 07:25:19 crc kubenswrapper[4998]: E0203 07:25:19.766807 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e512f526-1785-4b1e-b180-312a7cb5b7b8" containerName="extract-content"
Feb 03 07:25:19 crc kubenswrapper[4998]: I0203 07:25:19.766831 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="e512f526-1785-4b1e-b180-312a7cb5b7b8" containerName="extract-content"
Feb 03 07:25:19 crc kubenswrapper[4998]: I0203 07:25:19.767109 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="e512f526-1785-4b1e-b180-312a7cb5b7b8" containerName="registry-server"
Feb 03 07:25:19 crc kubenswrapper[4998]: I0203 07:25:19.768923 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ldzb6"
Feb 03 07:25:19 crc kubenswrapper[4998]: I0203 07:25:19.770134 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ldzb6"]
Feb 03 07:25:19 crc kubenswrapper[4998]: I0203 07:25:19.894785 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qgkc2\" (UniqueName: \"kubernetes.io/projected/2e9110cb-f69a-4eba-95a5-48b161fc7a02-kube-api-access-qgkc2\") pod \"community-operators-ldzb6\" (UID: \"2e9110cb-f69a-4eba-95a5-48b161fc7a02\") " pod="openshift-marketplace/community-operators-ldzb6"
Feb 03 07:25:19 crc kubenswrapper[4998]: I0203 07:25:19.894859 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e9110cb-f69a-4eba-95a5-48b161fc7a02-utilities\") pod \"community-operators-ldzb6\" (UID: \"2e9110cb-f69a-4eba-95a5-48b161fc7a02\") " pod="openshift-marketplace/community-operators-ldzb6"
Feb 03 07:25:19 crc kubenswrapper[4998]: I0203 07:25:19.895006 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e9110cb-f69a-4eba-95a5-48b161fc7a02-catalog-content\") pod \"community-operators-ldzb6\" (UID: \"2e9110cb-f69a-4eba-95a5-48b161fc7a02\") " pod="openshift-marketplace/community-operators-ldzb6"
Feb 03 07:25:19 crc kubenswrapper[4998]: I0203 07:25:19.996843 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e9110cb-f69a-4eba-95a5-48b161fc7a02-catalog-content\") pod \"community-operators-ldzb6\" (UID: \"2e9110cb-f69a-4eba-95a5-48b161fc7a02\") " pod="openshift-marketplace/community-operators-ldzb6"
Feb 03 07:25:19 crc kubenswrapper[4998]: I0203 07:25:19.997181 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qgkc2\" (UniqueName: \"kubernetes.io/projected/2e9110cb-f69a-4eba-95a5-48b161fc7a02-kube-api-access-qgkc2\") pod \"community-operators-ldzb6\" (UID: \"2e9110cb-f69a-4eba-95a5-48b161fc7a02\") " pod="openshift-marketplace/community-operators-ldzb6"
Feb 03 07:25:19 crc kubenswrapper[4998]: I0203 07:25:19.997317 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e9110cb-f69a-4eba-95a5-48b161fc7a02-catalog-content\") pod \"community-operators-ldzb6\" (UID: \"2e9110cb-f69a-4eba-95a5-48b161fc7a02\") " pod="openshift-marketplace/community-operators-ldzb6"
Feb 03 07:25:19 crc kubenswrapper[4998]: I0203 07:25:19.997334 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e9110cb-f69a-4eba-95a5-48b161fc7a02-utilities\") pod \"community-operators-ldzb6\" (UID: \"2e9110cb-f69a-4eba-95a5-48b161fc7a02\") " pod="openshift-marketplace/community-operators-ldzb6"
Feb 03 07:25:19 crc kubenswrapper[4998]: I0203 07:25:19.997748 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e9110cb-f69a-4eba-95a5-48b161fc7a02-utilities\") pod \"community-operators-ldzb6\" (UID: \"2e9110cb-f69a-4eba-95a5-48b161fc7a02\") " pod="openshift-marketplace/community-operators-ldzb6"
Feb 03 07:25:20 crc kubenswrapper[4998]: I0203 07:25:20.016059 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qgkc2\" (UniqueName: \"kubernetes.io/projected/2e9110cb-f69a-4eba-95a5-48b161fc7a02-kube-api-access-qgkc2\") pod \"community-operators-ldzb6\" (UID: \"2e9110cb-f69a-4eba-95a5-48b161fc7a02\") " pod="openshift-marketplace/community-operators-ldzb6"
Feb 03 07:25:20 crc kubenswrapper[4998]: I0203 07:25:20.087846 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ldzb6"
Feb 03 07:25:20 crc kubenswrapper[4998]: I0203 07:25:20.577480 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ldzb6"]
Feb 03 07:25:20 crc kubenswrapper[4998]: I0203 07:25:20.744882 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ldzb6" event={"ID":"2e9110cb-f69a-4eba-95a5-48b161fc7a02","Type":"ContainerStarted","Data":"51964e82583d871bece0e80e57773eee5c9f1969ad53a4b70d29b629356ce7e1"}
Feb 03 07:25:20 crc kubenswrapper[4998]: I0203 07:25:20.744984 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ldzb6" event={"ID":"2e9110cb-f69a-4eba-95a5-48b161fc7a02","Type":"ContainerStarted","Data":"ca34f1592fd66ef34110651af8b499c6348fd303f0285854dec724a3917841a5"}
Feb 03 07:25:21 crc kubenswrapper[4998]: I0203 07:25:21.756720 4998 generic.go:334] "Generic (PLEG): container finished" podID="2e9110cb-f69a-4eba-95a5-48b161fc7a02" containerID="51964e82583d871bece0e80e57773eee5c9f1969ad53a4b70d29b629356ce7e1" exitCode=0
Feb 03 07:25:21 crc kubenswrapper[4998]: I0203 07:25:21.756870 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ldzb6" event={"ID":"2e9110cb-f69a-4eba-95a5-48b161fc7a02","Type":"ContainerDied","Data":"51964e82583d871bece0e80e57773eee5c9f1969ad53a4b70d29b629356ce7e1"}
Feb 03 07:25:23 crc kubenswrapper[4998]: I0203 07:25:23.777912 4998 generic.go:334] "Generic (PLEG): container finished" podID="2e9110cb-f69a-4eba-95a5-48b161fc7a02" containerID="5e7b9ccd7edf2cba1aa6b79f79217e0c3e9af6277547f67a2f99dd94bcac2b2c" exitCode=0
Feb 03 07:25:23 crc kubenswrapper[4998]: I0203 07:25:23.777958 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ldzb6" event={"ID":"2e9110cb-f69a-4eba-95a5-48b161fc7a02","Type":"ContainerDied","Data":"5e7b9ccd7edf2cba1aa6b79f79217e0c3e9af6277547f67a2f99dd94bcac2b2c"}
Feb 03 07:25:24 crc kubenswrapper[4998]: I0203 07:25:24.786018 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ldzb6" event={"ID":"2e9110cb-f69a-4eba-95a5-48b161fc7a02","Type":"ContainerStarted","Data":"6f4f66481fc839774665a25e04ce5e96cec5dc004925b68781d1a7305251d78e"}
Feb 03 07:25:24 crc kubenswrapper[4998]: I0203 07:25:24.804874 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-ldzb6" podStartSLOduration=3.324640185 podStartE2EDuration="5.80485267s" podCreationTimestamp="2026-02-03 07:25:19 +0000 UTC" firstStartedPulling="2026-02-03 07:25:21.759053595 +0000 UTC m=+2360.045747401" lastFinishedPulling="2026-02-03 07:25:24.23926609 +0000 UTC m=+2362.525959886" observedRunningTime="2026-02-03 07:25:24.802259597 +0000 UTC m=+2363.088953433" watchObservedRunningTime="2026-02-03 07:25:24.80485267 +0000 UTC m=+2363.091546486"
Feb 03 07:25:26 crc kubenswrapper[4998]: I0203 07:25:26.428190 4998 scope.go:117] "RemoveContainer" containerID="6b6bba6097c868cfa9bf394a6a455eeb55142d4b1ef2e0b1da75c62a9daa057e"
Feb 03 07:25:26 crc kubenswrapper[4998]: E0203 07:25:26.428450 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 07:25:30 crc kubenswrapper[4998]: I0203 07:25:30.088853 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-ldzb6"
Feb 03 07:25:30 crc kubenswrapper[4998]: I0203 07:25:30.089183 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-ldzb6"
Feb 03 07:25:30 crc kubenswrapper[4998]: I0203 07:25:30.151928 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-ldzb6"
Feb 03 07:25:30 crc kubenswrapper[4998]: I0203 07:25:30.863264 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-ldzb6"
Feb 03 07:25:30 crc kubenswrapper[4998]: I0203 07:25:30.911842 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ldzb6"]
Feb 03 07:25:32 crc kubenswrapper[4998]: I0203 07:25:32.839933 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-ldzb6" podUID="2e9110cb-f69a-4eba-95a5-48b161fc7a02" containerName="registry-server" containerID="cri-o://6f4f66481fc839774665a25e04ce5e96cec5dc004925b68781d1a7305251d78e" gracePeriod=2
Feb 03 07:25:33 crc kubenswrapper[4998]: I0203 07:25:33.195625 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ldzb6"
Feb 03 07:25:33 crc kubenswrapper[4998]: I0203 07:25:33.388604 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e9110cb-f69a-4eba-95a5-48b161fc7a02-catalog-content\") pod \"2e9110cb-f69a-4eba-95a5-48b161fc7a02\" (UID: \"2e9110cb-f69a-4eba-95a5-48b161fc7a02\") "
Feb 03 07:25:33 crc kubenswrapper[4998]: I0203 07:25:33.388663 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qgkc2\" (UniqueName: \"kubernetes.io/projected/2e9110cb-f69a-4eba-95a5-48b161fc7a02-kube-api-access-qgkc2\") pod \"2e9110cb-f69a-4eba-95a5-48b161fc7a02\" (UID: \"2e9110cb-f69a-4eba-95a5-48b161fc7a02\") "
Feb 03 07:25:33 crc kubenswrapper[4998]: I0203 07:25:33.388705 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e9110cb-f69a-4eba-95a5-48b161fc7a02-utilities\") pod \"2e9110cb-f69a-4eba-95a5-48b161fc7a02\" (UID: \"2e9110cb-f69a-4eba-95a5-48b161fc7a02\") "
Feb 03 07:25:33 crc kubenswrapper[4998]: I0203 07:25:33.389852 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2e9110cb-f69a-4eba-95a5-48b161fc7a02-utilities" (OuterVolumeSpecName: "utilities") pod "2e9110cb-f69a-4eba-95a5-48b161fc7a02" (UID: "2e9110cb-f69a-4eba-95a5-48b161fc7a02"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:25:33 crc kubenswrapper[4998]: I0203 07:25:33.393540 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e9110cb-f69a-4eba-95a5-48b161fc7a02-kube-api-access-qgkc2" (OuterVolumeSpecName: "kube-api-access-qgkc2") pod "2e9110cb-f69a-4eba-95a5-48b161fc7a02" (UID: "2e9110cb-f69a-4eba-95a5-48b161fc7a02"). InnerVolumeSpecName "kube-api-access-qgkc2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:25:33 crc kubenswrapper[4998]: I0203 07:25:33.453270 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2e9110cb-f69a-4eba-95a5-48b161fc7a02-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2e9110cb-f69a-4eba-95a5-48b161fc7a02" (UID: "2e9110cb-f69a-4eba-95a5-48b161fc7a02"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:25:33 crc kubenswrapper[4998]: I0203 07:25:33.489821 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e9110cb-f69a-4eba-95a5-48b161fc7a02-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:25:33 crc kubenswrapper[4998]: I0203 07:25:33.489914 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e9110cb-f69a-4eba-95a5-48b161fc7a02-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:25:33 crc kubenswrapper[4998]: I0203 07:25:33.489947 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qgkc2\" (UniqueName: \"kubernetes.io/projected/2e9110cb-f69a-4eba-95a5-48b161fc7a02-kube-api-access-qgkc2\") on node \"crc\" DevicePath \"\"" Feb 03 07:25:33 crc kubenswrapper[4998]: I0203 07:25:33.848758 4998 generic.go:334] "Generic (PLEG): container finished" podID="2e9110cb-f69a-4eba-95a5-48b161fc7a02" containerID="6f4f66481fc839774665a25e04ce5e96cec5dc004925b68781d1a7305251d78e" exitCode=0 Feb 03 07:25:33 crc kubenswrapper[4998]: I0203 07:25:33.848821 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ldzb6" event={"ID":"2e9110cb-f69a-4eba-95a5-48b161fc7a02","Type":"ContainerDied","Data":"6f4f66481fc839774665a25e04ce5e96cec5dc004925b68781d1a7305251d78e"} Feb 03 07:25:33 crc kubenswrapper[4998]: I0203 07:25:33.848853 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ldzb6" event={"ID":"2e9110cb-f69a-4eba-95a5-48b161fc7a02","Type":"ContainerDied","Data":"ca34f1592fd66ef34110651af8b499c6348fd303f0285854dec724a3917841a5"} Feb 03 07:25:33 crc kubenswrapper[4998]: I0203 07:25:33.848866 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ldzb6" Feb 03 07:25:33 crc kubenswrapper[4998]: I0203 07:25:33.848874 4998 scope.go:117] "RemoveContainer" containerID="6f4f66481fc839774665a25e04ce5e96cec5dc004925b68781d1a7305251d78e" Feb 03 07:25:33 crc kubenswrapper[4998]: I0203 07:25:33.870883 4998 scope.go:117] "RemoveContainer" containerID="5e7b9ccd7edf2cba1aa6b79f79217e0c3e9af6277547f67a2f99dd94bcac2b2c" Feb 03 07:25:33 crc kubenswrapper[4998]: I0203 07:25:33.888077 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ldzb6"] Feb 03 07:25:33 crc kubenswrapper[4998]: I0203 07:25:33.893433 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-ldzb6"] Feb 03 07:25:33 crc kubenswrapper[4998]: I0203 07:25:33.923591 4998 scope.go:117] "RemoveContainer" containerID="51964e82583d871bece0e80e57773eee5c9f1969ad53a4b70d29b629356ce7e1" Feb 03 07:25:33 crc kubenswrapper[4998]: I0203 07:25:33.938471 4998 scope.go:117] "RemoveContainer" containerID="6f4f66481fc839774665a25e04ce5e96cec5dc004925b68781d1a7305251d78e" Feb 03 07:25:33 crc kubenswrapper[4998]: E0203 07:25:33.938889 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6f4f66481fc839774665a25e04ce5e96cec5dc004925b68781d1a7305251d78e\": container with ID starting with 6f4f66481fc839774665a25e04ce5e96cec5dc004925b68781d1a7305251d78e not found: ID does not exist" containerID="6f4f66481fc839774665a25e04ce5e96cec5dc004925b68781d1a7305251d78e" Feb 03 07:25:33 crc kubenswrapper[4998]: I0203 07:25:33.938931 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f4f66481fc839774665a25e04ce5e96cec5dc004925b68781d1a7305251d78e"} err="failed to get container status \"6f4f66481fc839774665a25e04ce5e96cec5dc004925b68781d1a7305251d78e\": rpc error: code = NotFound desc = could not find container \"6f4f66481fc839774665a25e04ce5e96cec5dc004925b68781d1a7305251d78e\": container with ID starting with 6f4f66481fc839774665a25e04ce5e96cec5dc004925b68781d1a7305251d78e not found: ID does not exist" Feb 03 07:25:33 crc kubenswrapper[4998]: I0203 07:25:33.938958 4998 scope.go:117] "RemoveContainer" containerID="5e7b9ccd7edf2cba1aa6b79f79217e0c3e9af6277547f67a2f99dd94bcac2b2c" Feb 03 07:25:33 crc kubenswrapper[4998]: E0203 07:25:33.939323 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e7b9ccd7edf2cba1aa6b79f79217e0c3e9af6277547f67a2f99dd94bcac2b2c\": container with ID starting with 5e7b9ccd7edf2cba1aa6b79f79217e0c3e9af6277547f67a2f99dd94bcac2b2c not found: ID does not exist" containerID="5e7b9ccd7edf2cba1aa6b79f79217e0c3e9af6277547f67a2f99dd94bcac2b2c" Feb 03 07:25:33 crc kubenswrapper[4998]: I0203 07:25:33.939356 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e7b9ccd7edf2cba1aa6b79f79217e0c3e9af6277547f67a2f99dd94bcac2b2c"} err="failed to get container status \"5e7b9ccd7edf2cba1aa6b79f79217e0c3e9af6277547f67a2f99dd94bcac2b2c\": rpc error: code = NotFound desc = could not find container \"5e7b9ccd7edf2cba1aa6b79f79217e0c3e9af6277547f67a2f99dd94bcac2b2c\": container with ID starting with 5e7b9ccd7edf2cba1aa6b79f79217e0c3e9af6277547f67a2f99dd94bcac2b2c not found: ID does not exist" Feb 03 07:25:33 crc kubenswrapper[4998]: I0203 07:25:33.939378 4998 scope.go:117] "RemoveContainer" 
containerID="51964e82583d871bece0e80e57773eee5c9f1969ad53a4b70d29b629356ce7e1" Feb 03 07:25:33 crc kubenswrapper[4998]: E0203 07:25:33.939616 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51964e82583d871bece0e80e57773eee5c9f1969ad53a4b70d29b629356ce7e1\": container with ID starting with 51964e82583d871bece0e80e57773eee5c9f1969ad53a4b70d29b629356ce7e1 not found: ID does not exist" containerID="51964e82583d871bece0e80e57773eee5c9f1969ad53a4b70d29b629356ce7e1" Feb 03 07:25:33 crc kubenswrapper[4998]: I0203 07:25:33.939672 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51964e82583d871bece0e80e57773eee5c9f1969ad53a4b70d29b629356ce7e1"} err="failed to get container status \"51964e82583d871bece0e80e57773eee5c9f1969ad53a4b70d29b629356ce7e1\": rpc error: code = NotFound desc = could not find container \"51964e82583d871bece0e80e57773eee5c9f1969ad53a4b70d29b629356ce7e1\": container with ID starting with 51964e82583d871bece0e80e57773eee5c9f1969ad53a4b70d29b629356ce7e1 not found: ID does not exist" Feb 03 07:25:34 crc kubenswrapper[4998]: I0203 07:25:34.437349 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e9110cb-f69a-4eba-95a5-48b161fc7a02" path="/var/lib/kubelet/pods/2e9110cb-f69a-4eba-95a5-48b161fc7a02/volumes" Feb 03 07:25:39 crc kubenswrapper[4998]: I0203 07:25:39.427538 4998 scope.go:117] "RemoveContainer" containerID="6b6bba6097c868cfa9bf394a6a455eeb55142d4b1ef2e0b1da75c62a9daa057e" Feb 03 07:25:39 crc kubenswrapper[4998]: E0203 07:25:39.428321 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:25:54 crc kubenswrapper[4998]: I0203 07:25:54.427487 4998 scope.go:117] "RemoveContainer" containerID="6b6bba6097c868cfa9bf394a6a455eeb55142d4b1ef2e0b1da75c62a9daa057e" Feb 03 07:25:54 crc kubenswrapper[4998]: E0203 07:25:54.428354 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:26:05 crc kubenswrapper[4998]: I0203 07:26:05.427982 4998 scope.go:117] "RemoveContainer" containerID="6b6bba6097c868cfa9bf394a6a455eeb55142d4b1ef2e0b1da75c62a9daa057e" Feb 03 07:26:05 crc kubenswrapper[4998]: E0203 07:26:05.428684 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:26:16 crc kubenswrapper[4998]: I0203 07:26:16.427509 4998 scope.go:117] "RemoveContainer" 
containerID="6b6bba6097c868cfa9bf394a6a455eeb55142d4b1ef2e0b1da75c62a9daa057e" Feb 03 07:26:16 crc kubenswrapper[4998]: E0203 07:26:16.428409 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:26:27 crc kubenswrapper[4998]: I0203 07:26:27.428023 4998 scope.go:117] "RemoveContainer" containerID="6b6bba6097c868cfa9bf394a6a455eeb55142d4b1ef2e0b1da75c62a9daa057e" Feb 03 07:26:27 crc kubenswrapper[4998]: E0203 07:26:27.428917 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:26:40 crc kubenswrapper[4998]: I0203 07:26:40.427343 4998 scope.go:117] "RemoveContainer" containerID="6b6bba6097c868cfa9bf394a6a455eeb55142d4b1ef2e0b1da75c62a9daa057e" Feb 03 07:26:40 crc kubenswrapper[4998]: E0203 07:26:40.428198 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:26:51 crc kubenswrapper[4998]: I0203 07:26:51.429188 4998 scope.go:117] "RemoveContainer" containerID="6b6bba6097c868cfa9bf394a6a455eeb55142d4b1ef2e0b1da75c62a9daa057e" Feb 03 07:26:51 crc kubenswrapper[4998]: E0203 07:26:51.430365 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:27:05 crc kubenswrapper[4998]: I0203 07:27:05.428955 4998 scope.go:117] "RemoveContainer" containerID="6b6bba6097c868cfa9bf394a6a455eeb55142d4b1ef2e0b1da75c62a9daa057e" Feb 03 07:27:05 crc kubenswrapper[4998]: E0203 07:27:05.430113 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:27:18 crc kubenswrapper[4998]: I0203 07:27:18.428930 4998 scope.go:117] "RemoveContainer" containerID="6b6bba6097c868cfa9bf394a6a455eeb55142d4b1ef2e0b1da75c62a9daa057e" Feb 03 07:27:18 crc kubenswrapper[4998]: E0203 07:27:18.429535 4998 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:27:31 crc kubenswrapper[4998]: I0203 07:27:31.427323 4998 scope.go:117] "RemoveContainer" containerID="6b6bba6097c868cfa9bf394a6a455eeb55142d4b1ef2e0b1da75c62a9daa057e" Feb 03 07:27:31 crc kubenswrapper[4998]: E0203 07:27:31.428254 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:27:45 crc kubenswrapper[4998]: I0203 07:27:45.427826 4998 scope.go:117] "RemoveContainer" containerID="6b6bba6097c868cfa9bf394a6a455eeb55142d4b1ef2e0b1da75c62a9daa057e" Feb 03 07:27:45 crc kubenswrapper[4998]: E0203 07:27:45.428930 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:27:56 crc kubenswrapper[4998]: I0203 07:27:56.428186 4998 scope.go:117] "RemoveContainer" containerID="6b6bba6097c868cfa9bf394a6a455eeb55142d4b1ef2e0b1da75c62a9daa057e" Feb 03 07:27:56 crc kubenswrapper[4998]: E0203 07:27:56.428988 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:28:10 crc kubenswrapper[4998]: I0203 07:28:10.427360 4998 scope.go:117] "RemoveContainer" containerID="6b6bba6097c868cfa9bf394a6a455eeb55142d4b1ef2e0b1da75c62a9daa057e" Feb 03 07:28:10 crc kubenswrapper[4998]: E0203 07:28:10.428073 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:28:23 crc kubenswrapper[4998]: I0203 07:28:23.428477 4998 scope.go:117] "RemoveContainer" containerID="6b6bba6097c868cfa9bf394a6a455eeb55142d4b1ef2e0b1da75c62a9daa057e" Feb 03 07:28:23 crc kubenswrapper[4998]: E0203 07:28:23.429238 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:28:37 crc kubenswrapper[4998]: I0203 07:28:37.428441 4998 scope.go:117] "RemoveContainer" containerID="6b6bba6097c868cfa9bf394a6a455eeb55142d4b1ef2e0b1da75c62a9daa057e" Feb 03 07:28:37 crc kubenswrapper[4998]: E0203 07:28:37.429492 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:28:51 crc kubenswrapper[4998]: I0203 07:28:51.427300 4998 scope.go:117] "RemoveContainer" containerID="6b6bba6097c868cfa9bf394a6a455eeb55142d4b1ef2e0b1da75c62a9daa057e" Feb 03 07:28:51 crc kubenswrapper[4998]: E0203 07:28:51.428248 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:29:04 crc kubenswrapper[4998]: I0203 07:29:04.427393 4998 scope.go:117] "RemoveContainer" containerID="6b6bba6097c868cfa9bf394a6a455eeb55142d4b1ef2e0b1da75c62a9daa057e" Feb 03 07:29:04 crc kubenswrapper[4998]: E0203 07:29:04.428866 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:29:17 crc kubenswrapper[4998]: I0203 07:29:17.427556 4998 scope.go:117] "RemoveContainer" containerID="6b6bba6097c868cfa9bf394a6a455eeb55142d4b1ef2e0b1da75c62a9daa057e" Feb 03 07:29:17 crc kubenswrapper[4998]: E0203 07:29:17.428215 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:29:29 crc kubenswrapper[4998]: I0203 07:29:29.427286 4998 scope.go:117] "RemoveContainer" containerID="6b6bba6097c868cfa9bf394a6a455eeb55142d4b1ef2e0b1da75c62a9daa057e" Feb 03 07:29:29 crc kubenswrapper[4998]: E0203 07:29:29.428033 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" 
podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:29:41 crc kubenswrapper[4998]: I0203 07:29:41.427158 4998 scope.go:117] "RemoveContainer" containerID="6b6bba6097c868cfa9bf394a6a455eeb55142d4b1ef2e0b1da75c62a9daa057e" Feb 03 07:29:41 crc kubenswrapper[4998]: E0203 07:29:41.427952 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:29:55 crc kubenswrapper[4998]: I0203 07:29:55.427084 4998 scope.go:117] "RemoveContainer" containerID="6b6bba6097c868cfa9bf394a6a455eeb55142d4b1ef2e0b1da75c62a9daa057e" Feb 03 07:29:55 crc kubenswrapper[4998]: E0203 07:29:55.427905 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:30:00 crc kubenswrapper[4998]: I0203 07:30:00.144722 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501730-vx9gx"] Feb 03 07:30:00 crc kubenswrapper[4998]: E0203 07:30:00.145508 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e9110cb-f69a-4eba-95a5-48b161fc7a02" containerName="extract-utilities" Feb 03 07:30:00 crc kubenswrapper[4998]: I0203 07:30:00.145523 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e9110cb-f69a-4eba-95a5-48b161fc7a02" containerName="extract-utilities" Feb 03 07:30:00 crc kubenswrapper[4998]: E0203 07:30:00.145546 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e9110cb-f69a-4eba-95a5-48b161fc7a02" containerName="extract-content" Feb 03 07:30:00 crc kubenswrapper[4998]: I0203 07:30:00.145552 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e9110cb-f69a-4eba-95a5-48b161fc7a02" containerName="extract-content" Feb 03 07:30:00 crc kubenswrapper[4998]: E0203 07:30:00.145562 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e9110cb-f69a-4eba-95a5-48b161fc7a02" containerName="registry-server" Feb 03 07:30:00 crc kubenswrapper[4998]: I0203 07:30:00.145568 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e9110cb-f69a-4eba-95a5-48b161fc7a02" containerName="registry-server" Feb 03 07:30:00 crc kubenswrapper[4998]: I0203 07:30:00.145693 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e9110cb-f69a-4eba-95a5-48b161fc7a02" containerName="registry-server" Feb 03 07:30:00 crc kubenswrapper[4998]: I0203 07:30:00.146160 4998 util.go:30] "No sandbox for pod can be found. 
Feb 03 07:30:00 crc kubenswrapper[4998]: I0203 07:30:00.144722 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501730-vx9gx"]
Feb 03 07:30:00 crc kubenswrapper[4998]: E0203 07:30:00.145508 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e9110cb-f69a-4eba-95a5-48b161fc7a02" containerName="extract-utilities"
Feb 03 07:30:00 crc kubenswrapper[4998]: I0203 07:30:00.145523 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e9110cb-f69a-4eba-95a5-48b161fc7a02" containerName="extract-utilities"
Feb 03 07:30:00 crc kubenswrapper[4998]: E0203 07:30:00.145546 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e9110cb-f69a-4eba-95a5-48b161fc7a02" containerName="extract-content"
Feb 03 07:30:00 crc kubenswrapper[4998]: I0203 07:30:00.145552 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e9110cb-f69a-4eba-95a5-48b161fc7a02" containerName="extract-content"
Feb 03 07:30:00 crc kubenswrapper[4998]: E0203 07:30:00.145562 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e9110cb-f69a-4eba-95a5-48b161fc7a02" containerName="registry-server"
Feb 03 07:30:00 crc kubenswrapper[4998]: I0203 07:30:00.145568 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e9110cb-f69a-4eba-95a5-48b161fc7a02" containerName="registry-server"
Feb 03 07:30:00 crc kubenswrapper[4998]: I0203 07:30:00.145693 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e9110cb-f69a-4eba-95a5-48b161fc7a02" containerName="registry-server"
Feb 03 07:30:00 crc kubenswrapper[4998]: I0203 07:30:00.146160 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501730-vx9gx"
Feb 03 07:30:00 crc kubenswrapper[4998]: I0203 07:30:00.149758 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Feb 03 07:30:00 crc kubenswrapper[4998]: I0203 07:30:00.149912 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Feb 03 07:30:00 crc kubenswrapper[4998]: I0203 07:30:00.159164 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501730-vx9gx"]
Feb 03 07:30:00 crc kubenswrapper[4998]: I0203 07:30:00.301081 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9a8f35dd-01c5-48a0-b17a-b681dc68312e-secret-volume\") pod \"collect-profiles-29501730-vx9gx\" (UID: \"9a8f35dd-01c5-48a0-b17a-b681dc68312e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501730-vx9gx"
Feb 03 07:30:00 crc kubenswrapper[4998]: I0203 07:30:00.301174 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mwvvn\" (UniqueName: \"kubernetes.io/projected/9a8f35dd-01c5-48a0-b17a-b681dc68312e-kube-api-access-mwvvn\") pod \"collect-profiles-29501730-vx9gx\" (UID: \"9a8f35dd-01c5-48a0-b17a-b681dc68312e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501730-vx9gx"
Feb 03 07:30:00 crc kubenswrapper[4998]: I0203 07:30:00.301347 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9a8f35dd-01c5-48a0-b17a-b681dc68312e-config-volume\") pod \"collect-profiles-29501730-vx9gx\" (UID: \"9a8f35dd-01c5-48a0-b17a-b681dc68312e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501730-vx9gx"
Feb 03 07:30:00 crc kubenswrapper[4998]: I0203 07:30:00.402522 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9a8f35dd-01c5-48a0-b17a-b681dc68312e-config-volume\") pod \"collect-profiles-29501730-vx9gx\" (UID: \"9a8f35dd-01c5-48a0-b17a-b681dc68312e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501730-vx9gx"
Feb 03 07:30:00 crc kubenswrapper[4998]: I0203 07:30:00.402602 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9a8f35dd-01c5-48a0-b17a-b681dc68312e-secret-volume\") pod \"collect-profiles-29501730-vx9gx\" (UID: \"9a8f35dd-01c5-48a0-b17a-b681dc68312e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501730-vx9gx"
Feb 03 07:30:00 crc kubenswrapper[4998]: I0203 07:30:00.402636 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwvvn\" (UniqueName: \"kubernetes.io/projected/9a8f35dd-01c5-48a0-b17a-b681dc68312e-kube-api-access-mwvvn\") pod \"collect-profiles-29501730-vx9gx\" (UID: \"9a8f35dd-01c5-48a0-b17a-b681dc68312e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501730-vx9gx"
Feb 03 07:30:00 crc kubenswrapper[4998]: I0203 07:30:00.403463 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9a8f35dd-01c5-48a0-b17a-b681dc68312e-config-volume\") pod \"collect-profiles-29501730-vx9gx\" (UID: \"9a8f35dd-01c5-48a0-b17a-b681dc68312e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501730-vx9gx"
Feb 03 07:30:00 crc kubenswrapper[4998]: I0203 07:30:00.414733 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9a8f35dd-01c5-48a0-b17a-b681dc68312e-secret-volume\") pod \"collect-profiles-29501730-vx9gx\" (UID: \"9a8f35dd-01c5-48a0-b17a-b681dc68312e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501730-vx9gx"
Feb 03 07:30:00 crc kubenswrapper[4998]: I0203 07:30:00.427489 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwvvn\" (UniqueName: \"kubernetes.io/projected/9a8f35dd-01c5-48a0-b17a-b681dc68312e-kube-api-access-mwvvn\") pod \"collect-profiles-29501730-vx9gx\" (UID: \"9a8f35dd-01c5-48a0-b17a-b681dc68312e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501730-vx9gx"
Feb 03 07:30:00 crc kubenswrapper[4998]: I0203 07:30:00.466470 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501730-vx9gx"
Feb 03 07:30:00 crc kubenswrapper[4998]: I0203 07:30:00.904422 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501730-vx9gx"]
Feb 03 07:30:01 crc kubenswrapper[4998]: I0203 07:30:01.746336 4998 generic.go:334] "Generic (PLEG): container finished" podID="9a8f35dd-01c5-48a0-b17a-b681dc68312e" containerID="ea1bc8df95dce708e8d415e92ba0124af8a4f0e8a037ca907febd6cd298a6395" exitCode=0
Feb 03 07:30:01 crc kubenswrapper[4998]: I0203 07:30:01.746391 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501730-vx9gx" event={"ID":"9a8f35dd-01c5-48a0-b17a-b681dc68312e","Type":"ContainerDied","Data":"ea1bc8df95dce708e8d415e92ba0124af8a4f0e8a037ca907febd6cd298a6395"}
Feb 03 07:30:01 crc kubenswrapper[4998]: I0203 07:30:01.746428 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501730-vx9gx" event={"ID":"9a8f35dd-01c5-48a0-b17a-b681dc68312e","Type":"ContainerStarted","Data":"b04cab6ec52f19b60c5e10958d2d31909e988e73878aa4aa7b1568940f8a308b"}
Feb 03 07:30:03 crc kubenswrapper[4998]: I0203 07:30:03.003736 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501730-vx9gx"
Feb 03 07:30:03 crc kubenswrapper[4998]: I0203 07:30:03.143395 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9a8f35dd-01c5-48a0-b17a-b681dc68312e-secret-volume\") pod \"9a8f35dd-01c5-48a0-b17a-b681dc68312e\" (UID: \"9a8f35dd-01c5-48a0-b17a-b681dc68312e\") "
Feb 03 07:30:03 crc kubenswrapper[4998]: I0203 07:30:03.143600 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mwvvn\" (UniqueName: \"kubernetes.io/projected/9a8f35dd-01c5-48a0-b17a-b681dc68312e-kube-api-access-mwvvn\") pod \"9a8f35dd-01c5-48a0-b17a-b681dc68312e\" (UID: \"9a8f35dd-01c5-48a0-b17a-b681dc68312e\") "
Feb 03 07:30:03 crc kubenswrapper[4998]: I0203 07:30:03.143700 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9a8f35dd-01c5-48a0-b17a-b681dc68312e-config-volume\") pod \"9a8f35dd-01c5-48a0-b17a-b681dc68312e\" (UID: \"9a8f35dd-01c5-48a0-b17a-b681dc68312e\") "
Feb 03 07:30:03 crc kubenswrapper[4998]: I0203 07:30:03.144277 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9a8f35dd-01c5-48a0-b17a-b681dc68312e-config-volume" (OuterVolumeSpecName: "config-volume") pod "9a8f35dd-01c5-48a0-b17a-b681dc68312e" (UID: "9a8f35dd-01c5-48a0-b17a-b681dc68312e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 03 07:30:03 crc kubenswrapper[4998]: I0203 07:30:03.148198 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a8f35dd-01c5-48a0-b17a-b681dc68312e-kube-api-access-mwvvn" (OuterVolumeSpecName: "kube-api-access-mwvvn") pod "9a8f35dd-01c5-48a0-b17a-b681dc68312e" (UID: "9a8f35dd-01c5-48a0-b17a-b681dc68312e"). InnerVolumeSpecName "kube-api-access-mwvvn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 07:30:03 crc kubenswrapper[4998]: I0203 07:30:03.149615 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a8f35dd-01c5-48a0-b17a-b681dc68312e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "9a8f35dd-01c5-48a0-b17a-b681dc68312e" (UID: "9a8f35dd-01c5-48a0-b17a-b681dc68312e"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:30:03 crc kubenswrapper[4998]: I0203 07:30:03.245333 4998 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9a8f35dd-01c5-48a0-b17a-b681dc68312e-secret-volume\") on node \"crc\" DevicePath \"\""
Feb 03 07:30:03 crc kubenswrapper[4998]: I0203 07:30:03.245365 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mwvvn\" (UniqueName: \"kubernetes.io/projected/9a8f35dd-01c5-48a0-b17a-b681dc68312e-kube-api-access-mwvvn\") on node \"crc\" DevicePath \"\""
Feb 03 07:30:03 crc kubenswrapper[4998]: I0203 07:30:03.245374 4998 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9a8f35dd-01c5-48a0-b17a-b681dc68312e-config-volume\") on node \"crc\" DevicePath \"\""
Feb 03 07:30:03 crc kubenswrapper[4998]: I0203 07:30:03.767095 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501730-vx9gx" event={"ID":"9a8f35dd-01c5-48a0-b17a-b681dc68312e","Type":"ContainerDied","Data":"b04cab6ec52f19b60c5e10958d2d31909e988e73878aa4aa7b1568940f8a308b"}
Feb 03 07:30:03 crc kubenswrapper[4998]: I0203 07:30:03.767400 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b04cab6ec52f19b60c5e10958d2d31909e988e73878aa4aa7b1568940f8a308b"
Feb 03 07:30:03 crc kubenswrapper[4998]: I0203 07:30:03.767490 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501730-vx9gx"
Feb 03 07:30:04 crc kubenswrapper[4998]: I0203 07:30:04.067964 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501685-vmrhm"]
Feb 03 07:30:04 crc kubenswrapper[4998]: I0203 07:30:04.072937 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501685-vmrhm"]
Feb 03 07:30:04 crc kubenswrapper[4998]: I0203 07:30:04.447577 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a571eff-c1d3-4fc0-84e4-43cadf0c7979" path="/var/lib/kubelet/pods/5a571eff-c1d3-4fc0-84e4-43cadf0c7979/volumes"
Feb 03 07:30:06 crc kubenswrapper[4998]: I0203 07:30:06.393122 4998 scope.go:117] "RemoveContainer" containerID="3ba0e525e44872d49676bb37c726957410f36f5262a90d479d69331017f46227"
Feb 03 07:30:06 crc kubenswrapper[4998]: I0203 07:30:06.428743 4998 scope.go:117] "RemoveContainer" containerID="6b6bba6097c868cfa9bf394a6a455eeb55142d4b1ef2e0b1da75c62a9daa057e"
Feb 03 07:30:06 crc kubenswrapper[4998]: E0203 07:30:06.429231 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 07:30:12 crc kubenswrapper[4998]: I0203 07:30:12.047740 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-cgckc"]
Feb 03 07:30:12 crc kubenswrapper[4998]: E0203 07:30:12.048684 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a8f35dd-01c5-48a0-b17a-b681dc68312e" containerName="collect-profiles"
Feb 03 07:30:12 crc kubenswrapper[4998]: I0203 07:30:12.048697 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a8f35dd-01c5-48a0-b17a-b681dc68312e" containerName="collect-profiles"
Feb 03 07:30:12 crc kubenswrapper[4998]: I0203 07:30:12.048865 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a8f35dd-01c5-48a0-b17a-b681dc68312e" containerName="collect-profiles"
Feb 03 07:30:12 crc kubenswrapper[4998]: I0203 07:30:12.049754 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cgckc"
Feb 03 07:30:12 crc kubenswrapper[4998]: I0203 07:30:12.063222 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wqvc7\" (UniqueName: \"kubernetes.io/projected/7858afb3-a233-4be7-a25d-37294795cec3-kube-api-access-wqvc7\") pod \"redhat-operators-cgckc\" (UID: \"7858afb3-a233-4be7-a25d-37294795cec3\") " pod="openshift-marketplace/redhat-operators-cgckc"
Feb 03 07:30:12 crc kubenswrapper[4998]: I0203 07:30:12.063301 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7858afb3-a233-4be7-a25d-37294795cec3-catalog-content\") pod \"redhat-operators-cgckc\" (UID: \"7858afb3-a233-4be7-a25d-37294795cec3\") " pod="openshift-marketplace/redhat-operators-cgckc"
Feb 03 07:30:12 crc kubenswrapper[4998]: I0203 07:30:12.063343 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7858afb3-a233-4be7-a25d-37294795cec3-utilities\") pod \"redhat-operators-cgckc\" (UID: \"7858afb3-a233-4be7-a25d-37294795cec3\") " pod="openshift-marketplace/redhat-operators-cgckc"
Feb 03 07:30:12 crc kubenswrapper[4998]: I0203 07:30:12.066720 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cgckc"]
Feb 03 07:30:12 crc kubenswrapper[4998]: I0203 07:30:12.164253 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wqvc7\" (UniqueName: \"kubernetes.io/projected/7858afb3-a233-4be7-a25d-37294795cec3-kube-api-access-wqvc7\") pod \"redhat-operators-cgckc\" (UID: \"7858afb3-a233-4be7-a25d-37294795cec3\") " pod="openshift-marketplace/redhat-operators-cgckc"
Feb 03 07:30:12 crc kubenswrapper[4998]: I0203 07:30:12.164549 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7858afb3-a233-4be7-a25d-37294795cec3-catalog-content\") pod \"redhat-operators-cgckc\" (UID: \"7858afb3-a233-4be7-a25d-37294795cec3\") " pod="openshift-marketplace/redhat-operators-cgckc"
Feb 03 07:30:12 crc kubenswrapper[4998]: I0203 07:30:12.164670 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7858afb3-a233-4be7-a25d-37294795cec3-utilities\") pod \"redhat-operators-cgckc\" (UID: \"7858afb3-a233-4be7-a25d-37294795cec3\") " pod="openshift-marketplace/redhat-operators-cgckc"
Feb 03 07:30:12 crc kubenswrapper[4998]: I0203 07:30:12.165222 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7858afb3-a233-4be7-a25d-37294795cec3-catalog-content\") pod \"redhat-operators-cgckc\" (UID: \"7858afb3-a233-4be7-a25d-37294795cec3\") " pod="openshift-marketplace/redhat-operators-cgckc"
Feb 03 07:30:12 crc kubenswrapper[4998]: I0203 07:30:12.165302 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7858afb3-a233-4be7-a25d-37294795cec3-utilities\") pod \"redhat-operators-cgckc\" (UID: \"7858afb3-a233-4be7-a25d-37294795cec3\") " pod="openshift-marketplace/redhat-operators-cgckc"
Feb 03 07:30:12 crc kubenswrapper[4998]: I0203 07:30:12.184028 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wqvc7\" (UniqueName: \"kubernetes.io/projected/7858afb3-a233-4be7-a25d-37294795cec3-kube-api-access-wqvc7\") pod \"redhat-operators-cgckc\" (UID: \"7858afb3-a233-4be7-a25d-37294795cec3\") " pod="openshift-marketplace/redhat-operators-cgckc"
Feb 03 07:30:12 crc kubenswrapper[4998]: I0203 07:30:12.375251 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cgckc"
Feb 03 07:30:12 crc kubenswrapper[4998]: I0203 07:30:12.826680 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cgckc"]
Feb 03 07:30:12 crc kubenswrapper[4998]: I0203 07:30:12.836407 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cgckc" event={"ID":"7858afb3-a233-4be7-a25d-37294795cec3","Type":"ContainerStarted","Data":"02ec7830e8623b4762b192c6d30b4f6a838dc8862825f4aad3fe433e5a8bc74b"}
Feb 03 07:30:13 crc kubenswrapper[4998]: I0203 07:30:13.845009 4998 generic.go:334] "Generic (PLEG): container finished" podID="7858afb3-a233-4be7-a25d-37294795cec3" containerID="54d062e2a34ddf43922f968fbe4f52fad6bc5bba994ef73362bbb391593a03b2" exitCode=0
Feb 03 07:30:13 crc kubenswrapper[4998]: I0203 07:30:13.845065 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cgckc" event={"ID":"7858afb3-a233-4be7-a25d-37294795cec3","Type":"ContainerDied","Data":"54d062e2a34ddf43922f968fbe4f52fad6bc5bba994ef73362bbb391593a03b2"}
Feb 03 07:30:13 crc kubenswrapper[4998]: I0203 07:30:13.846902 4998 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Feb 03 07:30:14 crc kubenswrapper[4998]: I0203 07:30:14.854238 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cgckc" event={"ID":"7858afb3-a233-4be7-a25d-37294795cec3","Type":"ContainerStarted","Data":"7456d569871b8400dbea94d1d8585d7e15453bdc157a690cb686f0271cda880c"}
Feb 03 07:30:15 crc kubenswrapper[4998]: I0203 07:30:15.861751 4998 generic.go:334] "Generic (PLEG): container finished" podID="7858afb3-a233-4be7-a25d-37294795cec3" containerID="7456d569871b8400dbea94d1d8585d7e15453bdc157a690cb686f0271cda880c" exitCode=0
Feb 03 07:30:15 crc kubenswrapper[4998]: I0203 07:30:15.861808 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cgckc" event={"ID":"7858afb3-a233-4be7-a25d-37294795cec3","Type":"ContainerDied","Data":"7456d569871b8400dbea94d1d8585d7e15453bdc157a690cb686f0271cda880c"}
Feb 03 07:30:16 crc kubenswrapper[4998]: I0203 07:30:16.868632 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cgckc" event={"ID":"7858afb3-a233-4be7-a25d-37294795cec3","Type":"ContainerStarted","Data":"c6e2f39d2678f0d17e539b9903f8cb88d12605f81e657cdf03da55d9a770c006"}
Feb 03 07:30:16 crc kubenswrapper[4998]: I0203 07:30:16.889719 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-cgckc" podStartSLOduration=2.422147684 podStartE2EDuration="4.889704076s" podCreationTimestamp="2026-02-03 07:30:12 +0000 UTC" firstStartedPulling="2026-02-03 07:30:13.846662541 +0000 UTC m=+2652.133356347" lastFinishedPulling="2026-02-03 07:30:16.314218933 +0000 UTC m=+2654.600912739" observedRunningTime="2026-02-03 07:30:16.885599159 +0000 UTC m=+2655.172292965" watchObservedRunningTime="2026-02-03 07:30:16.889704076 +0000 UTC m=+2655.176397882"
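The pod_startup_latency_tracker entry is self-consistent: podStartE2EDuration is observedRunningTime minus podCreationTimestamp, and podStartSLOduration is that figure minus the image-pull window (lastFinishedPulling minus firstStartedPulling), which the SLO metric excludes. Reproducing the arithmetic in Go, with the timestamps copied from the entry and the monotonic m=+... suffixes dropped:

package main

import (
	"fmt"
	"time"
)

func main() {
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	p := func(s string) time.Time {
		t, err := time.Parse(layout, s)
		if err != nil {
			panic(err)
		}
		return t
	}
	created := p("2026-02-03 07:30:12 +0000 UTC")
	pullStart := p("2026-02-03 07:30:13.846662541 +0000 UTC")
	pullEnd := p("2026-02-03 07:30:16.314218933 +0000 UTC")
	running := p("2026-02-03 07:30:16.889704076 +0000 UTC")

	e2e := running.Sub(created)         // podStartE2EDuration
	slo := e2e - pullEnd.Sub(pullStart) // end-to-end minus image-pull time
	fmt.Println(e2e, slo)               // 4.889704076s 2.422147684s
}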
Feb 03 07:30:17 crc kubenswrapper[4998]: I0203 07:30:17.427095 4998 scope.go:117] "RemoveContainer" containerID="6b6bba6097c868cfa9bf394a6a455eeb55142d4b1ef2e0b1da75c62a9daa057e"
Feb 03 07:30:17 crc kubenswrapper[4998]: I0203 07:30:17.876727 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerStarted","Data":"96d1e87e1ff254efe2d53cc5c3d38581a88383d17c7a9efadc06af05628ce16e"}
Feb 03 07:30:22 crc kubenswrapper[4998]: I0203 07:30:22.376240 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-cgckc"
Feb 03 07:30:22 crc kubenswrapper[4998]: I0203 07:30:22.376825 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-cgckc"
Feb 03 07:30:22 crc kubenswrapper[4998]: I0203 07:30:22.418721 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-cgckc"
Feb 03 07:30:22 crc kubenswrapper[4998]: I0203 07:30:22.958430 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-cgckc"
Feb 03 07:30:23 crc kubenswrapper[4998]: I0203 07:30:23.004801 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cgckc"]
Feb 03 07:30:24 crc kubenswrapper[4998]: I0203 07:30:24.922321 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-cgckc" podUID="7858afb3-a233-4be7-a25d-37294795cec3" containerName="registry-server" containerID="cri-o://c6e2f39d2678f0d17e539b9903f8cb88d12605f81e657cdf03da55d9a770c006" gracePeriod=2
Feb 03 07:30:25 crc kubenswrapper[4998]: I0203 07:30:25.529825 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cgckc"
Feb 03 07:30:25 crc kubenswrapper[4998]: I0203 07:30:25.657304 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7858afb3-a233-4be7-a25d-37294795cec3-catalog-content\") pod \"7858afb3-a233-4be7-a25d-37294795cec3\" (UID: \"7858afb3-a233-4be7-a25d-37294795cec3\") "
Feb 03 07:30:25 crc kubenswrapper[4998]: I0203 07:30:25.657370 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7858afb3-a233-4be7-a25d-37294795cec3-utilities\") pod \"7858afb3-a233-4be7-a25d-37294795cec3\" (UID: \"7858afb3-a233-4be7-a25d-37294795cec3\") "
Feb 03 07:30:25 crc kubenswrapper[4998]: I0203 07:30:25.657442 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wqvc7\" (UniqueName: \"kubernetes.io/projected/7858afb3-a233-4be7-a25d-37294795cec3-kube-api-access-wqvc7\") pod \"7858afb3-a233-4be7-a25d-37294795cec3\" (UID: \"7858afb3-a233-4be7-a25d-37294795cec3\") "
Feb 03 07:30:25 crc kubenswrapper[4998]: I0203 07:30:25.658203 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7858afb3-a233-4be7-a25d-37294795cec3-utilities" (OuterVolumeSpecName: "utilities") pod "7858afb3-a233-4be7-a25d-37294795cec3" (UID: "7858afb3-a233-4be7-a25d-37294795cec3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 07:30:25 crc kubenswrapper[4998]: I0203 07:30:25.664645 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7858afb3-a233-4be7-a25d-37294795cec3-kube-api-access-wqvc7" (OuterVolumeSpecName: "kube-api-access-wqvc7") pod "7858afb3-a233-4be7-a25d-37294795cec3" (UID: "7858afb3-a233-4be7-a25d-37294795cec3"). InnerVolumeSpecName "kube-api-access-wqvc7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 07:30:25 crc kubenswrapper[4998]: I0203 07:30:25.759245 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7858afb3-a233-4be7-a25d-37294795cec3-utilities\") on node \"crc\" DevicePath \"\""
Feb 03 07:30:25 crc kubenswrapper[4998]: I0203 07:30:25.759564 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wqvc7\" (UniqueName: \"kubernetes.io/projected/7858afb3-a233-4be7-a25d-37294795cec3-kube-api-access-wqvc7\") on node \"crc\" DevicePath \"\""
Feb 03 07:30:25 crc kubenswrapper[4998]: I0203 07:30:25.932462 4998 generic.go:334] "Generic (PLEG): container finished" podID="7858afb3-a233-4be7-a25d-37294795cec3" containerID="c6e2f39d2678f0d17e539b9903f8cb88d12605f81e657cdf03da55d9a770c006" exitCode=0
Feb 03 07:30:25 crc kubenswrapper[4998]: I0203 07:30:25.932509 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cgckc" event={"ID":"7858afb3-a233-4be7-a25d-37294795cec3","Type":"ContainerDied","Data":"c6e2f39d2678f0d17e539b9903f8cb88d12605f81e657cdf03da55d9a770c006"}
Feb 03 07:30:25 crc kubenswrapper[4998]: I0203 07:30:25.932533 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cgckc"
Feb 03 07:30:25 crc kubenswrapper[4998]: I0203 07:30:25.932555 4998 scope.go:117] "RemoveContainer" containerID="c6e2f39d2678f0d17e539b9903f8cb88d12605f81e657cdf03da55d9a770c006"
Feb 03 07:30:25 crc kubenswrapper[4998]: I0203 07:30:25.932539 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cgckc" event={"ID":"7858afb3-a233-4be7-a25d-37294795cec3","Type":"ContainerDied","Data":"02ec7830e8623b4762b192c6d30b4f6a838dc8862825f4aad3fe433e5a8bc74b"}
Feb 03 07:30:25 crc kubenswrapper[4998]: I0203 07:30:25.953648 4998 scope.go:117] "RemoveContainer" containerID="7456d569871b8400dbea94d1d8585d7e15453bdc157a690cb686f0271cda880c"
Feb 03 07:30:25 crc kubenswrapper[4998]: I0203 07:30:25.976244 4998 scope.go:117] "RemoveContainer" containerID="54d062e2a34ddf43922f968fbe4f52fad6bc5bba994ef73362bbb391593a03b2"
Feb 03 07:30:25 crc kubenswrapper[4998]: I0203 07:30:25.997912 4998 scope.go:117] "RemoveContainer" containerID="c6e2f39d2678f0d17e539b9903f8cb88d12605f81e657cdf03da55d9a770c006"
Feb 03 07:30:25 crc kubenswrapper[4998]: E0203 07:30:25.998409 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c6e2f39d2678f0d17e539b9903f8cb88d12605f81e657cdf03da55d9a770c006\": container with ID starting with c6e2f39d2678f0d17e539b9903f8cb88d12605f81e657cdf03da55d9a770c006 not found: ID does not exist" containerID="c6e2f39d2678f0d17e539b9903f8cb88d12605f81e657cdf03da55d9a770c006"
Feb 03 07:30:25 crc kubenswrapper[4998]: I0203 07:30:25.998458 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6e2f39d2678f0d17e539b9903f8cb88d12605f81e657cdf03da55d9a770c006"} err="failed to get container status \"c6e2f39d2678f0d17e539b9903f8cb88d12605f81e657cdf03da55d9a770c006\": rpc error: code = NotFound desc = could not find container \"c6e2f39d2678f0d17e539b9903f8cb88d12605f81e657cdf03da55d9a770c006\": container with ID starting with c6e2f39d2678f0d17e539b9903f8cb88d12605f81e657cdf03da55d9a770c006 not found: ID does not exist"
Feb 03 07:30:25 crc kubenswrapper[4998]: I0203 07:30:25.998484 4998 scope.go:117] "RemoveContainer" containerID="7456d569871b8400dbea94d1d8585d7e15453bdc157a690cb686f0271cda880c"
Feb 03 07:30:25 crc kubenswrapper[4998]: E0203 07:30:25.998930 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7456d569871b8400dbea94d1d8585d7e15453bdc157a690cb686f0271cda880c\": container with ID starting with 7456d569871b8400dbea94d1d8585d7e15453bdc157a690cb686f0271cda880c not found: ID does not exist" containerID="7456d569871b8400dbea94d1d8585d7e15453bdc157a690cb686f0271cda880c"
Feb 03 07:30:25 crc kubenswrapper[4998]: I0203 07:30:25.998957 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7456d569871b8400dbea94d1d8585d7e15453bdc157a690cb686f0271cda880c"} err="failed to get container status \"7456d569871b8400dbea94d1d8585d7e15453bdc157a690cb686f0271cda880c\": rpc error: code = NotFound desc = could not find container \"7456d569871b8400dbea94d1d8585d7e15453bdc157a690cb686f0271cda880c\": container with ID starting with 7456d569871b8400dbea94d1d8585d7e15453bdc157a690cb686f0271cda880c not found: ID does not exist"
Feb 03 07:30:25 crc kubenswrapper[4998]: I0203 07:30:25.998973 4998 scope.go:117] "RemoveContainer" containerID="54d062e2a34ddf43922f968fbe4f52fad6bc5bba994ef73362bbb391593a03b2"
Feb 03 07:30:25 crc kubenswrapper[4998]: E0203 07:30:25.999231 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54d062e2a34ddf43922f968fbe4f52fad6bc5bba994ef73362bbb391593a03b2\": container with ID starting with 54d062e2a34ddf43922f968fbe4f52fad6bc5bba994ef73362bbb391593a03b2 not found: ID does not exist" containerID="54d062e2a34ddf43922f968fbe4f52fad6bc5bba994ef73362bbb391593a03b2"
Feb 03 07:30:25 crc kubenswrapper[4998]: I0203 07:30:25.999252 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54d062e2a34ddf43922f968fbe4f52fad6bc5bba994ef73362bbb391593a03b2"} err="failed to get container status \"54d062e2a34ddf43922f968fbe4f52fad6bc5bba994ef73362bbb391593a03b2\": rpc error: code = NotFound desc = could not find container \"54d062e2a34ddf43922f968fbe4f52fad6bc5bba994ef73362bbb391593a03b2\": container with ID starting with 54d062e2a34ddf43922f968fbe4f52fad6bc5bba994ef73362bbb391593a03b2 not found: ID does not exist"
Feb 03 07:30:26 crc kubenswrapper[4998]: I0203 07:30:26.532994 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7858afb3-a233-4be7-a25d-37294795cec3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7858afb3-a233-4be7-a25d-37294795cec3" (UID: "7858afb3-a233-4be7-a25d-37294795cec3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 07:30:26 crc kubenswrapper[4998]: I0203 07:30:26.571678 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7858afb3-a233-4be7-a25d-37294795cec3-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 03 07:30:26 crc kubenswrapper[4998]: I0203 07:30:26.858673 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cgckc"]
Feb 03 07:30:26 crc kubenswrapper[4998]: I0203 07:30:26.863268 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-cgckc"]
Feb 03 07:30:28 crc kubenswrapper[4998]: I0203 07:30:28.436134 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7858afb3-a233-4be7-a25d-37294795cec3" path="/var/lib/kubelet/pods/7858afb3-a233-4be7-a25d-37294795cec3/volumes"
Feb 03 07:32:42 crc kubenswrapper[4998]: I0203 07:32:42.754840 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 03 07:32:42 crc kubenswrapper[4998]: I0203 07:32:42.755642 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 03 07:33:12 crc kubenswrapper[4998]: I0203 07:33:12.754793 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 03 07:33:12 crc kubenswrapper[4998]: I0203 07:33:12.755397 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 03 07:33:42 crc kubenswrapper[4998]: I0203 07:33:42.754441 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 03 07:33:42 crc kubenswrapper[4998]: I0203 07:33:42.754960 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 03 07:33:42 crc kubenswrapper[4998]: I0203 07:33:42.754997 4998 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x"
Feb 03 07:33:42 crc kubenswrapper[4998]: I0203 07:33:42.755550 4998 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"96d1e87e1ff254efe2d53cc5c3d38581a88383d17c7a9efadc06af05628ce16e"} pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Feb 03 07:33:42 crc kubenswrapper[4998]: I0203 07:33:42.755599 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" containerID="cri-o://96d1e87e1ff254efe2d53cc5c3d38581a88383d17c7a9efadc06af05628ce16e" gracePeriod=600
Feb 03 07:33:43 crc kubenswrapper[4998]: I0203 07:33:43.473937 4998 generic.go:334] "Generic (PLEG): container finished" podID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerID="96d1e87e1ff254efe2d53cc5c3d38581a88383d17c7a9efadc06af05628ce16e" exitCode=0
Feb 03 07:33:43 crc kubenswrapper[4998]: I0203 07:33:43.474017 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerDied","Data":"96d1e87e1ff254efe2d53cc5c3d38581a88383d17c7a9efadc06af05628ce16e"}
Feb 03 07:33:43 crc kubenswrapper[4998]: I0203 07:33:43.474293 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerStarted","Data":"df196c0f01992317b302260b7979b96ea091f5fea889fd76e77fe90a1aef9bf3"}
Feb 03 07:33:43 crc kubenswrapper[4998]: I0203 07:33:43.474315 4998 scope.go:117] "RemoveContainer" containerID="6b6bba6097c868cfa9bf394a6a455eeb55142d4b1ef2e0b1da75c62a9daa057e"
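The liveness failures above are plain HTTP probes against http://127.0.0.1:8798/health; "connection refused" means nothing is listening on the port, and after the failure threshold is reached the kubelet kills the container (here with gracePeriod=600) and restarts it. A hypothetical stand-in for such an endpoint in Go (the real machine-config-daemon serves its own handler; this only mirrors the probe contract):

package main

import (
	"log"
	"net/http"
)

func main() {
	// Any 2xx/3xx response counts as probe success for an httpGet probe.
	http.HandleFunc("/health", func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusOK)
	})
	// While no process listens on this address, the kubelet logs exactly
	// what appears above: "dial tcp 127.0.0.1:8798: connect: connection refused".
	log.Fatal(http.ListenAndServe("127.0.0.1:8798", nil))
}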
removing container" podUID="7858afb3-a233-4be7-a25d-37294795cec3" containerName="extract-utilities" Feb 03 07:35:05 crc kubenswrapper[4998]: I0203 07:35:05.652137 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="7858afb3-a233-4be7-a25d-37294795cec3" containerName="extract-utilities" Feb 03 07:35:05 crc kubenswrapper[4998]: E0203 07:35:05.652153 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7858afb3-a233-4be7-a25d-37294795cec3" containerName="registry-server" Feb 03 07:35:05 crc kubenswrapper[4998]: I0203 07:35:05.652161 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="7858afb3-a233-4be7-a25d-37294795cec3" containerName="registry-server" Feb 03 07:35:05 crc kubenswrapper[4998]: E0203 07:35:05.652181 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7858afb3-a233-4be7-a25d-37294795cec3" containerName="extract-content" Feb 03 07:35:05 crc kubenswrapper[4998]: I0203 07:35:05.652186 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="7858afb3-a233-4be7-a25d-37294795cec3" containerName="extract-content" Feb 03 07:35:05 crc kubenswrapper[4998]: I0203 07:35:05.652361 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="7858afb3-a233-4be7-a25d-37294795cec3" containerName="registry-server" Feb 03 07:35:05 crc kubenswrapper[4998]: I0203 07:35:05.653579 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fhr9j" Feb 03 07:35:05 crc kubenswrapper[4998]: I0203 07:35:05.665321 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fhr9j"] Feb 03 07:35:05 crc kubenswrapper[4998]: I0203 07:35:05.794453 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15cecdd2-61fa-4419-a5f9-95a09d326633-utilities\") pod \"redhat-marketplace-fhr9j\" (UID: \"15cecdd2-61fa-4419-a5f9-95a09d326633\") " pod="openshift-marketplace/redhat-marketplace-fhr9j" Feb 03 07:35:05 crc kubenswrapper[4998]: I0203 07:35:05.794539 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpt9g\" (UniqueName: \"kubernetes.io/projected/15cecdd2-61fa-4419-a5f9-95a09d326633-kube-api-access-gpt9g\") pod \"redhat-marketplace-fhr9j\" (UID: \"15cecdd2-61fa-4419-a5f9-95a09d326633\") " pod="openshift-marketplace/redhat-marketplace-fhr9j" Feb 03 07:35:05 crc kubenswrapper[4998]: I0203 07:35:05.794592 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15cecdd2-61fa-4419-a5f9-95a09d326633-catalog-content\") pod \"redhat-marketplace-fhr9j\" (UID: \"15cecdd2-61fa-4419-a5f9-95a09d326633\") " pod="openshift-marketplace/redhat-marketplace-fhr9j" Feb 03 07:35:05 crc kubenswrapper[4998]: I0203 07:35:05.895894 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15cecdd2-61fa-4419-a5f9-95a09d326633-catalog-content\") pod \"redhat-marketplace-fhr9j\" (UID: \"15cecdd2-61fa-4419-a5f9-95a09d326633\") " pod="openshift-marketplace/redhat-marketplace-fhr9j" Feb 03 07:35:05 crc kubenswrapper[4998]: I0203 07:35:05.896043 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15cecdd2-61fa-4419-a5f9-95a09d326633-utilities\") pod 
\"redhat-marketplace-fhr9j\" (UID: \"15cecdd2-61fa-4419-a5f9-95a09d326633\") " pod="openshift-marketplace/redhat-marketplace-fhr9j" Feb 03 07:35:05 crc kubenswrapper[4998]: I0203 07:35:05.896101 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpt9g\" (UniqueName: \"kubernetes.io/projected/15cecdd2-61fa-4419-a5f9-95a09d326633-kube-api-access-gpt9g\") pod \"redhat-marketplace-fhr9j\" (UID: \"15cecdd2-61fa-4419-a5f9-95a09d326633\") " pod="openshift-marketplace/redhat-marketplace-fhr9j" Feb 03 07:35:05 crc kubenswrapper[4998]: I0203 07:35:05.896655 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15cecdd2-61fa-4419-a5f9-95a09d326633-catalog-content\") pod \"redhat-marketplace-fhr9j\" (UID: \"15cecdd2-61fa-4419-a5f9-95a09d326633\") " pod="openshift-marketplace/redhat-marketplace-fhr9j" Feb 03 07:35:05 crc kubenswrapper[4998]: I0203 07:35:05.896829 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15cecdd2-61fa-4419-a5f9-95a09d326633-utilities\") pod \"redhat-marketplace-fhr9j\" (UID: \"15cecdd2-61fa-4419-a5f9-95a09d326633\") " pod="openshift-marketplace/redhat-marketplace-fhr9j" Feb 03 07:35:05 crc kubenswrapper[4998]: I0203 07:35:05.918418 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpt9g\" (UniqueName: \"kubernetes.io/projected/15cecdd2-61fa-4419-a5f9-95a09d326633-kube-api-access-gpt9g\") pod \"redhat-marketplace-fhr9j\" (UID: \"15cecdd2-61fa-4419-a5f9-95a09d326633\") " pod="openshift-marketplace/redhat-marketplace-fhr9j" Feb 03 07:35:05 crc kubenswrapper[4998]: I0203 07:35:05.973810 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fhr9j" Feb 03 07:35:06 crc kubenswrapper[4998]: I0203 07:35:06.447703 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fhr9j"] Feb 03 07:35:07 crc kubenswrapper[4998]: I0203 07:35:07.130986 4998 generic.go:334] "Generic (PLEG): container finished" podID="15cecdd2-61fa-4419-a5f9-95a09d326633" containerID="10a20616ebd1becc3a9b0d6bc15b7171c6c768e62d4b99cee4ef18ae56da8c1e" exitCode=0 Feb 03 07:35:07 crc kubenswrapper[4998]: I0203 07:35:07.131064 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fhr9j" event={"ID":"15cecdd2-61fa-4419-a5f9-95a09d326633","Type":"ContainerDied","Data":"10a20616ebd1becc3a9b0d6bc15b7171c6c768e62d4b99cee4ef18ae56da8c1e"} Feb 03 07:35:07 crc kubenswrapper[4998]: I0203 07:35:07.131287 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fhr9j" event={"ID":"15cecdd2-61fa-4419-a5f9-95a09d326633","Type":"ContainerStarted","Data":"d92a1347ed6cccae455111f4deaab4f6adf476c902b1b3df4b464e6d636f983b"} Feb 03 07:35:09 crc kubenswrapper[4998]: I0203 07:35:09.152090 4998 generic.go:334] "Generic (PLEG): container finished" podID="15cecdd2-61fa-4419-a5f9-95a09d326633" containerID="b6c78c329699660282bbfdaf4c677150d0f41cf97b5f71dbcf8637a0dcbfbcc5" exitCode=0 Feb 03 07:35:09 crc kubenswrapper[4998]: I0203 07:35:09.152396 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fhr9j" event={"ID":"15cecdd2-61fa-4419-a5f9-95a09d326633","Type":"ContainerDied","Data":"b6c78c329699660282bbfdaf4c677150d0f41cf97b5f71dbcf8637a0dcbfbcc5"} Feb 03 07:35:10 crc kubenswrapper[4998]: I0203 07:35:10.161435 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fhr9j" event={"ID":"15cecdd2-61fa-4419-a5f9-95a09d326633","Type":"ContainerStarted","Data":"0b939eb364a2a391903b9399aef44dae8b6528d7b8f4211f55c77406aa470f12"} Feb 03 07:35:10 crc kubenswrapper[4998]: I0203 07:35:10.182619 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-fhr9j" podStartSLOduration=2.7496662670000003 podStartE2EDuration="5.18259779s" podCreationTimestamp="2026-02-03 07:35:05 +0000 UTC" firstStartedPulling="2026-02-03 07:35:07.132360542 +0000 UTC m=+2945.419054348" lastFinishedPulling="2026-02-03 07:35:09.565292065 +0000 UTC m=+2947.851985871" observedRunningTime="2026-02-03 07:35:10.17980618 +0000 UTC m=+2948.466500016" watchObservedRunningTime="2026-02-03 07:35:10.18259779 +0000 UTC m=+2948.469291616" Feb 03 07:35:15 crc kubenswrapper[4998]: I0203 07:35:15.974822 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-fhr9j" Feb 03 07:35:15 crc kubenswrapper[4998]: I0203 07:35:15.991849 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-fhr9j" Feb 03 07:35:16 crc kubenswrapper[4998]: I0203 07:35:16.042647 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-fhr9j" Feb 03 07:35:16 crc kubenswrapper[4998]: I0203 07:35:16.246903 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-fhr9j" Feb 03 07:35:16 crc kubenswrapper[4998]: I0203 07:35:16.323116 4998 kubelet.go:2437] "SyncLoop DELETE" 
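Every entry in this file carries a journald prefix (Feb 03 ... crc kubenswrapper[4998]:) followed by a klog header: a severity letter fused with MMDD, the wall time, the PID, and the source file:line. A small Go parser for the klog part, with a regular expression fitted to the samples in this log rather than any general grammar (it assumes the journald prefix has already been split off):

package main

import (
	"fmt"
	"regexp"
)

// klogHeader matches headers like
// `I0203 07:35:16.246903 4998 kubelet.go:2542] "SyncLoop (probe)" ...`:
// severity (I/W/E/F), MMDD, time, PID, file:line, then the message.
var klogHeader = regexp.MustCompile(
	`^([IWEF])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d+)\s+(\d+) ([\w.]+:\d+)\] (.*)$`)

func main() {
	line := `I0203 07:35:16.246903 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-fhr9j"`
	m := klogHeader.FindStringSubmatch(line)
	if m == nil {
		fmt.Println("no match")
		return
	}
	fmt.Printf("severity=%s date=%s time=%s pid=%s src=%s\nmsg=%s\n",
		m[1], m[2], m[3], m[4], m[5], m[6])
}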
source="api" pods=["openshift-marketplace/redhat-marketplace-fhr9j"] Feb 03 07:35:18 crc kubenswrapper[4998]: I0203 07:35:18.210527 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-fhr9j" podUID="15cecdd2-61fa-4419-a5f9-95a09d326633" containerName="registry-server" containerID="cri-o://0b939eb364a2a391903b9399aef44dae8b6528d7b8f4211f55c77406aa470f12" gracePeriod=2 Feb 03 07:35:19 crc kubenswrapper[4998]: I0203 07:35:19.190657 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fhr9j" Feb 03 07:35:19 crc kubenswrapper[4998]: I0203 07:35:19.227090 4998 generic.go:334] "Generic (PLEG): container finished" podID="15cecdd2-61fa-4419-a5f9-95a09d326633" containerID="0b939eb364a2a391903b9399aef44dae8b6528d7b8f4211f55c77406aa470f12" exitCode=0 Feb 03 07:35:19 crc kubenswrapper[4998]: I0203 07:35:19.227145 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fhr9j" event={"ID":"15cecdd2-61fa-4419-a5f9-95a09d326633","Type":"ContainerDied","Data":"0b939eb364a2a391903b9399aef44dae8b6528d7b8f4211f55c77406aa470f12"} Feb 03 07:35:19 crc kubenswrapper[4998]: I0203 07:35:19.227177 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fhr9j" event={"ID":"15cecdd2-61fa-4419-a5f9-95a09d326633","Type":"ContainerDied","Data":"d92a1347ed6cccae455111f4deaab4f6adf476c902b1b3df4b464e6d636f983b"} Feb 03 07:35:19 crc kubenswrapper[4998]: I0203 07:35:19.227197 4998 scope.go:117] "RemoveContainer" containerID="0b939eb364a2a391903b9399aef44dae8b6528d7b8f4211f55c77406aa470f12" Feb 03 07:35:19 crc kubenswrapper[4998]: I0203 07:35:19.227353 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fhr9j" Feb 03 07:35:19 crc kubenswrapper[4998]: I0203 07:35:19.260733 4998 scope.go:117] "RemoveContainer" containerID="b6c78c329699660282bbfdaf4c677150d0f41cf97b5f71dbcf8637a0dcbfbcc5" Feb 03 07:35:19 crc kubenswrapper[4998]: I0203 07:35:19.277122 4998 scope.go:117] "RemoveContainer" containerID="10a20616ebd1becc3a9b0d6bc15b7171c6c768e62d4b99cee4ef18ae56da8c1e" Feb 03 07:35:19 crc kubenswrapper[4998]: I0203 07:35:19.303310 4998 scope.go:117] "RemoveContainer" containerID="0b939eb364a2a391903b9399aef44dae8b6528d7b8f4211f55c77406aa470f12" Feb 03 07:35:19 crc kubenswrapper[4998]: E0203 07:35:19.303823 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b939eb364a2a391903b9399aef44dae8b6528d7b8f4211f55c77406aa470f12\": container with ID starting with 0b939eb364a2a391903b9399aef44dae8b6528d7b8f4211f55c77406aa470f12 not found: ID does not exist" containerID="0b939eb364a2a391903b9399aef44dae8b6528d7b8f4211f55c77406aa470f12" Feb 03 07:35:19 crc kubenswrapper[4998]: I0203 07:35:19.303872 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b939eb364a2a391903b9399aef44dae8b6528d7b8f4211f55c77406aa470f12"} err="failed to get container status \"0b939eb364a2a391903b9399aef44dae8b6528d7b8f4211f55c77406aa470f12\": rpc error: code = NotFound desc = could not find container \"0b939eb364a2a391903b9399aef44dae8b6528d7b8f4211f55c77406aa470f12\": container with ID starting with 0b939eb364a2a391903b9399aef44dae8b6528d7b8f4211f55c77406aa470f12 not found: ID does not exist" Feb 03 07:35:19 crc kubenswrapper[4998]: I0203 07:35:19.303902 4998 scope.go:117] "RemoveContainer" containerID="b6c78c329699660282bbfdaf4c677150d0f41cf97b5f71dbcf8637a0dcbfbcc5" Feb 03 07:35:19 crc kubenswrapper[4998]: E0203 07:35:19.304318 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6c78c329699660282bbfdaf4c677150d0f41cf97b5f71dbcf8637a0dcbfbcc5\": container with ID starting with b6c78c329699660282bbfdaf4c677150d0f41cf97b5f71dbcf8637a0dcbfbcc5 not found: ID does not exist" containerID="b6c78c329699660282bbfdaf4c677150d0f41cf97b5f71dbcf8637a0dcbfbcc5" Feb 03 07:35:19 crc kubenswrapper[4998]: I0203 07:35:19.304348 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6c78c329699660282bbfdaf4c677150d0f41cf97b5f71dbcf8637a0dcbfbcc5"} err="failed to get container status \"b6c78c329699660282bbfdaf4c677150d0f41cf97b5f71dbcf8637a0dcbfbcc5\": rpc error: code = NotFound desc = could not find container \"b6c78c329699660282bbfdaf4c677150d0f41cf97b5f71dbcf8637a0dcbfbcc5\": container with ID starting with b6c78c329699660282bbfdaf4c677150d0f41cf97b5f71dbcf8637a0dcbfbcc5 not found: ID does not exist" Feb 03 07:35:19 crc kubenswrapper[4998]: I0203 07:35:19.304365 4998 scope.go:117] "RemoveContainer" containerID="10a20616ebd1becc3a9b0d6bc15b7171c6c768e62d4b99cee4ef18ae56da8c1e" Feb 03 07:35:19 crc kubenswrapper[4998]: E0203 07:35:19.305119 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10a20616ebd1becc3a9b0d6bc15b7171c6c768e62d4b99cee4ef18ae56da8c1e\": container with ID starting with 10a20616ebd1becc3a9b0d6bc15b7171c6c768e62d4b99cee4ef18ae56da8c1e not found: ID does not exist" containerID="10a20616ebd1becc3a9b0d6bc15b7171c6c768e62d4b99cee4ef18ae56da8c1e" 
Feb 03 07:35:19 crc kubenswrapper[4998]: I0203 07:35:19.305176 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10a20616ebd1becc3a9b0d6bc15b7171c6c768e62d4b99cee4ef18ae56da8c1e"} err="failed to get container status \"10a20616ebd1becc3a9b0d6bc15b7171c6c768e62d4b99cee4ef18ae56da8c1e\": rpc error: code = NotFound desc = could not find container \"10a20616ebd1becc3a9b0d6bc15b7171c6c768e62d4b99cee4ef18ae56da8c1e\": container with ID starting with 10a20616ebd1becc3a9b0d6bc15b7171c6c768e62d4b99cee4ef18ae56da8c1e not found: ID does not exist" Feb 03 07:35:19 crc kubenswrapper[4998]: I0203 07:35:19.307747 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gpt9g\" (UniqueName: \"kubernetes.io/projected/15cecdd2-61fa-4419-a5f9-95a09d326633-kube-api-access-gpt9g\") pod \"15cecdd2-61fa-4419-a5f9-95a09d326633\" (UID: \"15cecdd2-61fa-4419-a5f9-95a09d326633\") " Feb 03 07:35:19 crc kubenswrapper[4998]: I0203 07:35:19.307992 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15cecdd2-61fa-4419-a5f9-95a09d326633-catalog-content\") pod \"15cecdd2-61fa-4419-a5f9-95a09d326633\" (UID: \"15cecdd2-61fa-4419-a5f9-95a09d326633\") " Feb 03 07:35:19 crc kubenswrapper[4998]: I0203 07:35:19.308056 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15cecdd2-61fa-4419-a5f9-95a09d326633-utilities\") pod \"15cecdd2-61fa-4419-a5f9-95a09d326633\" (UID: \"15cecdd2-61fa-4419-a5f9-95a09d326633\") " Feb 03 07:35:19 crc kubenswrapper[4998]: I0203 07:35:19.309574 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/15cecdd2-61fa-4419-a5f9-95a09d326633-utilities" (OuterVolumeSpecName: "utilities") pod "15cecdd2-61fa-4419-a5f9-95a09d326633" (UID: "15cecdd2-61fa-4419-a5f9-95a09d326633"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:35:19 crc kubenswrapper[4998]: I0203 07:35:19.313568 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15cecdd2-61fa-4419-a5f9-95a09d326633-kube-api-access-gpt9g" (OuterVolumeSpecName: "kube-api-access-gpt9g") pod "15cecdd2-61fa-4419-a5f9-95a09d326633" (UID: "15cecdd2-61fa-4419-a5f9-95a09d326633"). InnerVolumeSpecName "kube-api-access-gpt9g". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:35:19 crc kubenswrapper[4998]: I0203 07:35:19.341563 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/15cecdd2-61fa-4419-a5f9-95a09d326633-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "15cecdd2-61fa-4419-a5f9-95a09d326633" (UID: "15cecdd2-61fa-4419-a5f9-95a09d326633"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:35:19 crc kubenswrapper[4998]: I0203 07:35:19.410524 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gpt9g\" (UniqueName: \"kubernetes.io/projected/15cecdd2-61fa-4419-a5f9-95a09d326633-kube-api-access-gpt9g\") on node \"crc\" DevicePath \"\"" Feb 03 07:35:19 crc kubenswrapper[4998]: I0203 07:35:19.410593 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15cecdd2-61fa-4419-a5f9-95a09d326633-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:35:19 crc kubenswrapper[4998]: I0203 07:35:19.410606 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15cecdd2-61fa-4419-a5f9-95a09d326633-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:35:19 crc kubenswrapper[4998]: I0203 07:35:19.589252 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fhr9j"] Feb 03 07:35:19 crc kubenswrapper[4998]: I0203 07:35:19.589330 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-fhr9j"] Feb 03 07:35:20 crc kubenswrapper[4998]: I0203 07:35:20.437984 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15cecdd2-61fa-4419-a5f9-95a09d326633" path="/var/lib/kubelet/pods/15cecdd2-61fa-4419-a5f9-95a09d326633/volumes" Feb 03 07:36:12 crc kubenswrapper[4998]: I0203 07:36:12.753982 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:36:12 crc kubenswrapper[4998]: I0203 07:36:12.754571 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:36:23 crc kubenswrapper[4998]: I0203 07:36:23.475815 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-rlmzg"] Feb 03 07:36:23 crc kubenswrapper[4998]: E0203 07:36:23.477218 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15cecdd2-61fa-4419-a5f9-95a09d326633" containerName="registry-server" Feb 03 07:36:23 crc kubenswrapper[4998]: I0203 07:36:23.477243 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="15cecdd2-61fa-4419-a5f9-95a09d326633" containerName="registry-server" Feb 03 07:36:23 crc kubenswrapper[4998]: E0203 07:36:23.477279 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15cecdd2-61fa-4419-a5f9-95a09d326633" containerName="extract-utilities" Feb 03 07:36:23 crc kubenswrapper[4998]: I0203 07:36:23.477290 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="15cecdd2-61fa-4419-a5f9-95a09d326633" containerName="extract-utilities" Feb 03 07:36:23 crc kubenswrapper[4998]: E0203 07:36:23.477312 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15cecdd2-61fa-4419-a5f9-95a09d326633" containerName="extract-content" Feb 03 07:36:23 crc kubenswrapper[4998]: I0203 07:36:23.477321 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="15cecdd2-61fa-4419-a5f9-95a09d326633" 
containerName="extract-content" Feb 03 07:36:23 crc kubenswrapper[4998]: I0203 07:36:23.477508 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="15cecdd2-61fa-4419-a5f9-95a09d326633" containerName="registry-server" Feb 03 07:36:23 crc kubenswrapper[4998]: I0203 07:36:23.478897 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rlmzg" Feb 03 07:36:23 crc kubenswrapper[4998]: I0203 07:36:23.486279 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rlmzg"] Feb 03 07:36:23 crc kubenswrapper[4998]: I0203 07:36:23.630754 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494-catalog-content\") pod \"community-operators-rlmzg\" (UID: \"9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494\") " pod="openshift-marketplace/community-operators-rlmzg" Feb 03 07:36:23 crc kubenswrapper[4998]: I0203 07:36:23.630873 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gs6h2\" (UniqueName: \"kubernetes.io/projected/9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494-kube-api-access-gs6h2\") pod \"community-operators-rlmzg\" (UID: \"9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494\") " pod="openshift-marketplace/community-operators-rlmzg" Feb 03 07:36:23 crc kubenswrapper[4998]: I0203 07:36:23.630953 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494-utilities\") pod \"community-operators-rlmzg\" (UID: \"9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494\") " pod="openshift-marketplace/community-operators-rlmzg" Feb 03 07:36:23 crc kubenswrapper[4998]: I0203 07:36:23.733582 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494-utilities\") pod \"community-operators-rlmzg\" (UID: \"9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494\") " pod="openshift-marketplace/community-operators-rlmzg" Feb 03 07:36:23 crc kubenswrapper[4998]: I0203 07:36:23.733666 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494-catalog-content\") pod \"community-operators-rlmzg\" (UID: \"9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494\") " pod="openshift-marketplace/community-operators-rlmzg" Feb 03 07:36:23 crc kubenswrapper[4998]: I0203 07:36:23.733730 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gs6h2\" (UniqueName: \"kubernetes.io/projected/9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494-kube-api-access-gs6h2\") pod \"community-operators-rlmzg\" (UID: \"9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494\") " pod="openshift-marketplace/community-operators-rlmzg" Feb 03 07:36:23 crc kubenswrapper[4998]: I0203 07:36:23.734270 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494-utilities\") pod \"community-operators-rlmzg\" (UID: \"9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494\") " pod="openshift-marketplace/community-operators-rlmzg" Feb 03 07:36:23 crc kubenswrapper[4998]: I0203 07:36:23.734291 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494-catalog-content\") pod \"community-operators-rlmzg\" (UID: \"9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494\") " pod="openshift-marketplace/community-operators-rlmzg" Feb 03 07:36:23 crc kubenswrapper[4998]: I0203 07:36:23.758620 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gs6h2\" (UniqueName: \"kubernetes.io/projected/9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494-kube-api-access-gs6h2\") pod \"community-operators-rlmzg\" (UID: \"9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494\") " pod="openshift-marketplace/community-operators-rlmzg" Feb 03 07:36:23 crc kubenswrapper[4998]: I0203 07:36:23.816087 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rlmzg" Feb 03 07:36:24 crc kubenswrapper[4998]: I0203 07:36:24.168918 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rlmzg"] Feb 03 07:36:24 crc kubenswrapper[4998]: I0203 07:36:24.697902 4998 generic.go:334] "Generic (PLEG): container finished" podID="9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494" containerID="dcea844b64dd916f0651daa2017b30a825b4272255844f7887b9997a081e1150" exitCode=0 Feb 03 07:36:24 crc kubenswrapper[4998]: I0203 07:36:24.697962 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rlmzg" event={"ID":"9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494","Type":"ContainerDied","Data":"dcea844b64dd916f0651daa2017b30a825b4272255844f7887b9997a081e1150"} Feb 03 07:36:24 crc kubenswrapper[4998]: I0203 07:36:24.698297 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rlmzg" event={"ID":"9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494","Type":"ContainerStarted","Data":"76c3562a25a6d934564971381ea75fdc6dfc2f484430f3dd2db243a7812eb937"} Feb 03 07:36:24 crc kubenswrapper[4998]: I0203 07:36:24.700552 4998 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 03 07:36:25 crc kubenswrapper[4998]: I0203 07:36:25.707325 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rlmzg" event={"ID":"9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494","Type":"ContainerStarted","Data":"c542193eb1ba5c2060bf13610870181ff180246b4bbaf046a33b26c98efede83"} Feb 03 07:36:26 crc kubenswrapper[4998]: I0203 07:36:26.717095 4998 generic.go:334] "Generic (PLEG): container finished" podID="9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494" containerID="c542193eb1ba5c2060bf13610870181ff180246b4bbaf046a33b26c98efede83" exitCode=0 Feb 03 07:36:26 crc kubenswrapper[4998]: I0203 07:36:26.717152 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rlmzg" event={"ID":"9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494","Type":"ContainerDied","Data":"c542193eb1ba5c2060bf13610870181ff180246b4bbaf046a33b26c98efede83"} Feb 03 07:36:27 crc kubenswrapper[4998]: I0203 07:36:27.725213 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rlmzg" event={"ID":"9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494","Type":"ContainerStarted","Data":"cc59a1933bf46d157334dfcfb65d13b952dea04fe1712d14c8d97bbc61095017"} Feb 03 07:36:27 crc kubenswrapper[4998]: I0203 07:36:27.742142 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-rlmzg" podStartSLOduration=2.10367656 
podStartE2EDuration="4.742122738s" podCreationTimestamp="2026-02-03 07:36:23 +0000 UTC" firstStartedPulling="2026-02-03 07:36:24.700296919 +0000 UTC m=+3022.986990725" lastFinishedPulling="2026-02-03 07:36:27.338743087 +0000 UTC m=+3025.625436903" observedRunningTime="2026-02-03 07:36:27.739325998 +0000 UTC m=+3026.026019824" watchObservedRunningTime="2026-02-03 07:36:27.742122738 +0000 UTC m=+3026.028816544" Feb 03 07:36:33 crc kubenswrapper[4998]: I0203 07:36:33.816299 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-rlmzg" Feb 03 07:36:33 crc kubenswrapper[4998]: I0203 07:36:33.818511 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-rlmzg" Feb 03 07:36:33 crc kubenswrapper[4998]: I0203 07:36:33.866907 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-rlmzg" Feb 03 07:36:34 crc kubenswrapper[4998]: I0203 07:36:34.816842 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-rlmzg" Feb 03 07:36:34 crc kubenswrapper[4998]: I0203 07:36:34.860288 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rlmzg"] Feb 03 07:36:36 crc kubenswrapper[4998]: I0203 07:36:36.791002 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-rlmzg" podUID="9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494" containerName="registry-server" containerID="cri-o://cc59a1933bf46d157334dfcfb65d13b952dea04fe1712d14c8d97bbc61095017" gracePeriod=2 Feb 03 07:36:39 crc kubenswrapper[4998]: I0203 07:36:39.826550 4998 generic.go:334] "Generic (PLEG): container finished" podID="9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494" containerID="cc59a1933bf46d157334dfcfb65d13b952dea04fe1712d14c8d97bbc61095017" exitCode=0 Feb 03 07:36:39 crc kubenswrapper[4998]: I0203 07:36:39.826656 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rlmzg" event={"ID":"9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494","Type":"ContainerDied","Data":"cc59a1933bf46d157334dfcfb65d13b952dea04fe1712d14c8d97bbc61095017"} Feb 03 07:36:40 crc kubenswrapper[4998]: I0203 07:36:40.446142 4998 util.go:48] "No ready sandbox for pod can be found. 
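The startup-latency entry above decomposes cleanly: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration is the same interval with the image-pull window (firstStartedPulling to lastFinishedPulling) subtracted; 4.742122738s minus 2.638446168s of pulling gives the logged 2.10367656s up to float rounding. Reproducing the arithmetic with the timestamps from the log:

    package main

    import (
        "fmt"
        "time"
    )

    func mustParse(v string) time.Time {
        // Layout matches Go's default time.Time formatting used in the log.
        t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", v)
        if err != nil {
            panic(err)
        }
        return t
    }

    func main() {
        created := mustParse("2026-02-03 07:36:23 +0000 UTC")
        firstPull := mustParse("2026-02-03 07:36:24.700296919 +0000 UTC")
        lastPull := mustParse("2026-02-03 07:36:27.338743087 +0000 UTC")
        running := mustParse("2026-02-03 07:36:27.742122738 +0000 UTC")

        e2e := running.Sub(created)
        slo := e2e - lastPull.Sub(firstPull)
        fmt.Println(e2e) // 4.742122738s
        fmt.Println(slo) // ~2.10367657s (logged as 2.10367656 after rounding)
    }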
Need to start a new one" pod="openshift-marketplace/community-operators-rlmzg" Feb 03 07:36:40 crc kubenswrapper[4998]: I0203 07:36:40.569140 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494-utilities\") pod \"9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494\" (UID: \"9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494\") " Feb 03 07:36:40 crc kubenswrapper[4998]: I0203 07:36:40.569209 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gs6h2\" (UniqueName: \"kubernetes.io/projected/9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494-kube-api-access-gs6h2\") pod \"9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494\" (UID: \"9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494\") " Feb 03 07:36:40 crc kubenswrapper[4998]: I0203 07:36:40.569248 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494-catalog-content\") pod \"9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494\" (UID: \"9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494\") " Feb 03 07:36:40 crc kubenswrapper[4998]: I0203 07:36:40.571631 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494-utilities" (OuterVolumeSpecName: "utilities") pod "9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494" (UID: "9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:36:40 crc kubenswrapper[4998]: I0203 07:36:40.582885 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494-kube-api-access-gs6h2" (OuterVolumeSpecName: "kube-api-access-gs6h2") pod "9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494" (UID: "9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494"). InnerVolumeSpecName "kube-api-access-gs6h2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:36:40 crc kubenswrapper[4998]: I0203 07:36:40.621202 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494" (UID: "9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:36:40 crc kubenswrapper[4998]: I0203 07:36:40.671307 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:36:40 crc kubenswrapper[4998]: I0203 07:36:40.671371 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gs6h2\" (UniqueName: \"kubernetes.io/projected/9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494-kube-api-access-gs6h2\") on node \"crc\" DevicePath \"\"" Feb 03 07:36:40 crc kubenswrapper[4998]: I0203 07:36:40.671388 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:36:40 crc kubenswrapper[4998]: I0203 07:36:40.835858 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rlmzg" event={"ID":"9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494","Type":"ContainerDied","Data":"76c3562a25a6d934564971381ea75fdc6dfc2f484430f3dd2db243a7812eb937"} Feb 03 07:36:40 crc kubenswrapper[4998]: I0203 07:36:40.835922 4998 scope.go:117] "RemoveContainer" containerID="cc59a1933bf46d157334dfcfb65d13b952dea04fe1712d14c8d97bbc61095017" Feb 03 07:36:40 crc kubenswrapper[4998]: I0203 07:36:40.835922 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rlmzg" Feb 03 07:36:40 crc kubenswrapper[4998]: I0203 07:36:40.854582 4998 scope.go:117] "RemoveContainer" containerID="c542193eb1ba5c2060bf13610870181ff180246b4bbaf046a33b26c98efede83" Feb 03 07:36:40 crc kubenswrapper[4998]: I0203 07:36:40.863253 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rlmzg"] Feb 03 07:36:40 crc kubenswrapper[4998]: I0203 07:36:40.877742 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-rlmzg"] Feb 03 07:36:40 crc kubenswrapper[4998]: I0203 07:36:40.888911 4998 scope.go:117] "RemoveContainer" containerID="dcea844b64dd916f0651daa2017b30a825b4272255844f7887b9997a081e1150" Feb 03 07:36:42 crc kubenswrapper[4998]: I0203 07:36:42.438036 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494" path="/var/lib/kubelet/pods/9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494/volumes" Feb 03 07:36:42 crc kubenswrapper[4998]: I0203 07:36:42.754587 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:36:42 crc kubenswrapper[4998]: I0203 07:36:42.754682 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:37:12 crc kubenswrapper[4998]: I0203 07:37:12.754892 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": 
dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:37:12 crc kubenswrapper[4998]: I0203 07:37:12.755440 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:37:12 crc kubenswrapper[4998]: I0203 07:37:12.755478 4998 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" Feb 03 07:37:12 crc kubenswrapper[4998]: I0203 07:37:12.756097 4998 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"df196c0f01992317b302260b7979b96ea091f5fea889fd76e77fe90a1aef9bf3"} pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 03 07:37:12 crc kubenswrapper[4998]: I0203 07:37:12.756152 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" containerID="cri-o://df196c0f01992317b302260b7979b96ea091f5fea889fd76e77fe90a1aef9bf3" gracePeriod=600 Feb 03 07:37:12 crc kubenswrapper[4998]: E0203 07:37:12.957754 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:37:13 crc kubenswrapper[4998]: I0203 07:37:13.068679 4998 generic.go:334] "Generic (PLEG): container finished" podID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerID="df196c0f01992317b302260b7979b96ea091f5fea889fd76e77fe90a1aef9bf3" exitCode=0 Feb 03 07:37:13 crc kubenswrapper[4998]: I0203 07:37:13.068737 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerDied","Data":"df196c0f01992317b302260b7979b96ea091f5fea889fd76e77fe90a1aef9bf3"} Feb 03 07:37:13 crc kubenswrapper[4998]: I0203 07:37:13.068826 4998 scope.go:117] "RemoveContainer" containerID="96d1e87e1ff254efe2d53cc5c3d38581a88383d17c7a9efadc06af05628ce16e" Feb 03 07:37:13 crc kubenswrapper[4998]: I0203 07:37:13.069898 4998 scope.go:117] "RemoveContainer" containerID="df196c0f01992317b302260b7979b96ea091f5fea889fd76e77fe90a1aef9bf3" Feb 03 07:37:13 crc kubenswrapper[4998]: E0203 07:37:13.070604 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:37:28 crc kubenswrapper[4998]: I0203 07:37:28.427851 4998 scope.go:117] "RemoveContainer" 
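The liveness failures above ("dial tcp 127.0.0.1:8798: connect: connection refused") are plain HTTP GETs against the container's health endpoint; a refused connection counts the same as a bad status code, and enough consecutive failures trigger the "will be restarted" kill seen here. A minimal probe sketch (the URL matches the log; kubelet's failure-threshold bookkeeping is omitted):

    package main

    import (
        "fmt"
        "net/http"
        "time"
    )

    // probe performs one HTTP check; kubelet treats 200-399 as success.
    func probe(url string) string {
        client := &http.Client{Timeout: time.Second}
        resp, err := client.Get(url)
        if err != nil {
            // e.g. "dial tcp 127.0.0.1:8798: connect: connection refused"
            return "failure: " + err.Error()
        }
        defer resp.Body.Close()
        if resp.StatusCode >= 200 && resp.StatusCode < 400 {
            return "success"
        }
        return fmt.Sprintf("failure: status %d", resp.StatusCode)
    }

    func main() {
        fmt.Println(probe("http://127.0.0.1:8798/health"))
    }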
containerID="df196c0f01992317b302260b7979b96ea091f5fea889fd76e77fe90a1aef9bf3" Feb 03 07:37:28 crc kubenswrapper[4998]: E0203 07:37:28.429869 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:37:39 crc kubenswrapper[4998]: I0203 07:37:39.427509 4998 scope.go:117] "RemoveContainer" containerID="df196c0f01992317b302260b7979b96ea091f5fea889fd76e77fe90a1aef9bf3" Feb 03 07:37:39 crc kubenswrapper[4998]: E0203 07:37:39.428238 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:37:50 crc kubenswrapper[4998]: I0203 07:37:50.427466 4998 scope.go:117] "RemoveContainer" containerID="df196c0f01992317b302260b7979b96ea091f5fea889fd76e77fe90a1aef9bf3" Feb 03 07:37:50 crc kubenswrapper[4998]: E0203 07:37:50.428038 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:38:03 crc kubenswrapper[4998]: I0203 07:38:03.427910 4998 scope.go:117] "RemoveContainer" containerID="df196c0f01992317b302260b7979b96ea091f5fea889fd76e77fe90a1aef9bf3" Feb 03 07:38:03 crc kubenswrapper[4998]: E0203 07:38:03.428743 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:38:15 crc kubenswrapper[4998]: I0203 07:38:15.427291 4998 scope.go:117] "RemoveContainer" containerID="df196c0f01992317b302260b7979b96ea091f5fea889fd76e77fe90a1aef9bf3" Feb 03 07:38:15 crc kubenswrapper[4998]: E0203 07:38:15.428009 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:38:29 crc kubenswrapper[4998]: I0203 07:38:29.427355 4998 scope.go:117] "RemoveContainer" containerID="df196c0f01992317b302260b7979b96ea091f5fea889fd76e77fe90a1aef9bf3" Feb 03 07:38:29 crc kubenswrapper[4998]: E0203 07:38:29.427990 4998 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:38:41 crc kubenswrapper[4998]: I0203 07:38:41.427616 4998 scope.go:117] "RemoveContainer" containerID="df196c0f01992317b302260b7979b96ea091f5fea889fd76e77fe90a1aef9bf3" Feb 03 07:38:41 crc kubenswrapper[4998]: E0203 07:38:41.428454 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:38:55 crc kubenswrapper[4998]: I0203 07:38:55.428009 4998 scope.go:117] "RemoveContainer" containerID="df196c0f01992317b302260b7979b96ea091f5fea889fd76e77fe90a1aef9bf3" Feb 03 07:38:55 crc kubenswrapper[4998]: E0203 07:38:55.428798 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:39:10 crc kubenswrapper[4998]: I0203 07:39:10.427432 4998 scope.go:117] "RemoveContainer" containerID="df196c0f01992317b302260b7979b96ea091f5fea889fd76e77fe90a1aef9bf3" Feb 03 07:39:10 crc kubenswrapper[4998]: E0203 07:39:10.429101 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:39:12 crc kubenswrapper[4998]: I0203 07:39:12.837069 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qhw8w"] Feb 03 07:39:12 crc kubenswrapper[4998]: E0203 07:39:12.838224 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494" containerName="extract-content" Feb 03 07:39:12 crc kubenswrapper[4998]: I0203 07:39:12.838248 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494" containerName="extract-content" Feb 03 07:39:12 crc kubenswrapper[4998]: E0203 07:39:12.838326 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494" containerName="registry-server" Feb 03 07:39:12 crc kubenswrapper[4998]: I0203 07:39:12.838339 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494" containerName="registry-server" Feb 03 07:39:12 crc kubenswrapper[4998]: E0203 07:39:12.838371 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494" 
containerName="extract-utilities" Feb 03 07:39:12 crc kubenswrapper[4998]: I0203 07:39:12.838391 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494" containerName="extract-utilities" Feb 03 07:39:12 crc kubenswrapper[4998]: I0203 07:39:12.838768 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ac6fa7d-c5b5-4bf9-be36-4bdb05f93494" containerName="registry-server" Feb 03 07:39:12 crc kubenswrapper[4998]: I0203 07:39:12.844543 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qhw8w" Feb 03 07:39:12 crc kubenswrapper[4998]: I0203 07:39:12.856452 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qhw8w"] Feb 03 07:39:13 crc kubenswrapper[4998]: I0203 07:39:13.031514 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6db3651d-12ea-45f1-a58d-247e751cea08-utilities\") pod \"certified-operators-qhw8w\" (UID: \"6db3651d-12ea-45f1-a58d-247e751cea08\") " pod="openshift-marketplace/certified-operators-qhw8w" Feb 03 07:39:13 crc kubenswrapper[4998]: I0203 07:39:13.031602 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p9ghf\" (UniqueName: \"kubernetes.io/projected/6db3651d-12ea-45f1-a58d-247e751cea08-kube-api-access-p9ghf\") pod \"certified-operators-qhw8w\" (UID: \"6db3651d-12ea-45f1-a58d-247e751cea08\") " pod="openshift-marketplace/certified-operators-qhw8w" Feb 03 07:39:13 crc kubenswrapper[4998]: I0203 07:39:13.031653 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6db3651d-12ea-45f1-a58d-247e751cea08-catalog-content\") pod \"certified-operators-qhw8w\" (UID: \"6db3651d-12ea-45f1-a58d-247e751cea08\") " pod="openshift-marketplace/certified-operators-qhw8w" Feb 03 07:39:13 crc kubenswrapper[4998]: I0203 07:39:13.132908 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p9ghf\" (UniqueName: \"kubernetes.io/projected/6db3651d-12ea-45f1-a58d-247e751cea08-kube-api-access-p9ghf\") pod \"certified-operators-qhw8w\" (UID: \"6db3651d-12ea-45f1-a58d-247e751cea08\") " pod="openshift-marketplace/certified-operators-qhw8w" Feb 03 07:39:13 crc kubenswrapper[4998]: I0203 07:39:13.133032 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6db3651d-12ea-45f1-a58d-247e751cea08-catalog-content\") pod \"certified-operators-qhw8w\" (UID: \"6db3651d-12ea-45f1-a58d-247e751cea08\") " pod="openshift-marketplace/certified-operators-qhw8w" Feb 03 07:39:13 crc kubenswrapper[4998]: I0203 07:39:13.133081 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6db3651d-12ea-45f1-a58d-247e751cea08-utilities\") pod \"certified-operators-qhw8w\" (UID: \"6db3651d-12ea-45f1-a58d-247e751cea08\") " pod="openshift-marketplace/certified-operators-qhw8w" Feb 03 07:39:13 crc kubenswrapper[4998]: I0203 07:39:13.133674 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6db3651d-12ea-45f1-a58d-247e751cea08-utilities\") pod \"certified-operators-qhw8w\" (UID: 
\"6db3651d-12ea-45f1-a58d-247e751cea08\") " pod="openshift-marketplace/certified-operators-qhw8w" Feb 03 07:39:13 crc kubenswrapper[4998]: I0203 07:39:13.133913 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6db3651d-12ea-45f1-a58d-247e751cea08-catalog-content\") pod \"certified-operators-qhw8w\" (UID: \"6db3651d-12ea-45f1-a58d-247e751cea08\") " pod="openshift-marketplace/certified-operators-qhw8w" Feb 03 07:39:13 crc kubenswrapper[4998]: I0203 07:39:13.157214 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p9ghf\" (UniqueName: \"kubernetes.io/projected/6db3651d-12ea-45f1-a58d-247e751cea08-kube-api-access-p9ghf\") pod \"certified-operators-qhw8w\" (UID: \"6db3651d-12ea-45f1-a58d-247e751cea08\") " pod="openshift-marketplace/certified-operators-qhw8w" Feb 03 07:39:13 crc kubenswrapper[4998]: I0203 07:39:13.194549 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qhw8w" Feb 03 07:39:13 crc kubenswrapper[4998]: I0203 07:39:13.680934 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qhw8w"] Feb 03 07:39:13 crc kubenswrapper[4998]: I0203 07:39:13.894426 4998 generic.go:334] "Generic (PLEG): container finished" podID="6db3651d-12ea-45f1-a58d-247e751cea08" containerID="a649c810cef4afdc691a3c5229e788aff42c5f413c887b3e68fca45e3cb1eff3" exitCode=0 Feb 03 07:39:13 crc kubenswrapper[4998]: I0203 07:39:13.894478 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qhw8w" event={"ID":"6db3651d-12ea-45f1-a58d-247e751cea08","Type":"ContainerDied","Data":"a649c810cef4afdc691a3c5229e788aff42c5f413c887b3e68fca45e3cb1eff3"} Feb 03 07:39:13 crc kubenswrapper[4998]: I0203 07:39:13.894508 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qhw8w" event={"ID":"6db3651d-12ea-45f1-a58d-247e751cea08","Type":"ContainerStarted","Data":"cd4977a06b8da7e4cc8488776ec421aaa0cd147614e008fcaed05e17f348279e"} Feb 03 07:39:14 crc kubenswrapper[4998]: I0203 07:39:14.901639 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qhw8w" event={"ID":"6db3651d-12ea-45f1-a58d-247e751cea08","Type":"ContainerStarted","Data":"2ae5a2a6c5944a456b2a52451a3b5753e606dffba676ea833479d5b60e68d041"} Feb 03 07:39:15 crc kubenswrapper[4998]: I0203 07:39:15.909498 4998 generic.go:334] "Generic (PLEG): container finished" podID="6db3651d-12ea-45f1-a58d-247e751cea08" containerID="2ae5a2a6c5944a456b2a52451a3b5753e606dffba676ea833479d5b60e68d041" exitCode=0 Feb 03 07:39:15 crc kubenswrapper[4998]: I0203 07:39:15.909540 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qhw8w" event={"ID":"6db3651d-12ea-45f1-a58d-247e751cea08","Type":"ContainerDied","Data":"2ae5a2a6c5944a456b2a52451a3b5753e606dffba676ea833479d5b60e68d041"} Feb 03 07:39:17 crc kubenswrapper[4998]: I0203 07:39:17.924828 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qhw8w" event={"ID":"6db3651d-12ea-45f1-a58d-247e751cea08","Type":"ContainerStarted","Data":"1607251087bdd941913d48135ea70fd071255dd722beb81d2e09ef1c12101b9f"} Feb 03 07:39:17 crc kubenswrapper[4998]: I0203 07:39:17.945243 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/certified-operators-qhw8w" podStartSLOduration=2.443163772 podStartE2EDuration="5.945219778s" podCreationTimestamp="2026-02-03 07:39:12 +0000 UTC" firstStartedPulling="2026-02-03 07:39:13.895937172 +0000 UTC m=+3192.182630978" lastFinishedPulling="2026-02-03 07:39:17.397993178 +0000 UTC m=+3195.684686984" observedRunningTime="2026-02-03 07:39:17.9393131 +0000 UTC m=+3196.226006926" watchObservedRunningTime="2026-02-03 07:39:17.945219778 +0000 UTC m=+3196.231913584" Feb 03 07:39:23 crc kubenswrapper[4998]: I0203 07:39:23.195285 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-qhw8w" Feb 03 07:39:23 crc kubenswrapper[4998]: I0203 07:39:23.196173 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qhw8w" Feb 03 07:39:23 crc kubenswrapper[4998]: I0203 07:39:23.253437 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-qhw8w" Feb 03 07:39:24 crc kubenswrapper[4998]: I0203 07:39:24.010204 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qhw8w" Feb 03 07:39:24 crc kubenswrapper[4998]: I0203 07:39:24.053224 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qhw8w"] Feb 03 07:39:24 crc kubenswrapper[4998]: I0203 07:39:24.428606 4998 scope.go:117] "RemoveContainer" containerID="df196c0f01992317b302260b7979b96ea091f5fea889fd76e77fe90a1aef9bf3" Feb 03 07:39:24 crc kubenswrapper[4998]: E0203 07:39:24.429007 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:39:25 crc kubenswrapper[4998]: I0203 07:39:25.974877 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-qhw8w" podUID="6db3651d-12ea-45f1-a58d-247e751cea08" containerName="registry-server" containerID="cri-o://1607251087bdd941913d48135ea70fd071255dd722beb81d2e09ef1c12101b9f" gracePeriod=2 Feb 03 07:39:27 crc kubenswrapper[4998]: I0203 07:39:27.894059 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qhw8w" Feb 03 07:39:27 crc kubenswrapper[4998]: I0203 07:39:27.989333 4998 generic.go:334] "Generic (PLEG): container finished" podID="6db3651d-12ea-45f1-a58d-247e751cea08" containerID="1607251087bdd941913d48135ea70fd071255dd722beb81d2e09ef1c12101b9f" exitCode=0 Feb 03 07:39:27 crc kubenswrapper[4998]: I0203 07:39:27.989396 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qhw8w" Feb 03 07:39:27 crc kubenswrapper[4998]: I0203 07:39:27.989396 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qhw8w" event={"ID":"6db3651d-12ea-45f1-a58d-247e751cea08","Type":"ContainerDied","Data":"1607251087bdd941913d48135ea70fd071255dd722beb81d2e09ef1c12101b9f"} Feb 03 07:39:27 crc kubenswrapper[4998]: I0203 07:39:27.989459 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qhw8w" event={"ID":"6db3651d-12ea-45f1-a58d-247e751cea08","Type":"ContainerDied","Data":"cd4977a06b8da7e4cc8488776ec421aaa0cd147614e008fcaed05e17f348279e"} Feb 03 07:39:27 crc kubenswrapper[4998]: I0203 07:39:27.989480 4998 scope.go:117] "RemoveContainer" containerID="1607251087bdd941913d48135ea70fd071255dd722beb81d2e09ef1c12101b9f" Feb 03 07:39:28 crc kubenswrapper[4998]: I0203 07:39:28.005087 4998 scope.go:117] "RemoveContainer" containerID="2ae5a2a6c5944a456b2a52451a3b5753e606dffba676ea833479d5b60e68d041" Feb 03 07:39:28 crc kubenswrapper[4998]: I0203 07:39:28.025284 4998 scope.go:117] "RemoveContainer" containerID="a649c810cef4afdc691a3c5229e788aff42c5f413c887b3e68fca45e3cb1eff3" Feb 03 07:39:28 crc kubenswrapper[4998]: I0203 07:39:28.053151 4998 scope.go:117] "RemoveContainer" containerID="1607251087bdd941913d48135ea70fd071255dd722beb81d2e09ef1c12101b9f" Feb 03 07:39:28 crc kubenswrapper[4998]: E0203 07:39:28.053750 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1607251087bdd941913d48135ea70fd071255dd722beb81d2e09ef1c12101b9f\": container with ID starting with 1607251087bdd941913d48135ea70fd071255dd722beb81d2e09ef1c12101b9f not found: ID does not exist" containerID="1607251087bdd941913d48135ea70fd071255dd722beb81d2e09ef1c12101b9f" Feb 03 07:39:28 crc kubenswrapper[4998]: I0203 07:39:28.053802 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1607251087bdd941913d48135ea70fd071255dd722beb81d2e09ef1c12101b9f"} err="failed to get container status \"1607251087bdd941913d48135ea70fd071255dd722beb81d2e09ef1c12101b9f\": rpc error: code = NotFound desc = could not find container \"1607251087bdd941913d48135ea70fd071255dd722beb81d2e09ef1c12101b9f\": container with ID starting with 1607251087bdd941913d48135ea70fd071255dd722beb81d2e09ef1c12101b9f not found: ID does not exist" Feb 03 07:39:28 crc kubenswrapper[4998]: I0203 07:39:28.053828 4998 scope.go:117] "RemoveContainer" containerID="2ae5a2a6c5944a456b2a52451a3b5753e606dffba676ea833479d5b60e68d041" Feb 03 07:39:28 crc kubenswrapper[4998]: I0203 07:39:28.054096 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p9ghf\" (UniqueName: \"kubernetes.io/projected/6db3651d-12ea-45f1-a58d-247e751cea08-kube-api-access-p9ghf\") pod \"6db3651d-12ea-45f1-a58d-247e751cea08\" (UID: \"6db3651d-12ea-45f1-a58d-247e751cea08\") " Feb 03 07:39:28 crc kubenswrapper[4998]: I0203 07:39:28.054188 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6db3651d-12ea-45f1-a58d-247e751cea08-utilities\") pod \"6db3651d-12ea-45f1-a58d-247e751cea08\" (UID: \"6db3651d-12ea-45f1-a58d-247e751cea08\") " Feb 03 07:39:28 crc kubenswrapper[4998]: I0203 07:39:28.054245 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/6db3651d-12ea-45f1-a58d-247e751cea08-catalog-content\") pod \"6db3651d-12ea-45f1-a58d-247e751cea08\" (UID: \"6db3651d-12ea-45f1-a58d-247e751cea08\") " Feb 03 07:39:28 crc kubenswrapper[4998]: E0203 07:39:28.054803 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ae5a2a6c5944a456b2a52451a3b5753e606dffba676ea833479d5b60e68d041\": container with ID starting with 2ae5a2a6c5944a456b2a52451a3b5753e606dffba676ea833479d5b60e68d041 not found: ID does not exist" containerID="2ae5a2a6c5944a456b2a52451a3b5753e606dffba676ea833479d5b60e68d041" Feb 03 07:39:28 crc kubenswrapper[4998]: I0203 07:39:28.054882 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ae5a2a6c5944a456b2a52451a3b5753e606dffba676ea833479d5b60e68d041"} err="failed to get container status \"2ae5a2a6c5944a456b2a52451a3b5753e606dffba676ea833479d5b60e68d041\": rpc error: code = NotFound desc = could not find container \"2ae5a2a6c5944a456b2a52451a3b5753e606dffba676ea833479d5b60e68d041\": container with ID starting with 2ae5a2a6c5944a456b2a52451a3b5753e606dffba676ea833479d5b60e68d041 not found: ID does not exist" Feb 03 07:39:28 crc kubenswrapper[4998]: I0203 07:39:28.054991 4998 scope.go:117] "RemoveContainer" containerID="a649c810cef4afdc691a3c5229e788aff42c5f413c887b3e68fca45e3cb1eff3" Feb 03 07:39:28 crc kubenswrapper[4998]: I0203 07:39:28.055247 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6db3651d-12ea-45f1-a58d-247e751cea08-utilities" (OuterVolumeSpecName: "utilities") pod "6db3651d-12ea-45f1-a58d-247e751cea08" (UID: "6db3651d-12ea-45f1-a58d-247e751cea08"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:39:28 crc kubenswrapper[4998]: E0203 07:39:28.055455 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a649c810cef4afdc691a3c5229e788aff42c5f413c887b3e68fca45e3cb1eff3\": container with ID starting with a649c810cef4afdc691a3c5229e788aff42c5f413c887b3e68fca45e3cb1eff3 not found: ID does not exist" containerID="a649c810cef4afdc691a3c5229e788aff42c5f413c887b3e68fca45e3cb1eff3" Feb 03 07:39:28 crc kubenswrapper[4998]: I0203 07:39:28.055485 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a649c810cef4afdc691a3c5229e788aff42c5f413c887b3e68fca45e3cb1eff3"} err="failed to get container status \"a649c810cef4afdc691a3c5229e788aff42c5f413c887b3e68fca45e3cb1eff3\": rpc error: code = NotFound desc = could not find container \"a649c810cef4afdc691a3c5229e788aff42c5f413c887b3e68fca45e3cb1eff3\": container with ID starting with a649c810cef4afdc691a3c5229e788aff42c5f413c887b3e68fca45e3cb1eff3 not found: ID does not exist" Feb 03 07:39:28 crc kubenswrapper[4998]: I0203 07:39:28.061516 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6db3651d-12ea-45f1-a58d-247e751cea08-kube-api-access-p9ghf" (OuterVolumeSpecName: "kube-api-access-p9ghf") pod "6db3651d-12ea-45f1-a58d-247e751cea08" (UID: "6db3651d-12ea-45f1-a58d-247e751cea08"). InnerVolumeSpecName "kube-api-access-p9ghf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:39:28 crc kubenswrapper[4998]: I0203 07:39:28.109503 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6db3651d-12ea-45f1-a58d-247e751cea08-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6db3651d-12ea-45f1-a58d-247e751cea08" (UID: "6db3651d-12ea-45f1-a58d-247e751cea08"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:39:28 crc kubenswrapper[4998]: I0203 07:39:28.155441 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p9ghf\" (UniqueName: \"kubernetes.io/projected/6db3651d-12ea-45f1-a58d-247e751cea08-kube-api-access-p9ghf\") on node \"crc\" DevicePath \"\"" Feb 03 07:39:28 crc kubenswrapper[4998]: I0203 07:39:28.155482 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6db3651d-12ea-45f1-a58d-247e751cea08-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:39:28 crc kubenswrapper[4998]: I0203 07:39:28.155491 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6db3651d-12ea-45f1-a58d-247e751cea08-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:39:28 crc kubenswrapper[4998]: I0203 07:39:28.324100 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qhw8w"] Feb 03 07:39:28 crc kubenswrapper[4998]: I0203 07:39:28.331199 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-qhw8w"] Feb 03 07:39:28 crc kubenswrapper[4998]: I0203 07:39:28.438096 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6db3651d-12ea-45f1-a58d-247e751cea08" path="/var/lib/kubelet/pods/6db3651d-12ea-45f1-a58d-247e751cea08/volumes" Feb 03 07:39:37 crc kubenswrapper[4998]: I0203 07:39:37.427846 4998 scope.go:117] "RemoveContainer" containerID="df196c0f01992317b302260b7979b96ea091f5fea889fd76e77fe90a1aef9bf3" Feb 03 07:39:37 crc kubenswrapper[4998]: E0203 07:39:37.428601 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:39:51 crc kubenswrapper[4998]: I0203 07:39:51.427213 4998 scope.go:117] "RemoveContainer" containerID="df196c0f01992317b302260b7979b96ea091f5fea889fd76e77fe90a1aef9bf3" Feb 03 07:39:51 crc kubenswrapper[4998]: E0203 07:39:51.427971 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:40:06 crc kubenswrapper[4998]: I0203 07:40:06.427666 4998 scope.go:117] "RemoveContainer" containerID="df196c0f01992317b302260b7979b96ea091f5fea889fd76e77fe90a1aef9bf3" Feb 03 07:40:06 crc kubenswrapper[4998]: E0203 07:40:06.428459 4998 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:40:15 crc kubenswrapper[4998]: I0203 07:40:15.132557 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-4lq2w"] Feb 03 07:40:15 crc kubenswrapper[4998]: E0203 07:40:15.133670 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6db3651d-12ea-45f1-a58d-247e751cea08" containerName="extract-utilities" Feb 03 07:40:15 crc kubenswrapper[4998]: I0203 07:40:15.133686 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="6db3651d-12ea-45f1-a58d-247e751cea08" containerName="extract-utilities" Feb 03 07:40:15 crc kubenswrapper[4998]: E0203 07:40:15.133701 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6db3651d-12ea-45f1-a58d-247e751cea08" containerName="extract-content" Feb 03 07:40:15 crc kubenswrapper[4998]: I0203 07:40:15.133709 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="6db3651d-12ea-45f1-a58d-247e751cea08" containerName="extract-content" Feb 03 07:40:15 crc kubenswrapper[4998]: E0203 07:40:15.133731 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6db3651d-12ea-45f1-a58d-247e751cea08" containerName="registry-server" Feb 03 07:40:15 crc kubenswrapper[4998]: I0203 07:40:15.133737 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="6db3651d-12ea-45f1-a58d-247e751cea08" containerName="registry-server" Feb 03 07:40:15 crc kubenswrapper[4998]: I0203 07:40:15.133906 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="6db3651d-12ea-45f1-a58d-247e751cea08" containerName="registry-server" Feb 03 07:40:15 crc kubenswrapper[4998]: I0203 07:40:15.137049 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4lq2w" Feb 03 07:40:15 crc kubenswrapper[4998]: I0203 07:40:15.152323 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4lq2w"] Feb 03 07:40:15 crc kubenswrapper[4998]: I0203 07:40:15.262535 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2vjnj\" (UniqueName: \"kubernetes.io/projected/156d3bb3-1a10-450c-95de-2e8a6b2d4522-kube-api-access-2vjnj\") pod \"redhat-operators-4lq2w\" (UID: \"156d3bb3-1a10-450c-95de-2e8a6b2d4522\") " pod="openshift-marketplace/redhat-operators-4lq2w" Feb 03 07:40:15 crc kubenswrapper[4998]: I0203 07:40:15.262629 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/156d3bb3-1a10-450c-95de-2e8a6b2d4522-utilities\") pod \"redhat-operators-4lq2w\" (UID: \"156d3bb3-1a10-450c-95de-2e8a6b2d4522\") " pod="openshift-marketplace/redhat-operators-4lq2w" Feb 03 07:40:15 crc kubenswrapper[4998]: I0203 07:40:15.262733 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/156d3bb3-1a10-450c-95de-2e8a6b2d4522-catalog-content\") pod \"redhat-operators-4lq2w\" (UID: \"156d3bb3-1a10-450c-95de-2e8a6b2d4522\") " pod="openshift-marketplace/redhat-operators-4lq2w" Feb 03 07:40:15 crc kubenswrapper[4998]: I0203 07:40:15.363975 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2vjnj\" (UniqueName: \"kubernetes.io/projected/156d3bb3-1a10-450c-95de-2e8a6b2d4522-kube-api-access-2vjnj\") pod \"redhat-operators-4lq2w\" (UID: \"156d3bb3-1a10-450c-95de-2e8a6b2d4522\") " pod="openshift-marketplace/redhat-operators-4lq2w" Feb 03 07:40:15 crc kubenswrapper[4998]: I0203 07:40:15.364048 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/156d3bb3-1a10-450c-95de-2e8a6b2d4522-utilities\") pod \"redhat-operators-4lq2w\" (UID: \"156d3bb3-1a10-450c-95de-2e8a6b2d4522\") " pod="openshift-marketplace/redhat-operators-4lq2w" Feb 03 07:40:15 crc kubenswrapper[4998]: I0203 07:40:15.364116 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/156d3bb3-1a10-450c-95de-2e8a6b2d4522-catalog-content\") pod \"redhat-operators-4lq2w\" (UID: \"156d3bb3-1a10-450c-95de-2e8a6b2d4522\") " pod="openshift-marketplace/redhat-operators-4lq2w" Feb 03 07:40:15 crc kubenswrapper[4998]: I0203 07:40:15.364612 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/156d3bb3-1a10-450c-95de-2e8a6b2d4522-catalog-content\") pod \"redhat-operators-4lq2w\" (UID: \"156d3bb3-1a10-450c-95de-2e8a6b2d4522\") " pod="openshift-marketplace/redhat-operators-4lq2w" Feb 03 07:40:15 crc kubenswrapper[4998]: I0203 07:40:15.364722 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/156d3bb3-1a10-450c-95de-2e8a6b2d4522-utilities\") pod \"redhat-operators-4lq2w\" (UID: \"156d3bb3-1a10-450c-95de-2e8a6b2d4522\") " pod="openshift-marketplace/redhat-operators-4lq2w" Feb 03 07:40:15 crc kubenswrapper[4998]: I0203 07:40:15.385122 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
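Two sandbox messages alternate through this section: util.go:30 ("No sandbox for pod can be found") when a newly added pod has no sandbox yet, and util.go:48 ("No ready sandbox for pod can be found") when a sandbox exists but is no longer ready, as during teardown. A sketch of that decision (the sandbox type is illustrative):

    package main

    import "fmt"

    type sandbox struct{ ready bool }

    // needNewSandbox distinguishes the two log variants seen above.
    func needNewSandbox(s *sandbox) (bool, string) {
        switch {
        case s == nil:
            return true, "No sandbox for pod can be found. Need to start a new one"
        case !s.ready:
            return true, "No ready sandbox for pod can be found. Need to start a new one"
        default:
            return false, "reuse existing sandbox"
        }
    }

    func main() {
        for _, s := range []*sandbox{nil, {ready: false}, {ready: true}} {
            _, why := needNewSandbox(s)
            fmt.Println(why)
        }
    }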
\"kube-api-access-2vjnj\" (UniqueName: \"kubernetes.io/projected/156d3bb3-1a10-450c-95de-2e8a6b2d4522-kube-api-access-2vjnj\") pod \"redhat-operators-4lq2w\" (UID: \"156d3bb3-1a10-450c-95de-2e8a6b2d4522\") " pod="openshift-marketplace/redhat-operators-4lq2w" Feb 03 07:40:15 crc kubenswrapper[4998]: I0203 07:40:15.486001 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4lq2w" Feb 03 07:40:15 crc kubenswrapper[4998]: I0203 07:40:15.919730 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4lq2w"] Feb 03 07:40:16 crc kubenswrapper[4998]: I0203 07:40:16.319535 4998 generic.go:334] "Generic (PLEG): container finished" podID="156d3bb3-1a10-450c-95de-2e8a6b2d4522" containerID="c605dbc1a817920a8e0ee93ca496ce8b1e0ac876e592d8a41a888e0e1c00c640" exitCode=0 Feb 03 07:40:16 crc kubenswrapper[4998]: I0203 07:40:16.319578 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4lq2w" event={"ID":"156d3bb3-1a10-450c-95de-2e8a6b2d4522","Type":"ContainerDied","Data":"c605dbc1a817920a8e0ee93ca496ce8b1e0ac876e592d8a41a888e0e1c00c640"} Feb 03 07:40:16 crc kubenswrapper[4998]: I0203 07:40:16.319872 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4lq2w" event={"ID":"156d3bb3-1a10-450c-95de-2e8a6b2d4522","Type":"ContainerStarted","Data":"7cb30f845c6c08b1b3aff93ac13d3d88864da46149aba09fdf5941633d8ad9e5"} Feb 03 07:40:17 crc kubenswrapper[4998]: I0203 07:40:17.330910 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4lq2w" event={"ID":"156d3bb3-1a10-450c-95de-2e8a6b2d4522","Type":"ContainerStarted","Data":"1c7a27917f819c6e16d45f4463ca0888bd58065239f1f85d5ea0d04bbd95069d"} Feb 03 07:40:18 crc kubenswrapper[4998]: I0203 07:40:18.340867 4998 generic.go:334] "Generic (PLEG): container finished" podID="156d3bb3-1a10-450c-95de-2e8a6b2d4522" containerID="1c7a27917f819c6e16d45f4463ca0888bd58065239f1f85d5ea0d04bbd95069d" exitCode=0 Feb 03 07:40:18 crc kubenswrapper[4998]: I0203 07:40:18.340915 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4lq2w" event={"ID":"156d3bb3-1a10-450c-95de-2e8a6b2d4522","Type":"ContainerDied","Data":"1c7a27917f819c6e16d45f4463ca0888bd58065239f1f85d5ea0d04bbd95069d"} Feb 03 07:40:19 crc kubenswrapper[4998]: I0203 07:40:19.355366 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4lq2w" event={"ID":"156d3bb3-1a10-450c-95de-2e8a6b2d4522","Type":"ContainerStarted","Data":"ee7ea196f4f52b7e862cd3015f7ece1cf6dd60d58b73459a394aa3f124e68baa"} Feb 03 07:40:19 crc kubenswrapper[4998]: I0203 07:40:19.379201 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-4lq2w" podStartSLOduration=1.844045082 podStartE2EDuration="4.379174348s" podCreationTimestamp="2026-02-03 07:40:15 +0000 UTC" firstStartedPulling="2026-02-03 07:40:16.321933785 +0000 UTC m=+3254.608627591" lastFinishedPulling="2026-02-03 07:40:18.857063051 +0000 UTC m=+3257.143756857" observedRunningTime="2026-02-03 07:40:19.370818971 +0000 UTC m=+3257.657512797" watchObservedRunningTime="2026-02-03 07:40:19.379174348 +0000 UTC m=+3257.665868164" Feb 03 07:40:19 crc kubenswrapper[4998]: I0203 07:40:19.427736 4998 scope.go:117] "RemoveContainer" containerID="df196c0f01992317b302260b7979b96ea091f5fea889fd76e77fe90a1aef9bf3" Feb 03 
07:40:19 crc kubenswrapper[4998]: E0203 07:40:19.428123 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:40:25 crc kubenswrapper[4998]: I0203 07:40:25.486680 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-4lq2w" Feb 03 07:40:25 crc kubenswrapper[4998]: I0203 07:40:25.487673 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-4lq2w" Feb 03 07:40:25 crc kubenswrapper[4998]: I0203 07:40:25.540225 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-4lq2w" Feb 03 07:40:26 crc kubenswrapper[4998]: I0203 07:40:26.450252 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-4lq2w" Feb 03 07:40:26 crc kubenswrapper[4998]: I0203 07:40:26.572789 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4lq2w"] Feb 03 07:40:28 crc kubenswrapper[4998]: I0203 07:40:28.418963 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-4lq2w" podUID="156d3bb3-1a10-450c-95de-2e8a6b2d4522" containerName="registry-server" containerID="cri-o://ee7ea196f4f52b7e862cd3015f7ece1cf6dd60d58b73459a394aa3f124e68baa" gracePeriod=2 Feb 03 07:40:29 crc kubenswrapper[4998]: I0203 07:40:29.412863 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4lq2w" Feb 03 07:40:29 crc kubenswrapper[4998]: I0203 07:40:29.430113 4998 generic.go:334] "Generic (PLEG): container finished" podID="156d3bb3-1a10-450c-95de-2e8a6b2d4522" containerID="ee7ea196f4f52b7e862cd3015f7ece1cf6dd60d58b73459a394aa3f124e68baa" exitCode=0 Feb 03 07:40:29 crc kubenswrapper[4998]: I0203 07:40:29.430166 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4lq2w" event={"ID":"156d3bb3-1a10-450c-95de-2e8a6b2d4522","Type":"ContainerDied","Data":"ee7ea196f4f52b7e862cd3015f7ece1cf6dd60d58b73459a394aa3f124e68baa"} Feb 03 07:40:29 crc kubenswrapper[4998]: I0203 07:40:29.430195 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4lq2w" event={"ID":"156d3bb3-1a10-450c-95de-2e8a6b2d4522","Type":"ContainerDied","Data":"7cb30f845c6c08b1b3aff93ac13d3d88864da46149aba09fdf5941633d8ad9e5"} Feb 03 07:40:29 crc kubenswrapper[4998]: I0203 07:40:29.430211 4998 scope.go:117] "RemoveContainer" containerID="ee7ea196f4f52b7e862cd3015f7ece1cf6dd60d58b73459a394aa3f124e68baa" Feb 03 07:40:29 crc kubenswrapper[4998]: I0203 07:40:29.430375 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4lq2w" Feb 03 07:40:29 crc kubenswrapper[4998]: I0203 07:40:29.454801 4998 scope.go:117] "RemoveContainer" containerID="1c7a27917f819c6e16d45f4463ca0888bd58065239f1f85d5ea0d04bbd95069d" Feb 03 07:40:29 crc kubenswrapper[4998]: I0203 07:40:29.474465 4998 scope.go:117] "RemoveContainer" containerID="c605dbc1a817920a8e0ee93ca496ce8b1e0ac876e592d8a41a888e0e1c00c640" Feb 03 07:40:29 crc kubenswrapper[4998]: I0203 07:40:29.496201 4998 scope.go:117] "RemoveContainer" containerID="ee7ea196f4f52b7e862cd3015f7ece1cf6dd60d58b73459a394aa3f124e68baa" Feb 03 07:40:29 crc kubenswrapper[4998]: E0203 07:40:29.496618 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee7ea196f4f52b7e862cd3015f7ece1cf6dd60d58b73459a394aa3f124e68baa\": container with ID starting with ee7ea196f4f52b7e862cd3015f7ece1cf6dd60d58b73459a394aa3f124e68baa not found: ID does not exist" containerID="ee7ea196f4f52b7e862cd3015f7ece1cf6dd60d58b73459a394aa3f124e68baa" Feb 03 07:40:29 crc kubenswrapper[4998]: I0203 07:40:29.496649 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee7ea196f4f52b7e862cd3015f7ece1cf6dd60d58b73459a394aa3f124e68baa"} err="failed to get container status \"ee7ea196f4f52b7e862cd3015f7ece1cf6dd60d58b73459a394aa3f124e68baa\": rpc error: code = NotFound desc = could not find container \"ee7ea196f4f52b7e862cd3015f7ece1cf6dd60d58b73459a394aa3f124e68baa\": container with ID starting with ee7ea196f4f52b7e862cd3015f7ece1cf6dd60d58b73459a394aa3f124e68baa not found: ID does not exist" Feb 03 07:40:29 crc kubenswrapper[4998]: I0203 07:40:29.496670 4998 scope.go:117] "RemoveContainer" containerID="1c7a27917f819c6e16d45f4463ca0888bd58065239f1f85d5ea0d04bbd95069d" Feb 03 07:40:29 crc kubenswrapper[4998]: E0203 07:40:29.496932 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c7a27917f819c6e16d45f4463ca0888bd58065239f1f85d5ea0d04bbd95069d\": container with ID starting with 1c7a27917f819c6e16d45f4463ca0888bd58065239f1f85d5ea0d04bbd95069d not found: ID does not exist" containerID="1c7a27917f819c6e16d45f4463ca0888bd58065239f1f85d5ea0d04bbd95069d" Feb 03 07:40:29 crc kubenswrapper[4998]: I0203 07:40:29.496953 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c7a27917f819c6e16d45f4463ca0888bd58065239f1f85d5ea0d04bbd95069d"} err="failed to get container status \"1c7a27917f819c6e16d45f4463ca0888bd58065239f1f85d5ea0d04bbd95069d\": rpc error: code = NotFound desc = could not find container \"1c7a27917f819c6e16d45f4463ca0888bd58065239f1f85d5ea0d04bbd95069d\": container with ID starting with 1c7a27917f819c6e16d45f4463ca0888bd58065239f1f85d5ea0d04bbd95069d not found: ID does not exist" Feb 03 07:40:29 crc kubenswrapper[4998]: I0203 07:40:29.496965 4998 scope.go:117] "RemoveContainer" containerID="c605dbc1a817920a8e0ee93ca496ce8b1e0ac876e592d8a41a888e0e1c00c640" Feb 03 07:40:29 crc kubenswrapper[4998]: E0203 07:40:29.497134 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c605dbc1a817920a8e0ee93ca496ce8b1e0ac876e592d8a41a888e0e1c00c640\": container with ID starting with c605dbc1a817920a8e0ee93ca496ce8b1e0ac876e592d8a41a888e0e1c00c640 not found: ID does not exist" containerID="c605dbc1a817920a8e0ee93ca496ce8b1e0ac876e592d8a41a888e0e1c00c640" 
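The three "ContainerStatus from runtime service failed ... NotFound" / "DeleteContainer returned error" pairs around this point are a benign race, not a real failure: the kubelet has just removed the pod's containers, and a follow-up status query finds the IDs already gone from CRI-O, so cleanup simply continues. A minimal Go sketch of that pattern, using the standard gRPC status/codes packages; removeIfAbsentOK and statusFn are hypothetical names for illustration, not kubelet's actual API:

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// statusFn stands in for a CRI ContainerStatus call; it returns a
// gRPC error carrying codes.NotFound once the container is gone.
func removeIfAbsentOK(id string, statusFn func(string) error) error {
	if err := statusFn(id); err != nil {
		if status.Code(err) == codes.NotFound {
			// Already removed by the runtime: treat as success,
			// which is why the log pairs each error entry with
			// normal continuation rather than a retry.
			return nil
		}
		return fmt.Errorf("failed to get container status %q: %w", id, err)
	}
	// ...issue the actual RemoveContainer call here...
	return nil
}

func main() {
	gone := func(string) error {
		return status.Error(codes.NotFound, "ID does not exist")
	}
	fmt.Println(removeIfAbsentOK("ee7ea196f4f5", gone)) // <nil>: already gone, treated as removed
}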
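The pod_startup_latency_tracker entry above (07:40:19) reports two durations for redhat-operators-4lq2w: podStartE2EDuration is observedRunningTime minus podCreationTimestamp, while podStartSLOduration additionally excludes the image-pull window (lastFinishedPulling minus firstStartedPulling), per the pod startup SLI convention. The sketch below re-derives both figures from the logged timestamps:

package main

import (
	"fmt"
	"time"
)

// Layout matching the tracker's timestamp format, e.g.
// "2026-02-03 07:40:19.379174348 +0000 UTC"; the .999999999
// fraction is optional so the whole-second creation time parses too.
const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

func mustParse(s string) time.Time {
	t, err := time.Parse(layout, s)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	created := mustParse("2026-02-03 07:40:15 +0000 UTC")
	firstPull := mustParse("2026-02-03 07:40:16.321933785 +0000 UTC")
	lastPull := mustParse("2026-02-03 07:40:18.857063051 +0000 UTC")
	running := mustParse("2026-02-03 07:40:19.379174348 +0000 UTC")

	e2e := running.Sub(created)
	slo := e2e - lastPull.Sub(firstPull)
	fmt.Println(e2e) // 4.379174348s, the logged podStartE2EDuration
	fmt.Println(slo) // 1.844045082s, the logged podStartSLOduration
}

The later tracker entry for certified-operators-gz2cv satisfies the same identity: 4.303430188s end-to-end minus 1.515242564s of pulling gives the logged 2.788187624s SLO duration.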
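The recurring "back-off 5m0s restarting failed container" entries for machine-config-daemon throughout this log reflect the kubelet's per-container restart backoff, which doubles after each crash up to a cap; each scope.go "RemoveContainer" entry followed by an E0203 pod_workers "Error syncing pod, skipping" entry is one sync attempt rejected while that window is open. A sketch of the doubling, assuming the kubelet's documented defaults of a 10s initial delay and a 5m cap (values assumed here, not read from this log):

package main

import (
	"fmt"
	"time"
)

// crashLoopDelay mimics the doubling backoff applied between restarts
// of a crashing container: the delay doubles per restart and is
// clamped at max.
func crashLoopDelay(restarts int, initial, max time.Duration) time.Duration {
	d := initial
	for i := 0; i < restarts; i++ {
		d *= 2
		if d > max {
			return max
		}
	}
	return d
}

func main() {
	for r := 0; r <= 6; r++ {
		fmt.Printf("restart %d -> back-off %s\n", r, crashLoopDelay(r, 10*time.Second, 5*time.Minute))
	}
	// restart 5 onward prints 5m0s, matching the repeated
	// "back-off 5m0s restarting failed container" entries.
}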
Feb 03 07:40:29 crc kubenswrapper[4998]: I0203 07:40:29.497153 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c605dbc1a817920a8e0ee93ca496ce8b1e0ac876e592d8a41a888e0e1c00c640"} err="failed to get container status \"c605dbc1a817920a8e0ee93ca496ce8b1e0ac876e592d8a41a888e0e1c00c640\": rpc error: code = NotFound desc = could not find container \"c605dbc1a817920a8e0ee93ca496ce8b1e0ac876e592d8a41a888e0e1c00c640\": container with ID starting with c605dbc1a817920a8e0ee93ca496ce8b1e0ac876e592d8a41a888e0e1c00c640 not found: ID does not exist" Feb 03 07:40:29 crc kubenswrapper[4998]: I0203 07:40:29.570163 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/156d3bb3-1a10-450c-95de-2e8a6b2d4522-catalog-content\") pod \"156d3bb3-1a10-450c-95de-2e8a6b2d4522\" (UID: \"156d3bb3-1a10-450c-95de-2e8a6b2d4522\") " Feb 03 07:40:29 crc kubenswrapper[4998]: I0203 07:40:29.570248 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2vjnj\" (UniqueName: \"kubernetes.io/projected/156d3bb3-1a10-450c-95de-2e8a6b2d4522-kube-api-access-2vjnj\") pod \"156d3bb3-1a10-450c-95de-2e8a6b2d4522\" (UID: \"156d3bb3-1a10-450c-95de-2e8a6b2d4522\") " Feb 03 07:40:29 crc kubenswrapper[4998]: I0203 07:40:29.570365 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/156d3bb3-1a10-450c-95de-2e8a6b2d4522-utilities\") pod \"156d3bb3-1a10-450c-95de-2e8a6b2d4522\" (UID: \"156d3bb3-1a10-450c-95de-2e8a6b2d4522\") " Feb 03 07:40:29 crc kubenswrapper[4998]: I0203 07:40:29.571166 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/156d3bb3-1a10-450c-95de-2e8a6b2d4522-utilities" (OuterVolumeSpecName: "utilities") pod "156d3bb3-1a10-450c-95de-2e8a6b2d4522" (UID: "156d3bb3-1a10-450c-95de-2e8a6b2d4522"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:40:29 crc kubenswrapper[4998]: I0203 07:40:29.575693 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/156d3bb3-1a10-450c-95de-2e8a6b2d4522-kube-api-access-2vjnj" (OuterVolumeSpecName: "kube-api-access-2vjnj") pod "156d3bb3-1a10-450c-95de-2e8a6b2d4522" (UID: "156d3bb3-1a10-450c-95de-2e8a6b2d4522"). InnerVolumeSpecName "kube-api-access-2vjnj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:40:29 crc kubenswrapper[4998]: I0203 07:40:29.672007 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/156d3bb3-1a10-450c-95de-2e8a6b2d4522-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:40:29 crc kubenswrapper[4998]: I0203 07:40:29.672045 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2vjnj\" (UniqueName: \"kubernetes.io/projected/156d3bb3-1a10-450c-95de-2e8a6b2d4522-kube-api-access-2vjnj\") on node \"crc\" DevicePath \"\"" Feb 03 07:40:29 crc kubenswrapper[4998]: I0203 07:40:29.696261 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/156d3bb3-1a10-450c-95de-2e8a6b2d4522-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "156d3bb3-1a10-450c-95de-2e8a6b2d4522" (UID: "156d3bb3-1a10-450c-95de-2e8a6b2d4522"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:40:29 crc kubenswrapper[4998]: I0203 07:40:29.762157 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4lq2w"] Feb 03 07:40:29 crc kubenswrapper[4998]: I0203 07:40:29.768469 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-4lq2w"] Feb 03 07:40:29 crc kubenswrapper[4998]: I0203 07:40:29.773758 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/156d3bb3-1a10-450c-95de-2e8a6b2d4522-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:40:30 crc kubenswrapper[4998]: I0203 07:40:30.427706 4998 scope.go:117] "RemoveContainer" containerID="df196c0f01992317b302260b7979b96ea091f5fea889fd76e77fe90a1aef9bf3" Feb 03 07:40:30 crc kubenswrapper[4998]: E0203 07:40:30.428103 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:40:30 crc kubenswrapper[4998]: I0203 07:40:30.443833 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="156d3bb3-1a10-450c-95de-2e8a6b2d4522" path="/var/lib/kubelet/pods/156d3bb3-1a10-450c-95de-2e8a6b2d4522/volumes" Feb 03 07:40:41 crc kubenswrapper[4998]: I0203 07:40:41.427850 4998 scope.go:117] "RemoveContainer" containerID="df196c0f01992317b302260b7979b96ea091f5fea889fd76e77fe90a1aef9bf3" Feb 03 07:40:41 crc kubenswrapper[4998]: E0203 07:40:41.428541 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:40:55 crc kubenswrapper[4998]: I0203 07:40:55.427821 4998 scope.go:117] "RemoveContainer" containerID="df196c0f01992317b302260b7979b96ea091f5fea889fd76e77fe90a1aef9bf3" Feb 03 07:40:55 crc kubenswrapper[4998]: E0203 07:40:55.428628 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:41:10 crc kubenswrapper[4998]: I0203 07:41:10.428040 4998 scope.go:117] "RemoveContainer" containerID="df196c0f01992317b302260b7979b96ea091f5fea889fd76e77fe90a1aef9bf3" Feb 03 07:41:10 crc kubenswrapper[4998]: E0203 07:41:10.428880 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:41:23 crc kubenswrapper[4998]: I0203 07:41:23.427535 4998 scope.go:117] "RemoveContainer" containerID="df196c0f01992317b302260b7979b96ea091f5fea889fd76e77fe90a1aef9bf3" Feb 03 07:41:23 crc kubenswrapper[4998]: E0203 07:41:23.428316 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:41:35 crc kubenswrapper[4998]: I0203 07:41:35.427626 4998 scope.go:117] "RemoveContainer" containerID="df196c0f01992317b302260b7979b96ea091f5fea889fd76e77fe90a1aef9bf3" Feb 03 07:41:35 crc kubenswrapper[4998]: E0203 07:41:35.428374 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:41:48 crc kubenswrapper[4998]: I0203 07:41:48.427466 4998 scope.go:117] "RemoveContainer" containerID="df196c0f01992317b302260b7979b96ea091f5fea889fd76e77fe90a1aef9bf3" Feb 03 07:41:48 crc kubenswrapper[4998]: E0203 07:41:48.428261 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:42:03 crc kubenswrapper[4998]: I0203 07:42:03.428234 4998 scope.go:117] "RemoveContainer" containerID="df196c0f01992317b302260b7979b96ea091f5fea889fd76e77fe90a1aef9bf3" Feb 03 07:42:03 crc kubenswrapper[4998]: E0203 07:42:03.429496 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:42:16 crc kubenswrapper[4998]: I0203 07:42:16.428445 4998 scope.go:117] "RemoveContainer" containerID="df196c0f01992317b302260b7979b96ea091f5fea889fd76e77fe90a1aef9bf3" Feb 03 07:42:17 crc kubenswrapper[4998]: I0203 07:42:17.172946 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerStarted","Data":"b49fba429211fe3bba25de88f6b60869b047b3f46ed864ab01dc7a70af0efc3b"} Feb 03 07:44:42 crc kubenswrapper[4998]: I0203 07:44:42.754093 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe 
status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:44:42 crc kubenswrapper[4998]: I0203 07:44:42.754618 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:45:00 crc kubenswrapper[4998]: I0203 07:45:00.155184 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501745-klt7g"] Feb 03 07:45:00 crc kubenswrapper[4998]: E0203 07:45:00.156125 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="156d3bb3-1a10-450c-95de-2e8a6b2d4522" containerName="extract-utilities" Feb 03 07:45:00 crc kubenswrapper[4998]: I0203 07:45:00.156143 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="156d3bb3-1a10-450c-95de-2e8a6b2d4522" containerName="extract-utilities" Feb 03 07:45:00 crc kubenswrapper[4998]: E0203 07:45:00.156179 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="156d3bb3-1a10-450c-95de-2e8a6b2d4522" containerName="registry-server" Feb 03 07:45:00 crc kubenswrapper[4998]: I0203 07:45:00.156189 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="156d3bb3-1a10-450c-95de-2e8a6b2d4522" containerName="registry-server" Feb 03 07:45:00 crc kubenswrapper[4998]: E0203 07:45:00.156204 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="156d3bb3-1a10-450c-95de-2e8a6b2d4522" containerName="extract-content" Feb 03 07:45:00 crc kubenswrapper[4998]: I0203 07:45:00.156211 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="156d3bb3-1a10-450c-95de-2e8a6b2d4522" containerName="extract-content" Feb 03 07:45:00 crc kubenswrapper[4998]: I0203 07:45:00.156366 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="156d3bb3-1a10-450c-95de-2e8a6b2d4522" containerName="registry-server" Feb 03 07:45:00 crc kubenswrapper[4998]: I0203 07:45:00.156995 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501745-klt7g" Feb 03 07:45:00 crc kubenswrapper[4998]: I0203 07:45:00.159306 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 03 07:45:00 crc kubenswrapper[4998]: I0203 07:45:00.161139 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 03 07:45:00 crc kubenswrapper[4998]: I0203 07:45:00.164346 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501745-klt7g"] Feb 03 07:45:00 crc kubenswrapper[4998]: I0203 07:45:00.231001 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/55f98dda-91a3-458f-9678-a547da6710b6-secret-volume\") pod \"collect-profiles-29501745-klt7g\" (UID: \"55f98dda-91a3-458f-9678-a547da6710b6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501745-klt7g" Feb 03 07:45:00 crc kubenswrapper[4998]: I0203 07:45:00.231105 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xcb2\" (UniqueName: \"kubernetes.io/projected/55f98dda-91a3-458f-9678-a547da6710b6-kube-api-access-5xcb2\") pod \"collect-profiles-29501745-klt7g\" (UID: \"55f98dda-91a3-458f-9678-a547da6710b6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501745-klt7g" Feb 03 07:45:00 crc kubenswrapper[4998]: I0203 07:45:00.231151 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/55f98dda-91a3-458f-9678-a547da6710b6-config-volume\") pod \"collect-profiles-29501745-klt7g\" (UID: \"55f98dda-91a3-458f-9678-a547da6710b6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501745-klt7g" Feb 03 07:45:00 crc kubenswrapper[4998]: I0203 07:45:00.332098 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/55f98dda-91a3-458f-9678-a547da6710b6-secret-volume\") pod \"collect-profiles-29501745-klt7g\" (UID: \"55f98dda-91a3-458f-9678-a547da6710b6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501745-klt7g" Feb 03 07:45:00 crc kubenswrapper[4998]: I0203 07:45:00.332189 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xcb2\" (UniqueName: \"kubernetes.io/projected/55f98dda-91a3-458f-9678-a547da6710b6-kube-api-access-5xcb2\") pod \"collect-profiles-29501745-klt7g\" (UID: \"55f98dda-91a3-458f-9678-a547da6710b6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501745-klt7g" Feb 03 07:45:00 crc kubenswrapper[4998]: I0203 07:45:00.332226 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/55f98dda-91a3-458f-9678-a547da6710b6-config-volume\") pod \"collect-profiles-29501745-klt7g\" (UID: \"55f98dda-91a3-458f-9678-a547da6710b6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501745-klt7g" Feb 03 07:45:00 crc kubenswrapper[4998]: I0203 07:45:00.333318 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/55f98dda-91a3-458f-9678-a547da6710b6-config-volume\") pod 
\"collect-profiles-29501745-klt7g\" (UID: \"55f98dda-91a3-458f-9678-a547da6710b6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501745-klt7g" Feb 03 07:45:00 crc kubenswrapper[4998]: I0203 07:45:00.347962 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/55f98dda-91a3-458f-9678-a547da6710b6-secret-volume\") pod \"collect-profiles-29501745-klt7g\" (UID: \"55f98dda-91a3-458f-9678-a547da6710b6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501745-klt7g" Feb 03 07:45:00 crc kubenswrapper[4998]: I0203 07:45:00.358809 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xcb2\" (UniqueName: \"kubernetes.io/projected/55f98dda-91a3-458f-9678-a547da6710b6-kube-api-access-5xcb2\") pod \"collect-profiles-29501745-klt7g\" (UID: \"55f98dda-91a3-458f-9678-a547da6710b6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501745-klt7g" Feb 03 07:45:00 crc kubenswrapper[4998]: I0203 07:45:00.482496 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501745-klt7g" Feb 03 07:45:01 crc kubenswrapper[4998]: I0203 07:45:01.702926 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501745-klt7g"] Feb 03 07:45:02 crc kubenswrapper[4998]: I0203 07:45:02.299061 4998 generic.go:334] "Generic (PLEG): container finished" podID="55f98dda-91a3-458f-9678-a547da6710b6" containerID="8ab0516c7335841f7c6eff7492237ec5aa64c04a2c50c48514d16db82f9601fe" exitCode=0 Feb 03 07:45:02 crc kubenswrapper[4998]: I0203 07:45:02.299271 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501745-klt7g" event={"ID":"55f98dda-91a3-458f-9678-a547da6710b6","Type":"ContainerDied","Data":"8ab0516c7335841f7c6eff7492237ec5aa64c04a2c50c48514d16db82f9601fe"} Feb 03 07:45:02 crc kubenswrapper[4998]: I0203 07:45:02.299587 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501745-klt7g" event={"ID":"55f98dda-91a3-458f-9678-a547da6710b6","Type":"ContainerStarted","Data":"65e85705cbe6883fe6420e520c24953863bcfdec235853bd3890b63b96f490e3"} Feb 03 07:45:03 crc kubenswrapper[4998]: I0203 07:45:03.551387 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501745-klt7g" Feb 03 07:45:03 crc kubenswrapper[4998]: I0203 07:45:03.673707 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/55f98dda-91a3-458f-9678-a547da6710b6-config-volume\") pod \"55f98dda-91a3-458f-9678-a547da6710b6\" (UID: \"55f98dda-91a3-458f-9678-a547da6710b6\") " Feb 03 07:45:03 crc kubenswrapper[4998]: I0203 07:45:03.673817 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/55f98dda-91a3-458f-9678-a547da6710b6-secret-volume\") pod \"55f98dda-91a3-458f-9678-a547da6710b6\" (UID: \"55f98dda-91a3-458f-9678-a547da6710b6\") " Feb 03 07:45:03 crc kubenswrapper[4998]: I0203 07:45:03.673897 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5xcb2\" (UniqueName: \"kubernetes.io/projected/55f98dda-91a3-458f-9678-a547da6710b6-kube-api-access-5xcb2\") pod \"55f98dda-91a3-458f-9678-a547da6710b6\" (UID: \"55f98dda-91a3-458f-9678-a547da6710b6\") " Feb 03 07:45:03 crc kubenswrapper[4998]: I0203 07:45:03.674827 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/55f98dda-91a3-458f-9678-a547da6710b6-config-volume" (OuterVolumeSpecName: "config-volume") pod "55f98dda-91a3-458f-9678-a547da6710b6" (UID: "55f98dda-91a3-458f-9678-a547da6710b6"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:45:03 crc kubenswrapper[4998]: I0203 07:45:03.680001 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55f98dda-91a3-458f-9678-a547da6710b6-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "55f98dda-91a3-458f-9678-a547da6710b6" (UID: "55f98dda-91a3-458f-9678-a547da6710b6"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:45:03 crc kubenswrapper[4998]: I0203 07:45:03.680098 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55f98dda-91a3-458f-9678-a547da6710b6-kube-api-access-5xcb2" (OuterVolumeSpecName: "kube-api-access-5xcb2") pod "55f98dda-91a3-458f-9678-a547da6710b6" (UID: "55f98dda-91a3-458f-9678-a547da6710b6"). InnerVolumeSpecName "kube-api-access-5xcb2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:45:03 crc kubenswrapper[4998]: I0203 07:45:03.775906 4998 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/55f98dda-91a3-458f-9678-a547da6710b6-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 03 07:45:03 crc kubenswrapper[4998]: I0203 07:45:03.775946 4998 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/55f98dda-91a3-458f-9678-a547da6710b6-config-volume\") on node \"crc\" DevicePath \"\"" Feb 03 07:45:03 crc kubenswrapper[4998]: I0203 07:45:03.775957 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5xcb2\" (UniqueName: \"kubernetes.io/projected/55f98dda-91a3-458f-9678-a547da6710b6-kube-api-access-5xcb2\") on node \"crc\" DevicePath \"\"" Feb 03 07:45:04 crc kubenswrapper[4998]: I0203 07:45:04.312368 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501745-klt7g" event={"ID":"55f98dda-91a3-458f-9678-a547da6710b6","Type":"ContainerDied","Data":"65e85705cbe6883fe6420e520c24953863bcfdec235853bd3890b63b96f490e3"} Feb 03 07:45:04 crc kubenswrapper[4998]: I0203 07:45:04.312403 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="65e85705cbe6883fe6420e520c24953863bcfdec235853bd3890b63b96f490e3" Feb 03 07:45:04 crc kubenswrapper[4998]: I0203 07:45:04.312442 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501745-klt7g" Feb 03 07:45:04 crc kubenswrapper[4998]: I0203 07:45:04.645076 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501700-m48cz"] Feb 03 07:45:04 crc kubenswrapper[4998]: I0203 07:45:04.649940 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501700-m48cz"] Feb 03 07:45:06 crc kubenswrapper[4998]: I0203 07:45:06.435750 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3825c7e6-09d8-43cb-bdd0-b71fb2690844" path="/var/lib/kubelet/pods/3825c7e6-09d8-43cb-bdd0-b71fb2690844/volumes" Feb 03 07:45:06 crc kubenswrapper[4998]: I0203 07:45:06.695857 4998 scope.go:117] "RemoveContainer" containerID="3e044462a2ceba5cd85c9ae93e890507060884ab0cad0ece206a95dd44f10a1e" Feb 03 07:45:12 crc kubenswrapper[4998]: I0203 07:45:12.753949 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:45:12 crc kubenswrapper[4998]: I0203 07:45:12.754551 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:45:42 crc kubenswrapper[4998]: I0203 07:45:42.754826 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
start-of-body= Feb 03 07:45:42 crc kubenswrapper[4998]: I0203 07:45:42.756155 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:45:42 crc kubenswrapper[4998]: I0203 07:45:42.756640 4998 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" Feb 03 07:45:42 crc kubenswrapper[4998]: I0203 07:45:42.758356 4998 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b49fba429211fe3bba25de88f6b60869b047b3f46ed864ab01dc7a70af0efc3b"} pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 03 07:45:42 crc kubenswrapper[4998]: I0203 07:45:42.758517 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" containerID="cri-o://b49fba429211fe3bba25de88f6b60869b047b3f46ed864ab01dc7a70af0efc3b" gracePeriod=600 Feb 03 07:45:43 crc kubenswrapper[4998]: I0203 07:45:43.590574 4998 generic.go:334] "Generic (PLEG): container finished" podID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerID="b49fba429211fe3bba25de88f6b60869b047b3f46ed864ab01dc7a70af0efc3b" exitCode=0 Feb 03 07:45:43 crc kubenswrapper[4998]: I0203 07:45:43.590628 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerDied","Data":"b49fba429211fe3bba25de88f6b60869b047b3f46ed864ab01dc7a70af0efc3b"} Feb 03 07:45:43 crc kubenswrapper[4998]: I0203 07:45:43.591270 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerStarted","Data":"f71853e9e2d9cfb53b9308a2cb0b35d9052820e2de6fb1b575190788cc7cc881"} Feb 03 07:45:43 crc kubenswrapper[4998]: I0203 07:45:43.591295 4998 scope.go:117] "RemoveContainer" containerID="df196c0f01992317b302260b7979b96ea091f5fea889fd76e77fe90a1aef9bf3" Feb 03 07:48:12 crc kubenswrapper[4998]: I0203 07:48:12.754185 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:48:12 crc kubenswrapper[4998]: I0203 07:48:12.755048 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:48:42 crc kubenswrapper[4998]: I0203 07:48:42.754843 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:48:42 crc kubenswrapper[4998]: I0203 07:48:42.755348 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:49:12 crc kubenswrapper[4998]: I0203 07:49:12.754604 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:49:12 crc kubenswrapper[4998]: I0203 07:49:12.755255 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:49:12 crc kubenswrapper[4998]: I0203 07:49:12.755302 4998 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" Feb 03 07:49:12 crc kubenswrapper[4998]: I0203 07:49:12.755965 4998 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f71853e9e2d9cfb53b9308a2cb0b35d9052820e2de6fb1b575190788cc7cc881"} pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 03 07:49:12 crc kubenswrapper[4998]: I0203 07:49:12.756018 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" containerID="cri-o://f71853e9e2d9cfb53b9308a2cb0b35d9052820e2de6fb1b575190788cc7cc881" gracePeriod=600 Feb 03 07:49:13 crc kubenswrapper[4998]: I0203 07:49:13.078739 4998 generic.go:334] "Generic (PLEG): container finished" podID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerID="f71853e9e2d9cfb53b9308a2cb0b35d9052820e2de6fb1b575190788cc7cc881" exitCode=0 Feb 03 07:49:13 crc kubenswrapper[4998]: I0203 07:49:13.078807 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerDied","Data":"f71853e9e2d9cfb53b9308a2cb0b35d9052820e2de6fb1b575190788cc7cc881"} Feb 03 07:49:13 crc kubenswrapper[4998]: I0203 07:49:13.079234 4998 scope.go:117] "RemoveContainer" containerID="b49fba429211fe3bba25de88f6b60869b047b3f46ed864ab01dc7a70af0efc3b" Feb 03 07:49:13 crc kubenswrapper[4998]: E0203 07:49:13.429141 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 
07:49:14 crc kubenswrapper[4998]: I0203 07:49:14.086614 4998 scope.go:117] "RemoveContainer" containerID="f71853e9e2d9cfb53b9308a2cb0b35d9052820e2de6fb1b575190788cc7cc881" Feb 03 07:49:14 crc kubenswrapper[4998]: E0203 07:49:14.086853 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:49:29 crc kubenswrapper[4998]: I0203 07:49:29.427451 4998 scope.go:117] "RemoveContainer" containerID="f71853e9e2d9cfb53b9308a2cb0b35d9052820e2de6fb1b575190788cc7cc881" Feb 03 07:49:29 crc kubenswrapper[4998]: E0203 07:49:29.428080 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:49:39 crc kubenswrapper[4998]: I0203 07:49:39.950903 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-gz2cv"] Feb 03 07:49:39 crc kubenswrapper[4998]: E0203 07:49:39.951935 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55f98dda-91a3-458f-9678-a547da6710b6" containerName="collect-profiles" Feb 03 07:49:39 crc kubenswrapper[4998]: I0203 07:49:39.951949 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="55f98dda-91a3-458f-9678-a547da6710b6" containerName="collect-profiles" Feb 03 07:49:39 crc kubenswrapper[4998]: I0203 07:49:39.952095 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="55f98dda-91a3-458f-9678-a547da6710b6" containerName="collect-profiles" Feb 03 07:49:39 crc kubenswrapper[4998]: I0203 07:49:39.953178 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gz2cv" Feb 03 07:49:39 crc kubenswrapper[4998]: I0203 07:49:39.967230 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gz2cv"] Feb 03 07:49:40 crc kubenswrapper[4998]: I0203 07:49:40.044891 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fb9a6d9-c18e-485d-87ce-2eed402236b4-utilities\") pod \"certified-operators-gz2cv\" (UID: \"0fb9a6d9-c18e-485d-87ce-2eed402236b4\") " pod="openshift-marketplace/certified-operators-gz2cv" Feb 03 07:49:40 crc kubenswrapper[4998]: I0203 07:49:40.044958 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fb9a6d9-c18e-485d-87ce-2eed402236b4-catalog-content\") pod \"certified-operators-gz2cv\" (UID: \"0fb9a6d9-c18e-485d-87ce-2eed402236b4\") " pod="openshift-marketplace/certified-operators-gz2cv" Feb 03 07:49:40 crc kubenswrapper[4998]: I0203 07:49:40.045040 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vg7pb\" (UniqueName: \"kubernetes.io/projected/0fb9a6d9-c18e-485d-87ce-2eed402236b4-kube-api-access-vg7pb\") pod \"certified-operators-gz2cv\" (UID: \"0fb9a6d9-c18e-485d-87ce-2eed402236b4\") " pod="openshift-marketplace/certified-operators-gz2cv" Feb 03 07:49:40 crc kubenswrapper[4998]: I0203 07:49:40.146490 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fb9a6d9-c18e-485d-87ce-2eed402236b4-utilities\") pod \"certified-operators-gz2cv\" (UID: \"0fb9a6d9-c18e-485d-87ce-2eed402236b4\") " pod="openshift-marketplace/certified-operators-gz2cv" Feb 03 07:49:40 crc kubenswrapper[4998]: I0203 07:49:40.146550 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fb9a6d9-c18e-485d-87ce-2eed402236b4-catalog-content\") pod \"certified-operators-gz2cv\" (UID: \"0fb9a6d9-c18e-485d-87ce-2eed402236b4\") " pod="openshift-marketplace/certified-operators-gz2cv" Feb 03 07:49:40 crc kubenswrapper[4998]: I0203 07:49:40.146628 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vg7pb\" (UniqueName: \"kubernetes.io/projected/0fb9a6d9-c18e-485d-87ce-2eed402236b4-kube-api-access-vg7pb\") pod \"certified-operators-gz2cv\" (UID: \"0fb9a6d9-c18e-485d-87ce-2eed402236b4\") " pod="openshift-marketplace/certified-operators-gz2cv" Feb 03 07:49:40 crc kubenswrapper[4998]: I0203 07:49:40.147935 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fb9a6d9-c18e-485d-87ce-2eed402236b4-utilities\") pod \"certified-operators-gz2cv\" (UID: \"0fb9a6d9-c18e-485d-87ce-2eed402236b4\") " pod="openshift-marketplace/certified-operators-gz2cv" Feb 03 07:49:40 crc kubenswrapper[4998]: I0203 07:49:40.147970 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fb9a6d9-c18e-485d-87ce-2eed402236b4-catalog-content\") pod \"certified-operators-gz2cv\" (UID: \"0fb9a6d9-c18e-485d-87ce-2eed402236b4\") " pod="openshift-marketplace/certified-operators-gz2cv" Feb 03 07:49:40 crc kubenswrapper[4998]: I0203 07:49:40.166551 4998 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-vg7pb\" (UniqueName: \"kubernetes.io/projected/0fb9a6d9-c18e-485d-87ce-2eed402236b4-kube-api-access-vg7pb\") pod \"certified-operators-gz2cv\" (UID: \"0fb9a6d9-c18e-485d-87ce-2eed402236b4\") " pod="openshift-marketplace/certified-operators-gz2cv" Feb 03 07:49:40 crc kubenswrapper[4998]: I0203 07:49:40.277908 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gz2cv" Feb 03 07:49:40 crc kubenswrapper[4998]: I0203 07:49:40.746199 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gz2cv"] Feb 03 07:49:41 crc kubenswrapper[4998]: I0203 07:49:41.259614 4998 generic.go:334] "Generic (PLEG): container finished" podID="0fb9a6d9-c18e-485d-87ce-2eed402236b4" containerID="76d88a96015b56674f35f9336dc89820bcc305b216aff30fe18204382db1a0f6" exitCode=0 Feb 03 07:49:41 crc kubenswrapper[4998]: I0203 07:49:41.259726 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gz2cv" event={"ID":"0fb9a6d9-c18e-485d-87ce-2eed402236b4","Type":"ContainerDied","Data":"76d88a96015b56674f35f9336dc89820bcc305b216aff30fe18204382db1a0f6"} Feb 03 07:49:41 crc kubenswrapper[4998]: I0203 07:49:41.259994 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gz2cv" event={"ID":"0fb9a6d9-c18e-485d-87ce-2eed402236b4","Type":"ContainerStarted","Data":"0d96bddd9673609275548c5696663f3b1539d98f8f03fa2b05edf667dbb0c85c"} Feb 03 07:49:41 crc kubenswrapper[4998]: I0203 07:49:41.261943 4998 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 03 07:49:41 crc kubenswrapper[4998]: I0203 07:49:41.427021 4998 scope.go:117] "RemoveContainer" containerID="f71853e9e2d9cfb53b9308a2cb0b35d9052820e2de6fb1b575190788cc7cc881" Feb 03 07:49:41 crc kubenswrapper[4998]: E0203 07:49:41.427419 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:49:42 crc kubenswrapper[4998]: I0203 07:49:42.269201 4998 generic.go:334] "Generic (PLEG): container finished" podID="0fb9a6d9-c18e-485d-87ce-2eed402236b4" containerID="9565c9f8aed58a3ceb6ac56de7f387a17fd8733d706e20a6036f98ac634ed7e5" exitCode=0 Feb 03 07:49:42 crc kubenswrapper[4998]: I0203 07:49:42.269312 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gz2cv" event={"ID":"0fb9a6d9-c18e-485d-87ce-2eed402236b4","Type":"ContainerDied","Data":"9565c9f8aed58a3ceb6ac56de7f387a17fd8733d706e20a6036f98ac634ed7e5"} Feb 03 07:49:43 crc kubenswrapper[4998]: I0203 07:49:43.277630 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gz2cv" event={"ID":"0fb9a6d9-c18e-485d-87ce-2eed402236b4","Type":"ContainerStarted","Data":"d42139a82201e0ebfaa344fd2abd2e6f550238611da5bd2c01b499da17299ddf"} Feb 03 07:49:43 crc kubenswrapper[4998]: I0203 07:49:43.303454 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-gz2cv" podStartSLOduration=2.788187624 
podStartE2EDuration="4.303430188s" podCreationTimestamp="2026-02-03 07:49:39 +0000 UTC" firstStartedPulling="2026-02-03 07:49:41.261005245 +0000 UTC m=+3819.547699051" lastFinishedPulling="2026-02-03 07:49:42.776247809 +0000 UTC m=+3821.062941615" observedRunningTime="2026-02-03 07:49:43.297545272 +0000 UTC m=+3821.584239078" watchObservedRunningTime="2026-02-03 07:49:43.303430188 +0000 UTC m=+3821.590124014" Feb 03 07:49:50 crc kubenswrapper[4998]: I0203 07:49:50.279023 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-gz2cv" Feb 03 07:49:50 crc kubenswrapper[4998]: I0203 07:49:50.279580 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-gz2cv" Feb 03 07:49:50 crc kubenswrapper[4998]: I0203 07:49:50.345378 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-gz2cv" Feb 03 07:49:50 crc kubenswrapper[4998]: I0203 07:49:50.397915 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-gz2cv" Feb 03 07:49:50 crc kubenswrapper[4998]: I0203 07:49:50.585972 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gz2cv"] Feb 03 07:49:52 crc kubenswrapper[4998]: I0203 07:49:52.354931 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-gz2cv" podUID="0fb9a6d9-c18e-485d-87ce-2eed402236b4" containerName="registry-server" containerID="cri-o://d42139a82201e0ebfaa344fd2abd2e6f550238611da5bd2c01b499da17299ddf" gracePeriod=2 Feb 03 07:49:52 crc kubenswrapper[4998]: I0203 07:49:52.743143 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gz2cv" Feb 03 07:49:52 crc kubenswrapper[4998]: I0203 07:49:52.838820 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fb9a6d9-c18e-485d-87ce-2eed402236b4-utilities\") pod \"0fb9a6d9-c18e-485d-87ce-2eed402236b4\" (UID: \"0fb9a6d9-c18e-485d-87ce-2eed402236b4\") " Feb 03 07:49:52 crc kubenswrapper[4998]: I0203 07:49:52.838909 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vg7pb\" (UniqueName: \"kubernetes.io/projected/0fb9a6d9-c18e-485d-87ce-2eed402236b4-kube-api-access-vg7pb\") pod \"0fb9a6d9-c18e-485d-87ce-2eed402236b4\" (UID: \"0fb9a6d9-c18e-485d-87ce-2eed402236b4\") " Feb 03 07:49:52 crc kubenswrapper[4998]: I0203 07:49:52.838957 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fb9a6d9-c18e-485d-87ce-2eed402236b4-catalog-content\") pod \"0fb9a6d9-c18e-485d-87ce-2eed402236b4\" (UID: \"0fb9a6d9-c18e-485d-87ce-2eed402236b4\") " Feb 03 07:49:52 crc kubenswrapper[4998]: I0203 07:49:52.839579 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0fb9a6d9-c18e-485d-87ce-2eed402236b4-utilities" (OuterVolumeSpecName: "utilities") pod "0fb9a6d9-c18e-485d-87ce-2eed402236b4" (UID: "0fb9a6d9-c18e-485d-87ce-2eed402236b4"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:49:52 crc kubenswrapper[4998]: I0203 07:49:52.846168 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0fb9a6d9-c18e-485d-87ce-2eed402236b4-kube-api-access-vg7pb" (OuterVolumeSpecName: "kube-api-access-vg7pb") pod "0fb9a6d9-c18e-485d-87ce-2eed402236b4" (UID: "0fb9a6d9-c18e-485d-87ce-2eed402236b4"). InnerVolumeSpecName "kube-api-access-vg7pb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:49:52 crc kubenswrapper[4998]: I0203 07:49:52.909936 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0fb9a6d9-c18e-485d-87ce-2eed402236b4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0fb9a6d9-c18e-485d-87ce-2eed402236b4" (UID: "0fb9a6d9-c18e-485d-87ce-2eed402236b4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:49:52 crc kubenswrapper[4998]: I0203 07:49:52.941772 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fb9a6d9-c18e-485d-87ce-2eed402236b4-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:49:52 crc kubenswrapper[4998]: I0203 07:49:52.941879 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fb9a6d9-c18e-485d-87ce-2eed402236b4-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:49:52 crc kubenswrapper[4998]: I0203 07:49:52.941899 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vg7pb\" (UniqueName: \"kubernetes.io/projected/0fb9a6d9-c18e-485d-87ce-2eed402236b4-kube-api-access-vg7pb\") on node \"crc\" DevicePath \"\"" Feb 03 07:49:53 crc kubenswrapper[4998]: I0203 07:49:53.363562 4998 generic.go:334] "Generic (PLEG): container finished" podID="0fb9a6d9-c18e-485d-87ce-2eed402236b4" containerID="d42139a82201e0ebfaa344fd2abd2e6f550238611da5bd2c01b499da17299ddf" exitCode=0 Feb 03 07:49:53 crc kubenswrapper[4998]: I0203 07:49:53.363610 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gz2cv" event={"ID":"0fb9a6d9-c18e-485d-87ce-2eed402236b4","Type":"ContainerDied","Data":"d42139a82201e0ebfaa344fd2abd2e6f550238611da5bd2c01b499da17299ddf"} Feb 03 07:49:53 crc kubenswrapper[4998]: I0203 07:49:53.363644 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gz2cv" event={"ID":"0fb9a6d9-c18e-485d-87ce-2eed402236b4","Type":"ContainerDied","Data":"0d96bddd9673609275548c5696663f3b1539d98f8f03fa2b05edf667dbb0c85c"} Feb 03 07:49:53 crc kubenswrapper[4998]: I0203 07:49:53.363662 4998 scope.go:117] "RemoveContainer" containerID="d42139a82201e0ebfaa344fd2abd2e6f550238611da5bd2c01b499da17299ddf" Feb 03 07:49:53 crc kubenswrapper[4998]: I0203 07:49:53.363689 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gz2cv" Feb 03 07:49:53 crc kubenswrapper[4998]: I0203 07:49:53.390878 4998 scope.go:117] "RemoveContainer" containerID="9565c9f8aed58a3ceb6ac56de7f387a17fd8733d706e20a6036f98ac634ed7e5" Feb 03 07:49:53 crc kubenswrapper[4998]: I0203 07:49:53.404919 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gz2cv"] Feb 03 07:49:53 crc kubenswrapper[4998]: I0203 07:49:53.414170 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-gz2cv"] Feb 03 07:49:53 crc kubenswrapper[4998]: I0203 07:49:53.430629 4998 scope.go:117] "RemoveContainer" containerID="76d88a96015b56674f35f9336dc89820bcc305b216aff30fe18204382db1a0f6" Feb 03 07:49:53 crc kubenswrapper[4998]: I0203 07:49:53.450371 4998 scope.go:117] "RemoveContainer" containerID="d42139a82201e0ebfaa344fd2abd2e6f550238611da5bd2c01b499da17299ddf" Feb 03 07:49:53 crc kubenswrapper[4998]: E0203 07:49:53.450813 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d42139a82201e0ebfaa344fd2abd2e6f550238611da5bd2c01b499da17299ddf\": container with ID starting with d42139a82201e0ebfaa344fd2abd2e6f550238611da5bd2c01b499da17299ddf not found: ID does not exist" containerID="d42139a82201e0ebfaa344fd2abd2e6f550238611da5bd2c01b499da17299ddf" Feb 03 07:49:53 crc kubenswrapper[4998]: I0203 07:49:53.450853 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d42139a82201e0ebfaa344fd2abd2e6f550238611da5bd2c01b499da17299ddf"} err="failed to get container status \"d42139a82201e0ebfaa344fd2abd2e6f550238611da5bd2c01b499da17299ddf\": rpc error: code = NotFound desc = could not find container \"d42139a82201e0ebfaa344fd2abd2e6f550238611da5bd2c01b499da17299ddf\": container with ID starting with d42139a82201e0ebfaa344fd2abd2e6f550238611da5bd2c01b499da17299ddf not found: ID does not exist" Feb 03 07:49:53 crc kubenswrapper[4998]: I0203 07:49:53.450876 4998 scope.go:117] "RemoveContainer" containerID="9565c9f8aed58a3ceb6ac56de7f387a17fd8733d706e20a6036f98ac634ed7e5" Feb 03 07:49:53 crc kubenswrapper[4998]: E0203 07:49:53.451444 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9565c9f8aed58a3ceb6ac56de7f387a17fd8733d706e20a6036f98ac634ed7e5\": container with ID starting with 9565c9f8aed58a3ceb6ac56de7f387a17fd8733d706e20a6036f98ac634ed7e5 not found: ID does not exist" containerID="9565c9f8aed58a3ceb6ac56de7f387a17fd8733d706e20a6036f98ac634ed7e5" Feb 03 07:49:53 crc kubenswrapper[4998]: I0203 07:49:53.451509 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9565c9f8aed58a3ceb6ac56de7f387a17fd8733d706e20a6036f98ac634ed7e5"} err="failed to get container status \"9565c9f8aed58a3ceb6ac56de7f387a17fd8733d706e20a6036f98ac634ed7e5\": rpc error: code = NotFound desc = could not find container \"9565c9f8aed58a3ceb6ac56de7f387a17fd8733d706e20a6036f98ac634ed7e5\": container with ID starting with 9565c9f8aed58a3ceb6ac56de7f387a17fd8733d706e20a6036f98ac634ed7e5 not found: ID does not exist" Feb 03 07:49:53 crc kubenswrapper[4998]: I0203 07:49:53.451546 4998 scope.go:117] "RemoveContainer" containerID="76d88a96015b56674f35f9336dc89820bcc305b216aff30fe18204382db1a0f6" Feb 03 07:49:53 crc kubenswrapper[4998]: E0203 07:49:53.451949 4998 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"76d88a96015b56674f35f9336dc89820bcc305b216aff30fe18204382db1a0f6\": container with ID starting with 76d88a96015b56674f35f9336dc89820bcc305b216aff30fe18204382db1a0f6 not found: ID does not exist" containerID="76d88a96015b56674f35f9336dc89820bcc305b216aff30fe18204382db1a0f6" Feb 03 07:49:53 crc kubenswrapper[4998]: I0203 07:49:53.452014 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76d88a96015b56674f35f9336dc89820bcc305b216aff30fe18204382db1a0f6"} err="failed to get container status \"76d88a96015b56674f35f9336dc89820bcc305b216aff30fe18204382db1a0f6\": rpc error: code = NotFound desc = could not find container \"76d88a96015b56674f35f9336dc89820bcc305b216aff30fe18204382db1a0f6\": container with ID starting with 76d88a96015b56674f35f9336dc89820bcc305b216aff30fe18204382db1a0f6 not found: ID does not exist" Feb 03 07:49:54 crc kubenswrapper[4998]: I0203 07:49:54.445178 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0fb9a6d9-c18e-485d-87ce-2eed402236b4" path="/var/lib/kubelet/pods/0fb9a6d9-c18e-485d-87ce-2eed402236b4/volumes" Feb 03 07:49:55 crc kubenswrapper[4998]: I0203 07:49:55.427874 4998 scope.go:117] "RemoveContainer" containerID="f71853e9e2d9cfb53b9308a2cb0b35d9052820e2de6fb1b575190788cc7cc881" Feb 03 07:49:55 crc kubenswrapper[4998]: E0203 07:49:55.428234 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:50:09 crc kubenswrapper[4998]: I0203 07:50:09.428688 4998 scope.go:117] "RemoveContainer" containerID="f71853e9e2d9cfb53b9308a2cb0b35d9052820e2de6fb1b575190788cc7cc881" Feb 03 07:50:09 crc kubenswrapper[4998]: E0203 07:50:09.429427 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:50:20 crc kubenswrapper[4998]: I0203 07:50:20.428147 4998 scope.go:117] "RemoveContainer" containerID="f71853e9e2d9cfb53b9308a2cb0b35d9052820e2de6fb1b575190788cc7cc881" Feb 03 07:50:20 crc kubenswrapper[4998]: E0203 07:50:20.428733 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:50:34 crc kubenswrapper[4998]: I0203 07:50:34.429066 4998 scope.go:117] "RemoveContainer" containerID="f71853e9e2d9cfb53b9308a2cb0b35d9052820e2de6fb1b575190788cc7cc881" Feb 03 07:50:34 crc kubenswrapper[4998]: E0203 07:50:34.429939 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:50:48 crc kubenswrapper[4998]: I0203 07:50:48.427635 4998 scope.go:117] "RemoveContainer" containerID="f71853e9e2d9cfb53b9308a2cb0b35d9052820e2de6fb1b575190788cc7cc881" Feb 03 07:50:48 crc kubenswrapper[4998]: E0203 07:50:48.428469 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:51:01 crc kubenswrapper[4998]: I0203 07:51:01.428415 4998 scope.go:117] "RemoveContainer" containerID="f71853e9e2d9cfb53b9308a2cb0b35d9052820e2de6fb1b575190788cc7cc881" Feb 03 07:51:01 crc kubenswrapper[4998]: E0203 07:51:01.429290 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:51:16 crc kubenswrapper[4998]: I0203 07:51:16.427241 4998 scope.go:117] "RemoveContainer" containerID="f71853e9e2d9cfb53b9308a2cb0b35d9052820e2de6fb1b575190788cc7cc881" Feb 03 07:51:16 crc kubenswrapper[4998]: E0203 07:51:16.428018 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:51:22 crc kubenswrapper[4998]: I0203 07:51:22.735897 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-v74nj"] Feb 03 07:51:22 crc kubenswrapper[4998]: E0203 07:51:22.736429 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fb9a6d9-c18e-485d-87ce-2eed402236b4" containerName="registry-server" Feb 03 07:51:22 crc kubenswrapper[4998]: I0203 07:51:22.736440 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fb9a6d9-c18e-485d-87ce-2eed402236b4" containerName="registry-server" Feb 03 07:51:22 crc kubenswrapper[4998]: E0203 07:51:22.736452 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fb9a6d9-c18e-485d-87ce-2eed402236b4" containerName="extract-utilities" Feb 03 07:51:22 crc kubenswrapper[4998]: I0203 07:51:22.736461 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fb9a6d9-c18e-485d-87ce-2eed402236b4" containerName="extract-utilities" Feb 03 07:51:22 crc kubenswrapper[4998]: E0203 07:51:22.736480 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fb9a6d9-c18e-485d-87ce-2eed402236b4" containerName="extract-content" Feb 03 07:51:22 crc 
kubenswrapper[4998]: I0203 07:51:22.736486 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fb9a6d9-c18e-485d-87ce-2eed402236b4" containerName="extract-content" Feb 03 07:51:22 crc kubenswrapper[4998]: I0203 07:51:22.736636 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fb9a6d9-c18e-485d-87ce-2eed402236b4" containerName="registry-server" Feb 03 07:51:22 crc kubenswrapper[4998]: I0203 07:51:22.738875 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v74nj" Feb 03 07:51:22 crc kubenswrapper[4998]: I0203 07:51:22.753088 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v74nj"] Feb 03 07:51:22 crc kubenswrapper[4998]: I0203 07:51:22.831018 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kb462\" (UniqueName: \"kubernetes.io/projected/bd09c426-92d3-4728-83a7-2c838a61b10e-kube-api-access-kb462\") pod \"community-operators-v74nj\" (UID: \"bd09c426-92d3-4728-83a7-2c838a61b10e\") " pod="openshift-marketplace/community-operators-v74nj" Feb 03 07:51:22 crc kubenswrapper[4998]: I0203 07:51:22.831096 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd09c426-92d3-4728-83a7-2c838a61b10e-catalog-content\") pod \"community-operators-v74nj\" (UID: \"bd09c426-92d3-4728-83a7-2c838a61b10e\") " pod="openshift-marketplace/community-operators-v74nj" Feb 03 07:51:22 crc kubenswrapper[4998]: I0203 07:51:22.831157 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd09c426-92d3-4728-83a7-2c838a61b10e-utilities\") pod \"community-operators-v74nj\" (UID: \"bd09c426-92d3-4728-83a7-2c838a61b10e\") " pod="openshift-marketplace/community-operators-v74nj" Feb 03 07:51:22 crc kubenswrapper[4998]: I0203 07:51:22.932172 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kb462\" (UniqueName: \"kubernetes.io/projected/bd09c426-92d3-4728-83a7-2c838a61b10e-kube-api-access-kb462\") pod \"community-operators-v74nj\" (UID: \"bd09c426-92d3-4728-83a7-2c838a61b10e\") " pod="openshift-marketplace/community-operators-v74nj" Feb 03 07:51:22 crc kubenswrapper[4998]: I0203 07:51:22.932270 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd09c426-92d3-4728-83a7-2c838a61b10e-catalog-content\") pod \"community-operators-v74nj\" (UID: \"bd09c426-92d3-4728-83a7-2c838a61b10e\") " pod="openshift-marketplace/community-operators-v74nj" Feb 03 07:51:22 crc kubenswrapper[4998]: I0203 07:51:22.932323 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd09c426-92d3-4728-83a7-2c838a61b10e-utilities\") pod \"community-operators-v74nj\" (UID: \"bd09c426-92d3-4728-83a7-2c838a61b10e\") " pod="openshift-marketplace/community-operators-v74nj" Feb 03 07:51:22 crc kubenswrapper[4998]: I0203 07:51:22.933002 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd09c426-92d3-4728-83a7-2c838a61b10e-catalog-content\") pod \"community-operators-v74nj\" (UID: \"bd09c426-92d3-4728-83a7-2c838a61b10e\") " 
pod="openshift-marketplace/community-operators-v74nj" Feb 03 07:51:22 crc kubenswrapper[4998]: I0203 07:51:22.933034 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd09c426-92d3-4728-83a7-2c838a61b10e-utilities\") pod \"community-operators-v74nj\" (UID: \"bd09c426-92d3-4728-83a7-2c838a61b10e\") " pod="openshift-marketplace/community-operators-v74nj" Feb 03 07:51:22 crc kubenswrapper[4998]: I0203 07:51:22.951909 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kb462\" (UniqueName: \"kubernetes.io/projected/bd09c426-92d3-4728-83a7-2c838a61b10e-kube-api-access-kb462\") pod \"community-operators-v74nj\" (UID: \"bd09c426-92d3-4728-83a7-2c838a61b10e\") " pod="openshift-marketplace/community-operators-v74nj" Feb 03 07:51:23 crc kubenswrapper[4998]: I0203 07:51:23.061468 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v74nj" Feb 03 07:51:23 crc kubenswrapper[4998]: I0203 07:51:23.532085 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v74nj"] Feb 03 07:51:24 crc kubenswrapper[4998]: I0203 07:51:24.068674 4998 generic.go:334] "Generic (PLEG): container finished" podID="bd09c426-92d3-4728-83a7-2c838a61b10e" containerID="c287698453c8955910339a3490995539bb7f999022d23d281363faa9bb89532e" exitCode=0 Feb 03 07:51:24 crc kubenswrapper[4998]: I0203 07:51:24.068747 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v74nj" event={"ID":"bd09c426-92d3-4728-83a7-2c838a61b10e","Type":"ContainerDied","Data":"c287698453c8955910339a3490995539bb7f999022d23d281363faa9bb89532e"} Feb 03 07:51:24 crc kubenswrapper[4998]: I0203 07:51:24.068948 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v74nj" event={"ID":"bd09c426-92d3-4728-83a7-2c838a61b10e","Type":"ContainerStarted","Data":"04cb93dbbe8cf053d120731e83b690230156e27c5a35ee58ab822998b744dc26"} Feb 03 07:51:24 crc kubenswrapper[4998]: I0203 07:51:24.534512 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-wz5xc"] Feb 03 07:51:24 crc kubenswrapper[4998]: I0203 07:51:24.537472 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wz5xc" Feb 03 07:51:24 crc kubenswrapper[4998]: I0203 07:51:24.546837 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wz5xc"] Feb 03 07:51:24 crc kubenswrapper[4998]: I0203 07:51:24.661033 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2cwkw\" (UniqueName: \"kubernetes.io/projected/8078f9c0-dfb9-4e5a-96b7-0159286933d9-kube-api-access-2cwkw\") pod \"redhat-marketplace-wz5xc\" (UID: \"8078f9c0-dfb9-4e5a-96b7-0159286933d9\") " pod="openshift-marketplace/redhat-marketplace-wz5xc" Feb 03 07:51:24 crc kubenswrapper[4998]: I0203 07:51:24.661086 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8078f9c0-dfb9-4e5a-96b7-0159286933d9-utilities\") pod \"redhat-marketplace-wz5xc\" (UID: \"8078f9c0-dfb9-4e5a-96b7-0159286933d9\") " pod="openshift-marketplace/redhat-marketplace-wz5xc" Feb 03 07:51:24 crc kubenswrapper[4998]: I0203 07:51:24.661229 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8078f9c0-dfb9-4e5a-96b7-0159286933d9-catalog-content\") pod \"redhat-marketplace-wz5xc\" (UID: \"8078f9c0-dfb9-4e5a-96b7-0159286933d9\") " pod="openshift-marketplace/redhat-marketplace-wz5xc" Feb 03 07:51:24 crc kubenswrapper[4998]: I0203 07:51:24.762979 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8078f9c0-dfb9-4e5a-96b7-0159286933d9-catalog-content\") pod \"redhat-marketplace-wz5xc\" (UID: \"8078f9c0-dfb9-4e5a-96b7-0159286933d9\") " pod="openshift-marketplace/redhat-marketplace-wz5xc" Feb 03 07:51:24 crc kubenswrapper[4998]: I0203 07:51:24.763057 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2cwkw\" (UniqueName: \"kubernetes.io/projected/8078f9c0-dfb9-4e5a-96b7-0159286933d9-kube-api-access-2cwkw\") pod \"redhat-marketplace-wz5xc\" (UID: \"8078f9c0-dfb9-4e5a-96b7-0159286933d9\") " pod="openshift-marketplace/redhat-marketplace-wz5xc" Feb 03 07:51:24 crc kubenswrapper[4998]: I0203 07:51:24.763084 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8078f9c0-dfb9-4e5a-96b7-0159286933d9-utilities\") pod \"redhat-marketplace-wz5xc\" (UID: \"8078f9c0-dfb9-4e5a-96b7-0159286933d9\") " pod="openshift-marketplace/redhat-marketplace-wz5xc" Feb 03 07:51:24 crc kubenswrapper[4998]: I0203 07:51:24.763618 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8078f9c0-dfb9-4e5a-96b7-0159286933d9-utilities\") pod \"redhat-marketplace-wz5xc\" (UID: \"8078f9c0-dfb9-4e5a-96b7-0159286933d9\") " pod="openshift-marketplace/redhat-marketplace-wz5xc" Feb 03 07:51:24 crc kubenswrapper[4998]: I0203 07:51:24.764096 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8078f9c0-dfb9-4e5a-96b7-0159286933d9-catalog-content\") pod \"redhat-marketplace-wz5xc\" (UID: \"8078f9c0-dfb9-4e5a-96b7-0159286933d9\") " pod="openshift-marketplace/redhat-marketplace-wz5xc" Feb 03 07:51:24 crc kubenswrapper[4998]: I0203 07:51:24.789459 4998 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-2cwkw\" (UniqueName: \"kubernetes.io/projected/8078f9c0-dfb9-4e5a-96b7-0159286933d9-kube-api-access-2cwkw\") pod \"redhat-marketplace-wz5xc\" (UID: \"8078f9c0-dfb9-4e5a-96b7-0159286933d9\") " pod="openshift-marketplace/redhat-marketplace-wz5xc" Feb 03 07:51:24 crc kubenswrapper[4998]: I0203 07:51:24.853947 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wz5xc" Feb 03 07:51:25 crc kubenswrapper[4998]: I0203 07:51:25.262469 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wz5xc"] Feb 03 07:51:25 crc kubenswrapper[4998]: I0203 07:51:25.536712 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-wzn9w"] Feb 03 07:51:25 crc kubenswrapper[4998]: I0203 07:51:25.538593 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wzn9w" Feb 03 07:51:25 crc kubenswrapper[4998]: I0203 07:51:25.545835 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wzn9w"] Feb 03 07:51:25 crc kubenswrapper[4998]: I0203 07:51:25.697725 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/944a5090-1b21-4050-a7d5-33e1cd64be97-catalog-content\") pod \"redhat-operators-wzn9w\" (UID: \"944a5090-1b21-4050-a7d5-33e1cd64be97\") " pod="openshift-marketplace/redhat-operators-wzn9w" Feb 03 07:51:25 crc kubenswrapper[4998]: I0203 07:51:25.698123 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qx56k\" (UniqueName: \"kubernetes.io/projected/944a5090-1b21-4050-a7d5-33e1cd64be97-kube-api-access-qx56k\") pod \"redhat-operators-wzn9w\" (UID: \"944a5090-1b21-4050-a7d5-33e1cd64be97\") " pod="openshift-marketplace/redhat-operators-wzn9w" Feb 03 07:51:25 crc kubenswrapper[4998]: I0203 07:51:25.698207 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/944a5090-1b21-4050-a7d5-33e1cd64be97-utilities\") pod \"redhat-operators-wzn9w\" (UID: \"944a5090-1b21-4050-a7d5-33e1cd64be97\") " pod="openshift-marketplace/redhat-operators-wzn9w" Feb 03 07:51:25 crc kubenswrapper[4998]: I0203 07:51:25.799556 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/944a5090-1b21-4050-a7d5-33e1cd64be97-catalog-content\") pod \"redhat-operators-wzn9w\" (UID: \"944a5090-1b21-4050-a7d5-33e1cd64be97\") " pod="openshift-marketplace/redhat-operators-wzn9w" Feb 03 07:51:25 crc kubenswrapper[4998]: I0203 07:51:25.799601 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qx56k\" (UniqueName: \"kubernetes.io/projected/944a5090-1b21-4050-a7d5-33e1cd64be97-kube-api-access-qx56k\") pod \"redhat-operators-wzn9w\" (UID: \"944a5090-1b21-4050-a7d5-33e1cd64be97\") " pod="openshift-marketplace/redhat-operators-wzn9w" Feb 03 07:51:25 crc kubenswrapper[4998]: I0203 07:51:25.799663 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/944a5090-1b21-4050-a7d5-33e1cd64be97-utilities\") pod \"redhat-operators-wzn9w\" (UID: \"944a5090-1b21-4050-a7d5-33e1cd64be97\") " 
pod="openshift-marketplace/redhat-operators-wzn9w" Feb 03 07:51:25 crc kubenswrapper[4998]: I0203 07:51:25.800222 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/944a5090-1b21-4050-a7d5-33e1cd64be97-utilities\") pod \"redhat-operators-wzn9w\" (UID: \"944a5090-1b21-4050-a7d5-33e1cd64be97\") " pod="openshift-marketplace/redhat-operators-wzn9w" Feb 03 07:51:25 crc kubenswrapper[4998]: I0203 07:51:25.800224 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/944a5090-1b21-4050-a7d5-33e1cd64be97-catalog-content\") pod \"redhat-operators-wzn9w\" (UID: \"944a5090-1b21-4050-a7d5-33e1cd64be97\") " pod="openshift-marketplace/redhat-operators-wzn9w" Feb 03 07:51:25 crc kubenswrapper[4998]: I0203 07:51:25.819765 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qx56k\" (UniqueName: \"kubernetes.io/projected/944a5090-1b21-4050-a7d5-33e1cd64be97-kube-api-access-qx56k\") pod \"redhat-operators-wzn9w\" (UID: \"944a5090-1b21-4050-a7d5-33e1cd64be97\") " pod="openshift-marketplace/redhat-operators-wzn9w" Feb 03 07:51:25 crc kubenswrapper[4998]: I0203 07:51:25.858051 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wzn9w" Feb 03 07:51:26 crc kubenswrapper[4998]: I0203 07:51:26.084112 4998 generic.go:334] "Generic (PLEG): container finished" podID="8078f9c0-dfb9-4e5a-96b7-0159286933d9" containerID="21c32803cfcfd5fecd8cd696d2c891a5e8ef0604f1ca9be60f0b432374b0bf11" exitCode=0 Feb 03 07:51:26 crc kubenswrapper[4998]: I0203 07:51:26.084397 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wz5xc" event={"ID":"8078f9c0-dfb9-4e5a-96b7-0159286933d9","Type":"ContainerDied","Data":"21c32803cfcfd5fecd8cd696d2c891a5e8ef0604f1ca9be60f0b432374b0bf11"} Feb 03 07:51:26 crc kubenswrapper[4998]: I0203 07:51:26.084442 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wz5xc" event={"ID":"8078f9c0-dfb9-4e5a-96b7-0159286933d9","Type":"ContainerStarted","Data":"e6248d50231e1e1b8dedbd4b4242a5bd646d25ae63aa32334a4aee1e26bff70b"} Feb 03 07:51:26 crc kubenswrapper[4998]: I0203 07:51:26.087017 4998 generic.go:334] "Generic (PLEG): container finished" podID="bd09c426-92d3-4728-83a7-2c838a61b10e" containerID="683d874875ec390c1fa5b02d42b8cd47f7217927d0b25c95d68518a3f5930bed" exitCode=0 Feb 03 07:51:26 crc kubenswrapper[4998]: I0203 07:51:26.087060 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v74nj" event={"ID":"bd09c426-92d3-4728-83a7-2c838a61b10e","Type":"ContainerDied","Data":"683d874875ec390c1fa5b02d42b8cd47f7217927d0b25c95d68518a3f5930bed"} Feb 03 07:51:26 crc kubenswrapper[4998]: I0203 07:51:26.327048 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wzn9w"] Feb 03 07:51:26 crc kubenswrapper[4998]: W0203 07:51:26.329839 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod944a5090_1b21_4050_a7d5_33e1cd64be97.slice/crio-7a00bf2b94b27e063e7d7ec9336a28a0ec21085de2c68415d683e2c0a61ae41b WatchSource:0}: Error finding container 7a00bf2b94b27e063e7d7ec9336a28a0ec21085de2c68415d683e2c0a61ae41b: Status 404 returned error can't find the container with id 
7a00bf2b94b27e063e7d7ec9336a28a0ec21085de2c68415d683e2c0a61ae41b Feb 03 07:51:27 crc kubenswrapper[4998]: I0203 07:51:27.096047 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v74nj" event={"ID":"bd09c426-92d3-4728-83a7-2c838a61b10e","Type":"ContainerStarted","Data":"b19a4d1ba60d836d66e6bf98138eced370d63ac03366b8c8613e6b3359db6bdd"} Feb 03 07:51:27 crc kubenswrapper[4998]: I0203 07:51:27.098541 4998 generic.go:334] "Generic (PLEG): container finished" podID="944a5090-1b21-4050-a7d5-33e1cd64be97" containerID="2b5e3ba75ddcd3e43e3471238dc91e2dbdf4753a7400af9ece055cd89bc88cb3" exitCode=0 Feb 03 07:51:27 crc kubenswrapper[4998]: I0203 07:51:27.098624 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wzn9w" event={"ID":"944a5090-1b21-4050-a7d5-33e1cd64be97","Type":"ContainerDied","Data":"2b5e3ba75ddcd3e43e3471238dc91e2dbdf4753a7400af9ece055cd89bc88cb3"} Feb 03 07:51:27 crc kubenswrapper[4998]: I0203 07:51:27.098648 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wzn9w" event={"ID":"944a5090-1b21-4050-a7d5-33e1cd64be97","Type":"ContainerStarted","Data":"7a00bf2b94b27e063e7d7ec9336a28a0ec21085de2c68415d683e2c0a61ae41b"} Feb 03 07:51:27 crc kubenswrapper[4998]: I0203 07:51:27.100688 4998 generic.go:334] "Generic (PLEG): container finished" podID="8078f9c0-dfb9-4e5a-96b7-0159286933d9" containerID="3bc82457ed2f21d318170bb8b57e00036e70903e5d9b8996d1170c91d20a938c" exitCode=0 Feb 03 07:51:27 crc kubenswrapper[4998]: I0203 07:51:27.100713 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wz5xc" event={"ID":"8078f9c0-dfb9-4e5a-96b7-0159286933d9","Type":"ContainerDied","Data":"3bc82457ed2f21d318170bb8b57e00036e70903e5d9b8996d1170c91d20a938c"} Feb 03 07:51:27 crc kubenswrapper[4998]: I0203 07:51:27.128570 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-v74nj" podStartSLOduration=2.693627707 podStartE2EDuration="5.128548249s" podCreationTimestamp="2026-02-03 07:51:22 +0000 UTC" firstStartedPulling="2026-02-03 07:51:24.070413371 +0000 UTC m=+3922.357107177" lastFinishedPulling="2026-02-03 07:51:26.505333913 +0000 UTC m=+3924.792027719" observedRunningTime="2026-02-03 07:51:27.121923953 +0000 UTC m=+3925.408617769" watchObservedRunningTime="2026-02-03 07:51:27.128548249 +0000 UTC m=+3925.415242055" Feb 03 07:51:28 crc kubenswrapper[4998]: I0203 07:51:28.109438 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wz5xc" event={"ID":"8078f9c0-dfb9-4e5a-96b7-0159286933d9","Type":"ContainerStarted","Data":"28ec430babefe2ea897f60d4536b78b839e0fea8db46cd6de5c2e87d2e0afa06"} Feb 03 07:51:28 crc kubenswrapper[4998]: I0203 07:51:28.112921 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wzn9w" event={"ID":"944a5090-1b21-4050-a7d5-33e1cd64be97","Type":"ContainerStarted","Data":"29dad3f7850ec1a75fa98db1d93c8e72f7f09f80417ff2342626ecd70c924161"} Feb 03 07:51:28 crc kubenswrapper[4998]: I0203 07:51:28.156504 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wz5xc" podStartSLOduration=2.720746698 podStartE2EDuration="4.15648186s" podCreationTimestamp="2026-02-03 07:51:24 +0000 UTC" firstStartedPulling="2026-02-03 07:51:26.085632596 +0000 UTC m=+3924.372326412" 
lastFinishedPulling="2026-02-03 07:51:27.521367758 +0000 UTC m=+3925.808061574" observedRunningTime="2026-02-03 07:51:28.133260925 +0000 UTC m=+3926.419954751" watchObservedRunningTime="2026-02-03 07:51:28.15648186 +0000 UTC m=+3926.443175666" Feb 03 07:51:29 crc kubenswrapper[4998]: I0203 07:51:29.119705 4998 generic.go:334] "Generic (PLEG): container finished" podID="944a5090-1b21-4050-a7d5-33e1cd64be97" containerID="29dad3f7850ec1a75fa98db1d93c8e72f7f09f80417ff2342626ecd70c924161" exitCode=0 Feb 03 07:51:29 crc kubenswrapper[4998]: I0203 07:51:29.119879 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wzn9w" event={"ID":"944a5090-1b21-4050-a7d5-33e1cd64be97","Type":"ContainerDied","Data":"29dad3f7850ec1a75fa98db1d93c8e72f7f09f80417ff2342626ecd70c924161"} Feb 03 07:51:29 crc kubenswrapper[4998]: I0203 07:51:29.427964 4998 scope.go:117] "RemoveContainer" containerID="f71853e9e2d9cfb53b9308a2cb0b35d9052820e2de6fb1b575190788cc7cc881" Feb 03 07:51:29 crc kubenswrapper[4998]: E0203 07:51:29.428185 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:51:30 crc kubenswrapper[4998]: I0203 07:51:30.141356 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wzn9w" event={"ID":"944a5090-1b21-4050-a7d5-33e1cd64be97","Type":"ContainerStarted","Data":"d68d3046e8e09e0e092e2c9a4926d165d379894d65a09aec3cbb4a8ad2dafb91"} Feb 03 07:51:30 crc kubenswrapper[4998]: I0203 07:51:30.164844 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-wzn9w" podStartSLOduration=2.623343384 podStartE2EDuration="5.164821901s" podCreationTimestamp="2026-02-03 07:51:25 +0000 UTC" firstStartedPulling="2026-02-03 07:51:27.100960151 +0000 UTC m=+3925.387653957" lastFinishedPulling="2026-02-03 07:51:29.642438668 +0000 UTC m=+3927.929132474" observedRunningTime="2026-02-03 07:51:30.158217785 +0000 UTC m=+3928.444911611" watchObservedRunningTime="2026-02-03 07:51:30.164821901 +0000 UTC m=+3928.451515717" Feb 03 07:51:33 crc kubenswrapper[4998]: I0203 07:51:33.062729 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-v74nj" Feb 03 07:51:33 crc kubenswrapper[4998]: I0203 07:51:33.063103 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-v74nj" Feb 03 07:51:33 crc kubenswrapper[4998]: I0203 07:51:33.108329 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-v74nj" Feb 03 07:51:33 crc kubenswrapper[4998]: I0203 07:51:33.208727 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-v74nj" Feb 03 07:51:34 crc kubenswrapper[4998]: I0203 07:51:34.332727 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-v74nj"] Feb 03 07:51:34 crc kubenswrapper[4998]: I0203 07:51:34.854521 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-marketplace-wz5xc" Feb 03 07:51:34 crc kubenswrapper[4998]: I0203 07:51:34.854580 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-wz5xc" Feb 03 07:51:34 crc kubenswrapper[4998]: I0203 07:51:34.897268 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-wz5xc" Feb 03 07:51:35 crc kubenswrapper[4998]: I0203 07:51:35.174419 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-v74nj" podUID="bd09c426-92d3-4728-83a7-2c838a61b10e" containerName="registry-server" containerID="cri-o://b19a4d1ba60d836d66e6bf98138eced370d63ac03366b8c8613e6b3359db6bdd" gracePeriod=2 Feb 03 07:51:35 crc kubenswrapper[4998]: I0203 07:51:35.258441 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-wz5xc" Feb 03 07:51:35 crc kubenswrapper[4998]: I0203 07:51:35.716266 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v74nj" Feb 03 07:51:35 crc kubenswrapper[4998]: I0203 07:51:35.845538 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd09c426-92d3-4728-83a7-2c838a61b10e-utilities\") pod \"bd09c426-92d3-4728-83a7-2c838a61b10e\" (UID: \"bd09c426-92d3-4728-83a7-2c838a61b10e\") " Feb 03 07:51:35 crc kubenswrapper[4998]: I0203 07:51:35.845601 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kb462\" (UniqueName: \"kubernetes.io/projected/bd09c426-92d3-4728-83a7-2c838a61b10e-kube-api-access-kb462\") pod \"bd09c426-92d3-4728-83a7-2c838a61b10e\" (UID: \"bd09c426-92d3-4728-83a7-2c838a61b10e\") " Feb 03 07:51:35 crc kubenswrapper[4998]: I0203 07:51:35.845700 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd09c426-92d3-4728-83a7-2c838a61b10e-catalog-content\") pod \"bd09c426-92d3-4728-83a7-2c838a61b10e\" (UID: \"bd09c426-92d3-4728-83a7-2c838a61b10e\") " Feb 03 07:51:35 crc kubenswrapper[4998]: I0203 07:51:35.846725 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd09c426-92d3-4728-83a7-2c838a61b10e-utilities" (OuterVolumeSpecName: "utilities") pod "bd09c426-92d3-4728-83a7-2c838a61b10e" (UID: "bd09c426-92d3-4728-83a7-2c838a61b10e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:51:35 crc kubenswrapper[4998]: I0203 07:51:35.855456 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd09c426-92d3-4728-83a7-2c838a61b10e-kube-api-access-kb462" (OuterVolumeSpecName: "kube-api-access-kb462") pod "bd09c426-92d3-4728-83a7-2c838a61b10e" (UID: "bd09c426-92d3-4728-83a7-2c838a61b10e"). InnerVolumeSpecName "kube-api-access-kb462". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:51:35 crc kubenswrapper[4998]: I0203 07:51:35.858746 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-wzn9w" Feb 03 07:51:35 crc kubenswrapper[4998]: I0203 07:51:35.859596 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-wzn9w" Feb 03 07:51:35 crc kubenswrapper[4998]: I0203 07:51:35.903944 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-wzn9w" Feb 03 07:51:35 crc kubenswrapper[4998]: I0203 07:51:35.947603 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd09c426-92d3-4728-83a7-2c838a61b10e-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:51:35 crc kubenswrapper[4998]: I0203 07:51:35.947637 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kb462\" (UniqueName: \"kubernetes.io/projected/bd09c426-92d3-4728-83a7-2c838a61b10e-kube-api-access-kb462\") on node \"crc\" DevicePath \"\"" Feb 03 07:51:36 crc kubenswrapper[4998]: I0203 07:51:36.183609 4998 generic.go:334] "Generic (PLEG): container finished" podID="bd09c426-92d3-4728-83a7-2c838a61b10e" containerID="b19a4d1ba60d836d66e6bf98138eced370d63ac03366b8c8613e6b3359db6bdd" exitCode=0 Feb 03 07:51:36 crc kubenswrapper[4998]: I0203 07:51:36.183700 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v74nj" event={"ID":"bd09c426-92d3-4728-83a7-2c838a61b10e","Type":"ContainerDied","Data":"b19a4d1ba60d836d66e6bf98138eced370d63ac03366b8c8613e6b3359db6bdd"} Feb 03 07:51:36 crc kubenswrapper[4998]: I0203 07:51:36.183766 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v74nj" event={"ID":"bd09c426-92d3-4728-83a7-2c838a61b10e","Type":"ContainerDied","Data":"04cb93dbbe8cf053d120731e83b690230156e27c5a35ee58ab822998b744dc26"} Feb 03 07:51:36 crc kubenswrapper[4998]: I0203 07:51:36.183822 4998 scope.go:117] "RemoveContainer" containerID="b19a4d1ba60d836d66e6bf98138eced370d63ac03366b8c8613e6b3359db6bdd" Feb 03 07:51:36 crc kubenswrapper[4998]: I0203 07:51:36.184059 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-v74nj" Feb 03 07:51:36 crc kubenswrapper[4998]: I0203 07:51:36.201004 4998 scope.go:117] "RemoveContainer" containerID="683d874875ec390c1fa5b02d42b8cd47f7217927d0b25c95d68518a3f5930bed" Feb 03 07:51:36 crc kubenswrapper[4998]: I0203 07:51:36.233749 4998 scope.go:117] "RemoveContainer" containerID="c287698453c8955910339a3490995539bb7f999022d23d281363faa9bb89532e" Feb 03 07:51:36 crc kubenswrapper[4998]: I0203 07:51:36.238400 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-wzn9w" Feb 03 07:51:36 crc kubenswrapper[4998]: I0203 07:51:36.257763 4998 scope.go:117] "RemoveContainer" containerID="b19a4d1ba60d836d66e6bf98138eced370d63ac03366b8c8613e6b3359db6bdd" Feb 03 07:51:36 crc kubenswrapper[4998]: E0203 07:51:36.261608 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b19a4d1ba60d836d66e6bf98138eced370d63ac03366b8c8613e6b3359db6bdd\": container with ID starting with b19a4d1ba60d836d66e6bf98138eced370d63ac03366b8c8613e6b3359db6bdd not found: ID does not exist" containerID="b19a4d1ba60d836d66e6bf98138eced370d63ac03366b8c8613e6b3359db6bdd" Feb 03 07:51:36 crc kubenswrapper[4998]: I0203 07:51:36.261665 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b19a4d1ba60d836d66e6bf98138eced370d63ac03366b8c8613e6b3359db6bdd"} err="failed to get container status \"b19a4d1ba60d836d66e6bf98138eced370d63ac03366b8c8613e6b3359db6bdd\": rpc error: code = NotFound desc = could not find container \"b19a4d1ba60d836d66e6bf98138eced370d63ac03366b8c8613e6b3359db6bdd\": container with ID starting with b19a4d1ba60d836d66e6bf98138eced370d63ac03366b8c8613e6b3359db6bdd not found: ID does not exist" Feb 03 07:51:36 crc kubenswrapper[4998]: I0203 07:51:36.261701 4998 scope.go:117] "RemoveContainer" containerID="683d874875ec390c1fa5b02d42b8cd47f7217927d0b25c95d68518a3f5930bed" Feb 03 07:51:36 crc kubenswrapper[4998]: E0203 07:51:36.262054 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"683d874875ec390c1fa5b02d42b8cd47f7217927d0b25c95d68518a3f5930bed\": container with ID starting with 683d874875ec390c1fa5b02d42b8cd47f7217927d0b25c95d68518a3f5930bed not found: ID does not exist" containerID="683d874875ec390c1fa5b02d42b8cd47f7217927d0b25c95d68518a3f5930bed" Feb 03 07:51:36 crc kubenswrapper[4998]: I0203 07:51:36.262091 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"683d874875ec390c1fa5b02d42b8cd47f7217927d0b25c95d68518a3f5930bed"} err="failed to get container status \"683d874875ec390c1fa5b02d42b8cd47f7217927d0b25c95d68518a3f5930bed\": rpc error: code = NotFound desc = could not find container \"683d874875ec390c1fa5b02d42b8cd47f7217927d0b25c95d68518a3f5930bed\": container with ID starting with 683d874875ec390c1fa5b02d42b8cd47f7217927d0b25c95d68518a3f5930bed not found: ID does not exist" Feb 03 07:51:36 crc kubenswrapper[4998]: I0203 07:51:36.262111 4998 scope.go:117] "RemoveContainer" containerID="c287698453c8955910339a3490995539bb7f999022d23d281363faa9bb89532e" Feb 03 07:51:36 crc kubenswrapper[4998]: E0203 07:51:36.262353 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c287698453c8955910339a3490995539bb7f999022d23d281363faa9bb89532e\": container with ID 
starting with c287698453c8955910339a3490995539bb7f999022d23d281363faa9bb89532e not found: ID does not exist" containerID="c287698453c8955910339a3490995539bb7f999022d23d281363faa9bb89532e" Feb 03 07:51:36 crc kubenswrapper[4998]: I0203 07:51:36.262383 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c287698453c8955910339a3490995539bb7f999022d23d281363faa9bb89532e"} err="failed to get container status \"c287698453c8955910339a3490995539bb7f999022d23d281363faa9bb89532e\": rpc error: code = NotFound desc = could not find container \"c287698453c8955910339a3490995539bb7f999022d23d281363faa9bb89532e\": container with ID starting with c287698453c8955910339a3490995539bb7f999022d23d281363faa9bb89532e not found: ID does not exist" Feb 03 07:51:36 crc kubenswrapper[4998]: I0203 07:51:36.654385 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd09c426-92d3-4728-83a7-2c838a61b10e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bd09c426-92d3-4728-83a7-2c838a61b10e" (UID: "bd09c426-92d3-4728-83a7-2c838a61b10e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:51:36 crc kubenswrapper[4998]: I0203 07:51:36.656150 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd09c426-92d3-4728-83a7-2c838a61b10e-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:51:36 crc kubenswrapper[4998]: I0203 07:51:36.815275 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-v74nj"] Feb 03 07:51:36 crc kubenswrapper[4998]: I0203 07:51:36.820625 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-v74nj"] Feb 03 07:51:37 crc kubenswrapper[4998]: I0203 07:51:37.123124 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wz5xc"] Feb 03 07:51:37 crc kubenswrapper[4998]: I0203 07:51:37.191665 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-wz5xc" podUID="8078f9c0-dfb9-4e5a-96b7-0159286933d9" containerName="registry-server" containerID="cri-o://28ec430babefe2ea897f60d4536b78b839e0fea8db46cd6de5c2e87d2e0afa06" gracePeriod=2 Feb 03 07:51:37 crc kubenswrapper[4998]: I0203 07:51:37.689209 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wz5xc" Feb 03 07:51:37 crc kubenswrapper[4998]: I0203 07:51:37.773541 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8078f9c0-dfb9-4e5a-96b7-0159286933d9-catalog-content\") pod \"8078f9c0-dfb9-4e5a-96b7-0159286933d9\" (UID: \"8078f9c0-dfb9-4e5a-96b7-0159286933d9\") " Feb 03 07:51:37 crc kubenswrapper[4998]: I0203 07:51:37.773606 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2cwkw\" (UniqueName: \"kubernetes.io/projected/8078f9c0-dfb9-4e5a-96b7-0159286933d9-kube-api-access-2cwkw\") pod \"8078f9c0-dfb9-4e5a-96b7-0159286933d9\" (UID: \"8078f9c0-dfb9-4e5a-96b7-0159286933d9\") " Feb 03 07:51:37 crc kubenswrapper[4998]: I0203 07:51:37.773740 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8078f9c0-dfb9-4e5a-96b7-0159286933d9-utilities\") pod \"8078f9c0-dfb9-4e5a-96b7-0159286933d9\" (UID: \"8078f9c0-dfb9-4e5a-96b7-0159286933d9\") " Feb 03 07:51:37 crc kubenswrapper[4998]: I0203 07:51:37.774648 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8078f9c0-dfb9-4e5a-96b7-0159286933d9-utilities" (OuterVolumeSpecName: "utilities") pod "8078f9c0-dfb9-4e5a-96b7-0159286933d9" (UID: "8078f9c0-dfb9-4e5a-96b7-0159286933d9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:51:37 crc kubenswrapper[4998]: I0203 07:51:37.789823 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8078f9c0-dfb9-4e5a-96b7-0159286933d9-kube-api-access-2cwkw" (OuterVolumeSpecName: "kube-api-access-2cwkw") pod "8078f9c0-dfb9-4e5a-96b7-0159286933d9" (UID: "8078f9c0-dfb9-4e5a-96b7-0159286933d9"). InnerVolumeSpecName "kube-api-access-2cwkw". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:51:37 crc kubenswrapper[4998]: I0203 07:51:37.796088 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8078f9c0-dfb9-4e5a-96b7-0159286933d9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8078f9c0-dfb9-4e5a-96b7-0159286933d9" (UID: "8078f9c0-dfb9-4e5a-96b7-0159286933d9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:51:37 crc kubenswrapper[4998]: I0203 07:51:37.875702 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8078f9c0-dfb9-4e5a-96b7-0159286933d9-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:51:37 crc kubenswrapper[4998]: I0203 07:51:37.875752 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2cwkw\" (UniqueName: \"kubernetes.io/projected/8078f9c0-dfb9-4e5a-96b7-0159286933d9-kube-api-access-2cwkw\") on node \"crc\" DevicePath \"\"" Feb 03 07:51:37 crc kubenswrapper[4998]: I0203 07:51:37.875767 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8078f9c0-dfb9-4e5a-96b7-0159286933d9-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:51:38 crc kubenswrapper[4998]: I0203 07:51:38.200541 4998 generic.go:334] "Generic (PLEG): container finished" podID="8078f9c0-dfb9-4e5a-96b7-0159286933d9" containerID="28ec430babefe2ea897f60d4536b78b839e0fea8db46cd6de5c2e87d2e0afa06" exitCode=0 Feb 03 07:51:38 crc kubenswrapper[4998]: I0203 07:51:38.200653 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wz5xc" event={"ID":"8078f9c0-dfb9-4e5a-96b7-0159286933d9","Type":"ContainerDied","Data":"28ec430babefe2ea897f60d4536b78b839e0fea8db46cd6de5c2e87d2e0afa06"} Feb 03 07:51:38 crc kubenswrapper[4998]: I0203 07:51:38.200714 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wz5xc" event={"ID":"8078f9c0-dfb9-4e5a-96b7-0159286933d9","Type":"ContainerDied","Data":"e6248d50231e1e1b8dedbd4b4242a5bd646d25ae63aa32334a4aee1e26bff70b"} Feb 03 07:51:38 crc kubenswrapper[4998]: I0203 07:51:38.200734 4998 scope.go:117] "RemoveContainer" containerID="28ec430babefe2ea897f60d4536b78b839e0fea8db46cd6de5c2e87d2e0afa06" Feb 03 07:51:38 crc kubenswrapper[4998]: I0203 07:51:38.201109 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wz5xc" Feb 03 07:51:38 crc kubenswrapper[4998]: I0203 07:51:38.220170 4998 scope.go:117] "RemoveContainer" containerID="3bc82457ed2f21d318170bb8b57e00036e70903e5d9b8996d1170c91d20a938c" Feb 03 07:51:38 crc kubenswrapper[4998]: I0203 07:51:38.233424 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wz5xc"] Feb 03 07:51:38 crc kubenswrapper[4998]: I0203 07:51:38.238325 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-wz5xc"] Feb 03 07:51:38 crc kubenswrapper[4998]: I0203 07:51:38.254179 4998 scope.go:117] "RemoveContainer" containerID="21c32803cfcfd5fecd8cd696d2c891a5e8ef0604f1ca9be60f0b432374b0bf11" Feb 03 07:51:38 crc kubenswrapper[4998]: I0203 07:51:38.276421 4998 scope.go:117] "RemoveContainer" containerID="28ec430babefe2ea897f60d4536b78b839e0fea8db46cd6de5c2e87d2e0afa06" Feb 03 07:51:38 crc kubenswrapper[4998]: E0203 07:51:38.276901 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28ec430babefe2ea897f60d4536b78b839e0fea8db46cd6de5c2e87d2e0afa06\": container with ID starting with 28ec430babefe2ea897f60d4536b78b839e0fea8db46cd6de5c2e87d2e0afa06 not found: ID does not exist" containerID="28ec430babefe2ea897f60d4536b78b839e0fea8db46cd6de5c2e87d2e0afa06" Feb 03 07:51:38 crc kubenswrapper[4998]: I0203 07:51:38.276945 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28ec430babefe2ea897f60d4536b78b839e0fea8db46cd6de5c2e87d2e0afa06"} err="failed to get container status \"28ec430babefe2ea897f60d4536b78b839e0fea8db46cd6de5c2e87d2e0afa06\": rpc error: code = NotFound desc = could not find container \"28ec430babefe2ea897f60d4536b78b839e0fea8db46cd6de5c2e87d2e0afa06\": container with ID starting with 28ec430babefe2ea897f60d4536b78b839e0fea8db46cd6de5c2e87d2e0afa06 not found: ID does not exist" Feb 03 07:51:38 crc kubenswrapper[4998]: I0203 07:51:38.276974 4998 scope.go:117] "RemoveContainer" containerID="3bc82457ed2f21d318170bb8b57e00036e70903e5d9b8996d1170c91d20a938c" Feb 03 07:51:38 crc kubenswrapper[4998]: E0203 07:51:38.277233 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3bc82457ed2f21d318170bb8b57e00036e70903e5d9b8996d1170c91d20a938c\": container with ID starting with 3bc82457ed2f21d318170bb8b57e00036e70903e5d9b8996d1170c91d20a938c not found: ID does not exist" containerID="3bc82457ed2f21d318170bb8b57e00036e70903e5d9b8996d1170c91d20a938c" Feb 03 07:51:38 crc kubenswrapper[4998]: I0203 07:51:38.277259 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3bc82457ed2f21d318170bb8b57e00036e70903e5d9b8996d1170c91d20a938c"} err="failed to get container status \"3bc82457ed2f21d318170bb8b57e00036e70903e5d9b8996d1170c91d20a938c\": rpc error: code = NotFound desc = could not find container \"3bc82457ed2f21d318170bb8b57e00036e70903e5d9b8996d1170c91d20a938c\": container with ID starting with 3bc82457ed2f21d318170bb8b57e00036e70903e5d9b8996d1170c91d20a938c not found: ID does not exist" Feb 03 07:51:38 crc kubenswrapper[4998]: I0203 07:51:38.277279 4998 scope.go:117] "RemoveContainer" containerID="21c32803cfcfd5fecd8cd696d2c891a5e8ef0604f1ca9be60f0b432374b0bf11" Feb 03 07:51:38 crc kubenswrapper[4998]: E0203 07:51:38.277575 4998 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"21c32803cfcfd5fecd8cd696d2c891a5e8ef0604f1ca9be60f0b432374b0bf11\": container with ID starting with 21c32803cfcfd5fecd8cd696d2c891a5e8ef0604f1ca9be60f0b432374b0bf11 not found: ID does not exist" containerID="21c32803cfcfd5fecd8cd696d2c891a5e8ef0604f1ca9be60f0b432374b0bf11" Feb 03 07:51:38 crc kubenswrapper[4998]: I0203 07:51:38.277634 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21c32803cfcfd5fecd8cd696d2c891a5e8ef0604f1ca9be60f0b432374b0bf11"} err="failed to get container status \"21c32803cfcfd5fecd8cd696d2c891a5e8ef0604f1ca9be60f0b432374b0bf11\": rpc error: code = NotFound desc = could not find container \"21c32803cfcfd5fecd8cd696d2c891a5e8ef0604f1ca9be60f0b432374b0bf11\": container with ID starting with 21c32803cfcfd5fecd8cd696d2c891a5e8ef0604f1ca9be60f0b432374b0bf11 not found: ID does not exist" Feb 03 07:51:38 crc kubenswrapper[4998]: I0203 07:51:38.440178 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8078f9c0-dfb9-4e5a-96b7-0159286933d9" path="/var/lib/kubelet/pods/8078f9c0-dfb9-4e5a-96b7-0159286933d9/volumes" Feb 03 07:51:38 crc kubenswrapper[4998]: I0203 07:51:38.441034 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd09c426-92d3-4728-83a7-2c838a61b10e" path="/var/lib/kubelet/pods/bd09c426-92d3-4728-83a7-2c838a61b10e/volumes" Feb 03 07:51:38 crc kubenswrapper[4998]: I0203 07:51:38.525929 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wzn9w"] Feb 03 07:51:39 crc kubenswrapper[4998]: I0203 07:51:39.208615 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-wzn9w" podUID="944a5090-1b21-4050-a7d5-33e1cd64be97" containerName="registry-server" containerID="cri-o://d68d3046e8e09e0e092e2c9a4926d165d379894d65a09aec3cbb4a8ad2dafb91" gracePeriod=2 Feb 03 07:51:40 crc kubenswrapper[4998]: I0203 07:51:40.217245 4998 generic.go:334] "Generic (PLEG): container finished" podID="944a5090-1b21-4050-a7d5-33e1cd64be97" containerID="d68d3046e8e09e0e092e2c9a4926d165d379894d65a09aec3cbb4a8ad2dafb91" exitCode=0 Feb 03 07:51:40 crc kubenswrapper[4998]: I0203 07:51:40.217307 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wzn9w" event={"ID":"944a5090-1b21-4050-a7d5-33e1cd64be97","Type":"ContainerDied","Data":"d68d3046e8e09e0e092e2c9a4926d165d379894d65a09aec3cbb4a8ad2dafb91"} Feb 03 07:51:40 crc kubenswrapper[4998]: I0203 07:51:40.768401 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wzn9w" Feb 03 07:51:40 crc kubenswrapper[4998]: I0203 07:51:40.814955 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/944a5090-1b21-4050-a7d5-33e1cd64be97-catalog-content\") pod \"944a5090-1b21-4050-a7d5-33e1cd64be97\" (UID: \"944a5090-1b21-4050-a7d5-33e1cd64be97\") " Feb 03 07:51:40 crc kubenswrapper[4998]: I0203 07:51:40.815004 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/944a5090-1b21-4050-a7d5-33e1cd64be97-utilities\") pod \"944a5090-1b21-4050-a7d5-33e1cd64be97\" (UID: \"944a5090-1b21-4050-a7d5-33e1cd64be97\") " Feb 03 07:51:40 crc kubenswrapper[4998]: I0203 07:51:40.815134 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qx56k\" (UniqueName: \"kubernetes.io/projected/944a5090-1b21-4050-a7d5-33e1cd64be97-kube-api-access-qx56k\") pod \"944a5090-1b21-4050-a7d5-33e1cd64be97\" (UID: \"944a5090-1b21-4050-a7d5-33e1cd64be97\") " Feb 03 07:51:40 crc kubenswrapper[4998]: I0203 07:51:40.816038 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/944a5090-1b21-4050-a7d5-33e1cd64be97-utilities" (OuterVolumeSpecName: "utilities") pod "944a5090-1b21-4050-a7d5-33e1cd64be97" (UID: "944a5090-1b21-4050-a7d5-33e1cd64be97"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:51:40 crc kubenswrapper[4998]: I0203 07:51:40.821122 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/944a5090-1b21-4050-a7d5-33e1cd64be97-kube-api-access-qx56k" (OuterVolumeSpecName: "kube-api-access-qx56k") pod "944a5090-1b21-4050-a7d5-33e1cd64be97" (UID: "944a5090-1b21-4050-a7d5-33e1cd64be97"). InnerVolumeSpecName "kube-api-access-qx56k". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:51:40 crc kubenswrapper[4998]: I0203 07:51:40.917242 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qx56k\" (UniqueName: \"kubernetes.io/projected/944a5090-1b21-4050-a7d5-33e1cd64be97-kube-api-access-qx56k\") on node \"crc\" DevicePath \"\"" Feb 03 07:51:40 crc kubenswrapper[4998]: I0203 07:51:40.917280 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/944a5090-1b21-4050-a7d5-33e1cd64be97-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:51:40 crc kubenswrapper[4998]: I0203 07:51:40.944753 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/944a5090-1b21-4050-a7d5-33e1cd64be97-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "944a5090-1b21-4050-a7d5-33e1cd64be97" (UID: "944a5090-1b21-4050-a7d5-33e1cd64be97"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:51:41 crc kubenswrapper[4998]: I0203 07:51:41.018495 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/944a5090-1b21-4050-a7d5-33e1cd64be97-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:51:41 crc kubenswrapper[4998]: I0203 07:51:41.229752 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wzn9w" event={"ID":"944a5090-1b21-4050-a7d5-33e1cd64be97","Type":"ContainerDied","Data":"7a00bf2b94b27e063e7d7ec9336a28a0ec21085de2c68415d683e2c0a61ae41b"} Feb 03 07:51:41 crc kubenswrapper[4998]: I0203 07:51:41.229839 4998 scope.go:117] "RemoveContainer" containerID="d68d3046e8e09e0e092e2c9a4926d165d379894d65a09aec3cbb4a8ad2dafb91" Feb 03 07:51:41 crc kubenswrapper[4998]: I0203 07:51:41.229954 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wzn9w" Feb 03 07:51:41 crc kubenswrapper[4998]: I0203 07:51:41.252334 4998 scope.go:117] "RemoveContainer" containerID="29dad3f7850ec1a75fa98db1d93c8e72f7f09f80417ff2342626ecd70c924161" Feb 03 07:51:41 crc kubenswrapper[4998]: I0203 07:51:41.264919 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wzn9w"] Feb 03 07:51:41 crc kubenswrapper[4998]: I0203 07:51:41.273025 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-wzn9w"] Feb 03 07:51:41 crc kubenswrapper[4998]: I0203 07:51:41.286973 4998 scope.go:117] "RemoveContainer" containerID="2b5e3ba75ddcd3e43e3471238dc91e2dbdf4753a7400af9ece055cd89bc88cb3" Feb 03 07:51:42 crc kubenswrapper[4998]: I0203 07:51:42.437033 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="944a5090-1b21-4050-a7d5-33e1cd64be97" path="/var/lib/kubelet/pods/944a5090-1b21-4050-a7d5-33e1cd64be97/volumes" Feb 03 07:51:43 crc kubenswrapper[4998]: I0203 07:51:43.427156 4998 scope.go:117] "RemoveContainer" containerID="f71853e9e2d9cfb53b9308a2cb0b35d9052820e2de6fb1b575190788cc7cc881" Feb 03 07:51:43 crc kubenswrapper[4998]: E0203 07:51:43.427416 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:51:57 crc kubenswrapper[4998]: I0203 07:51:57.427312 4998 scope.go:117] "RemoveContainer" containerID="f71853e9e2d9cfb53b9308a2cb0b35d9052820e2de6fb1b575190788cc7cc881" Feb 03 07:51:57 crc kubenswrapper[4998]: E0203 07:51:57.427931 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:52:11 crc kubenswrapper[4998]: I0203 07:52:11.427256 4998 scope.go:117] "RemoveContainer" containerID="f71853e9e2d9cfb53b9308a2cb0b35d9052820e2de6fb1b575190788cc7cc881" Feb 03 07:52:11 crc kubenswrapper[4998]: E0203 07:52:11.428148 
4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:52:22 crc kubenswrapper[4998]: I0203 07:52:22.447663 4998 scope.go:117] "RemoveContainer" containerID="f71853e9e2d9cfb53b9308a2cb0b35d9052820e2de6fb1b575190788cc7cc881" Feb 03 07:52:22 crc kubenswrapper[4998]: E0203 07:52:22.449005 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:52:36 crc kubenswrapper[4998]: I0203 07:52:36.427130 4998 scope.go:117] "RemoveContainer" containerID="f71853e9e2d9cfb53b9308a2cb0b35d9052820e2de6fb1b575190788cc7cc881" Feb 03 07:52:36 crc kubenswrapper[4998]: E0203 07:52:36.427845 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:52:47 crc kubenswrapper[4998]: I0203 07:52:47.427315 4998 scope.go:117] "RemoveContainer" containerID="f71853e9e2d9cfb53b9308a2cb0b35d9052820e2de6fb1b575190788cc7cc881" Feb 03 07:52:47 crc kubenswrapper[4998]: E0203 07:52:47.428114 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:52:58 crc kubenswrapper[4998]: I0203 07:52:58.427193 4998 scope.go:117] "RemoveContainer" containerID="f71853e9e2d9cfb53b9308a2cb0b35d9052820e2de6fb1b575190788cc7cc881" Feb 03 07:52:58 crc kubenswrapper[4998]: E0203 07:52:58.427948 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:53:09 crc kubenswrapper[4998]: I0203 07:53:09.428315 4998 scope.go:117] "RemoveContainer" containerID="f71853e9e2d9cfb53b9308a2cb0b35d9052820e2de6fb1b575190788cc7cc881" Feb 03 07:53:09 crc kubenswrapper[4998]: E0203 07:53:09.430267 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:53:21 crc kubenswrapper[4998]: I0203 07:53:21.427413 4998 scope.go:117] "RemoveContainer" containerID="f71853e9e2d9cfb53b9308a2cb0b35d9052820e2de6fb1b575190788cc7cc881" Feb 03 07:53:21 crc kubenswrapper[4998]: E0203 07:53:21.428142 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:53:36 crc kubenswrapper[4998]: I0203 07:53:36.427382 4998 scope.go:117] "RemoveContainer" containerID="f71853e9e2d9cfb53b9308a2cb0b35d9052820e2de6fb1b575190788cc7cc881" Feb 03 07:53:36 crc kubenswrapper[4998]: E0203 07:53:36.428176 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:53:50 crc kubenswrapper[4998]: I0203 07:53:50.427629 4998 scope.go:117] "RemoveContainer" containerID="f71853e9e2d9cfb53b9308a2cb0b35d9052820e2de6fb1b575190788cc7cc881" Feb 03 07:53:50 crc kubenswrapper[4998]: E0203 07:53:50.428430 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:54:01 crc kubenswrapper[4998]: I0203 07:54:01.426956 4998 scope.go:117] "RemoveContainer" containerID="f71853e9e2d9cfb53b9308a2cb0b35d9052820e2de6fb1b575190788cc7cc881" Feb 03 07:54:01 crc kubenswrapper[4998]: E0203 07:54:01.427754 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 07:54:13 crc kubenswrapper[4998]: I0203 07:54:13.427864 4998 scope.go:117] "RemoveContainer" containerID="f71853e9e2d9cfb53b9308a2cb0b35d9052820e2de6fb1b575190788cc7cc881" Feb 03 07:54:14 crc kubenswrapper[4998]: I0203 07:54:14.337042 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerStarted","Data":"9026719ad829d92397bd153daa5c6881968d639a8c0ca3ba0951b7a6674c8a7b"} Feb 03 07:56:42 crc kubenswrapper[4998]: I0203 07:56:42.754105 4998 patch_prober.go:28] 
interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:56:42 crc kubenswrapper[4998]: I0203 07:56:42.755927 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:57:12 crc kubenswrapper[4998]: I0203 07:57:12.753921 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:57:12 crc kubenswrapper[4998]: I0203 07:57:12.754838 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:57:42 crc kubenswrapper[4998]: I0203 07:57:42.754809 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:57:42 crc kubenswrapper[4998]: I0203 07:57:42.755448 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:57:42 crc kubenswrapper[4998]: I0203 07:57:42.755528 4998 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" Feb 03 07:57:42 crc kubenswrapper[4998]: I0203 07:57:42.756189 4998 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9026719ad829d92397bd153daa5c6881968d639a8c0ca3ba0951b7a6674c8a7b"} pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 03 07:57:42 crc kubenswrapper[4998]: I0203 07:57:42.756257 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" containerID="cri-o://9026719ad829d92397bd153daa5c6881968d639a8c0ca3ba0951b7a6674c8a7b" gracePeriod=600 Feb 03 07:57:42 crc kubenswrapper[4998]: I0203 07:57:42.902275 4998 generic.go:334] "Generic (PLEG): container finished" podID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerID="9026719ad829d92397bd153daa5c6881968d639a8c0ca3ba0951b7a6674c8a7b" exitCode=0 Feb 03 07:57:42 crc kubenswrapper[4998]: I0203 07:57:42.902341 4998 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerDied","Data":"9026719ad829d92397bd153daa5c6881968d639a8c0ca3ba0951b7a6674c8a7b"} Feb 03 07:57:42 crc kubenswrapper[4998]: I0203 07:57:42.902392 4998 scope.go:117] "RemoveContainer" containerID="f71853e9e2d9cfb53b9308a2cb0b35d9052820e2de6fb1b575190788cc7cc881" Feb 03 07:57:43 crc kubenswrapper[4998]: I0203 07:57:43.910336 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerStarted","Data":"59350f56fc83c39f196b6f8d69fba3062b7aab975006aa0be70d4a3ac03a44b6"} Feb 03 08:00:00 crc kubenswrapper[4998]: I0203 08:00:00.174730 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501760-mfksh"] Feb 03 08:00:00 crc kubenswrapper[4998]: E0203 08:00:00.175657 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd09c426-92d3-4728-83a7-2c838a61b10e" containerName="extract-utilities" Feb 03 08:00:00 crc kubenswrapper[4998]: I0203 08:00:00.175674 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd09c426-92d3-4728-83a7-2c838a61b10e" containerName="extract-utilities" Feb 03 08:00:00 crc kubenswrapper[4998]: E0203 08:00:00.175687 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="944a5090-1b21-4050-a7d5-33e1cd64be97" containerName="extract-content" Feb 03 08:00:00 crc kubenswrapper[4998]: I0203 08:00:00.175695 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="944a5090-1b21-4050-a7d5-33e1cd64be97" containerName="extract-content" Feb 03 08:00:00 crc kubenswrapper[4998]: E0203 08:00:00.175715 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8078f9c0-dfb9-4e5a-96b7-0159286933d9" containerName="extract-utilities" Feb 03 08:00:00 crc kubenswrapper[4998]: I0203 08:00:00.175724 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="8078f9c0-dfb9-4e5a-96b7-0159286933d9" containerName="extract-utilities" Feb 03 08:00:00 crc kubenswrapper[4998]: E0203 08:00:00.175735 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd09c426-92d3-4728-83a7-2c838a61b10e" containerName="extract-content" Feb 03 08:00:00 crc kubenswrapper[4998]: I0203 08:00:00.175743 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd09c426-92d3-4728-83a7-2c838a61b10e" containerName="extract-content" Feb 03 08:00:00 crc kubenswrapper[4998]: E0203 08:00:00.175754 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd09c426-92d3-4728-83a7-2c838a61b10e" containerName="registry-server" Feb 03 08:00:00 crc kubenswrapper[4998]: I0203 08:00:00.175762 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd09c426-92d3-4728-83a7-2c838a61b10e" containerName="registry-server" Feb 03 08:00:00 crc kubenswrapper[4998]: E0203 08:00:00.175801 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8078f9c0-dfb9-4e5a-96b7-0159286933d9" containerName="extract-content" Feb 03 08:00:00 crc kubenswrapper[4998]: I0203 08:00:00.175809 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="8078f9c0-dfb9-4e5a-96b7-0159286933d9" containerName="extract-content" Feb 03 08:00:00 crc kubenswrapper[4998]: E0203 08:00:00.175827 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="944a5090-1b21-4050-a7d5-33e1cd64be97" containerName="extract-utilities" Feb 03 08:00:00 crc 
kubenswrapper[4998]: I0203 08:00:00.175834 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="944a5090-1b21-4050-a7d5-33e1cd64be97" containerName="extract-utilities" Feb 03 08:00:00 crc kubenswrapper[4998]: E0203 08:00:00.175851 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8078f9c0-dfb9-4e5a-96b7-0159286933d9" containerName="registry-server" Feb 03 08:00:00 crc kubenswrapper[4998]: I0203 08:00:00.175858 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="8078f9c0-dfb9-4e5a-96b7-0159286933d9" containerName="registry-server" Feb 03 08:00:00 crc kubenswrapper[4998]: E0203 08:00:00.175875 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="944a5090-1b21-4050-a7d5-33e1cd64be97" containerName="registry-server" Feb 03 08:00:00 crc kubenswrapper[4998]: I0203 08:00:00.175884 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="944a5090-1b21-4050-a7d5-33e1cd64be97" containerName="registry-server" Feb 03 08:00:00 crc kubenswrapper[4998]: I0203 08:00:00.176036 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="944a5090-1b21-4050-a7d5-33e1cd64be97" containerName="registry-server" Feb 03 08:00:00 crc kubenswrapper[4998]: I0203 08:00:00.176056 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd09c426-92d3-4728-83a7-2c838a61b10e" containerName="registry-server" Feb 03 08:00:00 crc kubenswrapper[4998]: I0203 08:00:00.176078 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="8078f9c0-dfb9-4e5a-96b7-0159286933d9" containerName="registry-server" Feb 03 08:00:00 crc kubenswrapper[4998]: I0203 08:00:00.176637 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501760-mfksh" Feb 03 08:00:00 crc kubenswrapper[4998]: I0203 08:00:00.179354 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 03 08:00:00 crc kubenswrapper[4998]: I0203 08:00:00.179416 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 03 08:00:00 crc kubenswrapper[4998]: I0203 08:00:00.191369 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501760-mfksh"] Feb 03 08:00:00 crc kubenswrapper[4998]: I0203 08:00:00.270396 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/60117665-bb1e-46e2-b0c5-0eac0908882e-config-volume\") pod \"collect-profiles-29501760-mfksh\" (UID: \"60117665-bb1e-46e2-b0c5-0eac0908882e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501760-mfksh" Feb 03 08:00:00 crc kubenswrapper[4998]: I0203 08:00:00.270441 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/60117665-bb1e-46e2-b0c5-0eac0908882e-secret-volume\") pod \"collect-profiles-29501760-mfksh\" (UID: \"60117665-bb1e-46e2-b0c5-0eac0908882e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501760-mfksh" Feb 03 08:00:00 crc kubenswrapper[4998]: I0203 08:00:00.270529 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wlc5f\" (UniqueName: \"kubernetes.io/projected/60117665-bb1e-46e2-b0c5-0eac0908882e-kube-api-access-wlc5f\") 
pod \"collect-profiles-29501760-mfksh\" (UID: \"60117665-bb1e-46e2-b0c5-0eac0908882e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501760-mfksh" Feb 03 08:00:00 crc kubenswrapper[4998]: I0203 08:00:00.371527 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/60117665-bb1e-46e2-b0c5-0eac0908882e-config-volume\") pod \"collect-profiles-29501760-mfksh\" (UID: \"60117665-bb1e-46e2-b0c5-0eac0908882e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501760-mfksh" Feb 03 08:00:00 crc kubenswrapper[4998]: I0203 08:00:00.371589 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/60117665-bb1e-46e2-b0c5-0eac0908882e-secret-volume\") pod \"collect-profiles-29501760-mfksh\" (UID: \"60117665-bb1e-46e2-b0c5-0eac0908882e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501760-mfksh" Feb 03 08:00:00 crc kubenswrapper[4998]: I0203 08:00:00.371628 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wlc5f\" (UniqueName: \"kubernetes.io/projected/60117665-bb1e-46e2-b0c5-0eac0908882e-kube-api-access-wlc5f\") pod \"collect-profiles-29501760-mfksh\" (UID: \"60117665-bb1e-46e2-b0c5-0eac0908882e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501760-mfksh" Feb 03 08:00:00 crc kubenswrapper[4998]: I0203 08:00:00.372701 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/60117665-bb1e-46e2-b0c5-0eac0908882e-config-volume\") pod \"collect-profiles-29501760-mfksh\" (UID: \"60117665-bb1e-46e2-b0c5-0eac0908882e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501760-mfksh" Feb 03 08:00:00 crc kubenswrapper[4998]: I0203 08:00:00.383768 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/60117665-bb1e-46e2-b0c5-0eac0908882e-secret-volume\") pod \"collect-profiles-29501760-mfksh\" (UID: \"60117665-bb1e-46e2-b0c5-0eac0908882e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501760-mfksh" Feb 03 08:00:00 crc kubenswrapper[4998]: I0203 08:00:00.390674 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wlc5f\" (UniqueName: \"kubernetes.io/projected/60117665-bb1e-46e2-b0c5-0eac0908882e-kube-api-access-wlc5f\") pod \"collect-profiles-29501760-mfksh\" (UID: \"60117665-bb1e-46e2-b0c5-0eac0908882e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501760-mfksh" Feb 03 08:00:00 crc kubenswrapper[4998]: I0203 08:00:00.495237 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501760-mfksh" Feb 03 08:00:00 crc kubenswrapper[4998]: I0203 08:00:00.927282 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501760-mfksh"] Feb 03 08:00:01 crc kubenswrapper[4998]: I0203 08:00:01.391381 4998 generic.go:334] "Generic (PLEG): container finished" podID="60117665-bb1e-46e2-b0c5-0eac0908882e" containerID="0655f84869b0708f9937ecebdf93f6bc9999c3516b089f5a3725f4d409840c75" exitCode=0 Feb 03 08:00:01 crc kubenswrapper[4998]: I0203 08:00:01.391986 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501760-mfksh" event={"ID":"60117665-bb1e-46e2-b0c5-0eac0908882e","Type":"ContainerDied","Data":"0655f84869b0708f9937ecebdf93f6bc9999c3516b089f5a3725f4d409840c75"} Feb 03 08:00:01 crc kubenswrapper[4998]: I0203 08:00:01.392078 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501760-mfksh" event={"ID":"60117665-bb1e-46e2-b0c5-0eac0908882e","Type":"ContainerStarted","Data":"6251ed0470a40348c34c0e1a026e5d9afa015a09dfcd32b1034411ad34071654"} Feb 03 08:00:02 crc kubenswrapper[4998]: I0203 08:00:02.732199 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501760-mfksh" Feb 03 08:00:02 crc kubenswrapper[4998]: I0203 08:00:02.797487 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/60117665-bb1e-46e2-b0c5-0eac0908882e-config-volume\") pod \"60117665-bb1e-46e2-b0c5-0eac0908882e\" (UID: \"60117665-bb1e-46e2-b0c5-0eac0908882e\") " Feb 03 08:00:02 crc kubenswrapper[4998]: I0203 08:00:02.797534 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/60117665-bb1e-46e2-b0c5-0eac0908882e-secret-volume\") pod \"60117665-bb1e-46e2-b0c5-0eac0908882e\" (UID: \"60117665-bb1e-46e2-b0c5-0eac0908882e\") " Feb 03 08:00:02 crc kubenswrapper[4998]: I0203 08:00:02.797604 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wlc5f\" (UniqueName: \"kubernetes.io/projected/60117665-bb1e-46e2-b0c5-0eac0908882e-kube-api-access-wlc5f\") pod \"60117665-bb1e-46e2-b0c5-0eac0908882e\" (UID: \"60117665-bb1e-46e2-b0c5-0eac0908882e\") " Feb 03 08:00:02 crc kubenswrapper[4998]: I0203 08:00:02.798319 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/60117665-bb1e-46e2-b0c5-0eac0908882e-config-volume" (OuterVolumeSpecName: "config-volume") pod "60117665-bb1e-46e2-b0c5-0eac0908882e" (UID: "60117665-bb1e-46e2-b0c5-0eac0908882e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:00:02 crc kubenswrapper[4998]: I0203 08:00:02.802713 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60117665-bb1e-46e2-b0c5-0eac0908882e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "60117665-bb1e-46e2-b0c5-0eac0908882e" (UID: "60117665-bb1e-46e2-b0c5-0eac0908882e"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:00:02 crc kubenswrapper[4998]: I0203 08:00:02.803313 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60117665-bb1e-46e2-b0c5-0eac0908882e-kube-api-access-wlc5f" (OuterVolumeSpecName: "kube-api-access-wlc5f") pod "60117665-bb1e-46e2-b0c5-0eac0908882e" (UID: "60117665-bb1e-46e2-b0c5-0eac0908882e"). InnerVolumeSpecName "kube-api-access-wlc5f". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:00:02 crc kubenswrapper[4998]: I0203 08:00:02.898813 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wlc5f\" (UniqueName: \"kubernetes.io/projected/60117665-bb1e-46e2-b0c5-0eac0908882e-kube-api-access-wlc5f\") on node \"crc\" DevicePath \"\"" Feb 03 08:00:02 crc kubenswrapper[4998]: I0203 08:00:02.898849 4998 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/60117665-bb1e-46e2-b0c5-0eac0908882e-config-volume\") on node \"crc\" DevicePath \"\"" Feb 03 08:00:02 crc kubenswrapper[4998]: I0203 08:00:02.898860 4998 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/60117665-bb1e-46e2-b0c5-0eac0908882e-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 03 08:00:03 crc kubenswrapper[4998]: I0203 08:00:03.404835 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501760-mfksh" event={"ID":"60117665-bb1e-46e2-b0c5-0eac0908882e","Type":"ContainerDied","Data":"6251ed0470a40348c34c0e1a026e5d9afa015a09dfcd32b1034411ad34071654"} Feb 03 08:00:03 crc kubenswrapper[4998]: I0203 08:00:03.404872 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6251ed0470a40348c34c0e1a026e5d9afa015a09dfcd32b1034411ad34071654" Feb 03 08:00:03 crc kubenswrapper[4998]: I0203 08:00:03.404880 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501760-mfksh" Feb 03 08:00:03 crc kubenswrapper[4998]: I0203 08:00:03.807308 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501715-8cnh4"] Feb 03 08:00:03 crc kubenswrapper[4998]: I0203 08:00:03.814036 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501715-8cnh4"] Feb 03 08:00:04 crc kubenswrapper[4998]: I0203 08:00:04.437526 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e2485eb-fa88-4a87-b195-1b3dffc5075d" path="/var/lib/kubelet/pods/9e2485eb-fa88-4a87-b195-1b3dffc5075d/volumes" Feb 03 08:00:07 crc kubenswrapper[4998]: I0203 08:00:07.001085 4998 scope.go:117] "RemoveContainer" containerID="9af62ff8931479f3b4eabeb56f5dab8491a57e44911d3154f347fc10709e499e" Feb 03 08:00:12 crc kubenswrapper[4998]: I0203 08:00:12.754099 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 08:00:12 crc kubenswrapper[4998]: I0203 08:00:12.754603 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 08:00:42 crc kubenswrapper[4998]: I0203 08:00:42.754078 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 08:00:42 crc kubenswrapper[4998]: I0203 08:00:42.754692 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 08:00:55 crc kubenswrapper[4998]: I0203 08:00:55.183610 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-kg5fq"] Feb 03 08:00:55 crc kubenswrapper[4998]: E0203 08:00:55.185040 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60117665-bb1e-46e2-b0c5-0eac0908882e" containerName="collect-profiles" Feb 03 08:00:55 crc kubenswrapper[4998]: I0203 08:00:55.185066 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="60117665-bb1e-46e2-b0c5-0eac0908882e" containerName="collect-profiles" Feb 03 08:00:55 crc kubenswrapper[4998]: I0203 08:00:55.185281 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="60117665-bb1e-46e2-b0c5-0eac0908882e" containerName="collect-profiles" Feb 03 08:00:55 crc kubenswrapper[4998]: I0203 08:00:55.186610 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-kg5fq" Feb 03 08:00:55 crc kubenswrapper[4998]: I0203 08:00:55.197096 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kg5fq"] Feb 03 08:00:55 crc kubenswrapper[4998]: I0203 08:00:55.341525 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjz8q\" (UniqueName: \"kubernetes.io/projected/cfca0965-5565-4f00-bbf6-dad9ba724544-kube-api-access-qjz8q\") pod \"certified-operators-kg5fq\" (UID: \"cfca0965-5565-4f00-bbf6-dad9ba724544\") " pod="openshift-marketplace/certified-operators-kg5fq" Feb 03 08:00:55 crc kubenswrapper[4998]: I0203 08:00:55.341651 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cfca0965-5565-4f00-bbf6-dad9ba724544-utilities\") pod \"certified-operators-kg5fq\" (UID: \"cfca0965-5565-4f00-bbf6-dad9ba724544\") " pod="openshift-marketplace/certified-operators-kg5fq" Feb 03 08:00:55 crc kubenswrapper[4998]: I0203 08:00:55.342149 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cfca0965-5565-4f00-bbf6-dad9ba724544-catalog-content\") pod \"certified-operators-kg5fq\" (UID: \"cfca0965-5565-4f00-bbf6-dad9ba724544\") " pod="openshift-marketplace/certified-operators-kg5fq" Feb 03 08:00:55 crc kubenswrapper[4998]: I0203 08:00:55.443448 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cfca0965-5565-4f00-bbf6-dad9ba724544-utilities\") pod \"certified-operators-kg5fq\" (UID: \"cfca0965-5565-4f00-bbf6-dad9ba724544\") " pod="openshift-marketplace/certified-operators-kg5fq" Feb 03 08:00:55 crc kubenswrapper[4998]: I0203 08:00:55.443593 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cfca0965-5565-4f00-bbf6-dad9ba724544-catalog-content\") pod \"certified-operators-kg5fq\" (UID: \"cfca0965-5565-4f00-bbf6-dad9ba724544\") " pod="openshift-marketplace/certified-operators-kg5fq" Feb 03 08:00:55 crc kubenswrapper[4998]: I0203 08:00:55.443715 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjz8q\" (UniqueName: \"kubernetes.io/projected/cfca0965-5565-4f00-bbf6-dad9ba724544-kube-api-access-qjz8q\") pod \"certified-operators-kg5fq\" (UID: \"cfca0965-5565-4f00-bbf6-dad9ba724544\") " pod="openshift-marketplace/certified-operators-kg5fq" Feb 03 08:00:55 crc kubenswrapper[4998]: I0203 08:00:55.444248 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cfca0965-5565-4f00-bbf6-dad9ba724544-utilities\") pod \"certified-operators-kg5fq\" (UID: \"cfca0965-5565-4f00-bbf6-dad9ba724544\") " pod="openshift-marketplace/certified-operators-kg5fq" Feb 03 08:00:55 crc kubenswrapper[4998]: I0203 08:00:55.444288 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cfca0965-5565-4f00-bbf6-dad9ba724544-catalog-content\") pod \"certified-operators-kg5fq\" (UID: \"cfca0965-5565-4f00-bbf6-dad9ba724544\") " pod="openshift-marketplace/certified-operators-kg5fq" Feb 03 08:00:55 crc kubenswrapper[4998]: I0203 08:00:55.464596 4998 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-qjz8q\" (UniqueName: \"kubernetes.io/projected/cfca0965-5565-4f00-bbf6-dad9ba724544-kube-api-access-qjz8q\") pod \"certified-operators-kg5fq\" (UID: \"cfca0965-5565-4f00-bbf6-dad9ba724544\") " pod="openshift-marketplace/certified-operators-kg5fq" Feb 03 08:00:55 crc kubenswrapper[4998]: I0203 08:00:55.524168 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kg5fq" Feb 03 08:00:55 crc kubenswrapper[4998]: I0203 08:00:55.982861 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kg5fq"] Feb 03 08:00:56 crc kubenswrapper[4998]: I0203 08:00:56.771277 4998 generic.go:334] "Generic (PLEG): container finished" podID="cfca0965-5565-4f00-bbf6-dad9ba724544" containerID="7ed0938363b8683b04d33357893e2306db343ddcff61c22cf435a7f0da3e6ac4" exitCode=0 Feb 03 08:00:56 crc kubenswrapper[4998]: I0203 08:00:56.771348 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kg5fq" event={"ID":"cfca0965-5565-4f00-bbf6-dad9ba724544","Type":"ContainerDied","Data":"7ed0938363b8683b04d33357893e2306db343ddcff61c22cf435a7f0da3e6ac4"} Feb 03 08:00:56 crc kubenswrapper[4998]: I0203 08:00:56.771423 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kg5fq" event={"ID":"cfca0965-5565-4f00-bbf6-dad9ba724544","Type":"ContainerStarted","Data":"5502667e600f9e0b8a531a5bcaf346253252b8492583efa0b8cf1e25d737eb10"} Feb 03 08:00:56 crc kubenswrapper[4998]: I0203 08:00:56.773287 4998 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 03 08:00:57 crc kubenswrapper[4998]: I0203 08:00:57.781038 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kg5fq" event={"ID":"cfca0965-5565-4f00-bbf6-dad9ba724544","Type":"ContainerDied","Data":"6675a18eecf0e1ead44eaaf6a9f289be1fe16e5d83ab5597679112fa33ca4163"} Feb 03 08:00:57 crc kubenswrapper[4998]: I0203 08:00:57.781939 4998 generic.go:334] "Generic (PLEG): container finished" podID="cfca0965-5565-4f00-bbf6-dad9ba724544" containerID="6675a18eecf0e1ead44eaaf6a9f289be1fe16e5d83ab5597679112fa33ca4163" exitCode=0 Feb 03 08:00:59 crc kubenswrapper[4998]: I0203 08:00:59.804088 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kg5fq" event={"ID":"cfca0965-5565-4f00-bbf6-dad9ba724544","Type":"ContainerStarted","Data":"9e82851a53b7318e550cf37fa66455d856c13c22bde3c2449bda2d6bc25da76b"} Feb 03 08:00:59 crc kubenswrapper[4998]: I0203 08:00:59.822028 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-kg5fq" podStartSLOduration=2.321718971 podStartE2EDuration="4.822012321s" podCreationTimestamp="2026-02-03 08:00:55 +0000 UTC" firstStartedPulling="2026-02-03 08:00:56.773025952 +0000 UTC m=+4495.059719758" lastFinishedPulling="2026-02-03 08:00:59.273319302 +0000 UTC m=+4497.560013108" observedRunningTime="2026-02-03 08:00:59.818701317 +0000 UTC m=+4498.105395133" watchObservedRunningTime="2026-02-03 08:00:59.822012321 +0000 UTC m=+4498.108706127" Feb 03 08:01:05 crc kubenswrapper[4998]: I0203 08:01:05.524601 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-kg5fq" Feb 03 08:01:05 crc kubenswrapper[4998]: I0203 08:01:05.525280 4998 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-kg5fq" Feb 03 08:01:05 crc kubenswrapper[4998]: I0203 08:01:05.568466 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-kg5fq" Feb 03 08:01:05 crc kubenswrapper[4998]: I0203 08:01:05.909484 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-kg5fq" Feb 03 08:01:05 crc kubenswrapper[4998]: I0203 08:01:05.967717 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-kg5fq"] Feb 03 08:01:07 crc kubenswrapper[4998]: I0203 08:01:07.871277 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-kg5fq" podUID="cfca0965-5565-4f00-bbf6-dad9ba724544" containerName="registry-server" containerID="cri-o://9e82851a53b7318e550cf37fa66455d856c13c22bde3c2449bda2d6bc25da76b" gracePeriod=2 Feb 03 08:01:08 crc kubenswrapper[4998]: I0203 08:01:08.791299 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kg5fq" Feb 03 08:01:08 crc kubenswrapper[4998]: I0203 08:01:08.878369 4998 generic.go:334] "Generic (PLEG): container finished" podID="cfca0965-5565-4f00-bbf6-dad9ba724544" containerID="9e82851a53b7318e550cf37fa66455d856c13c22bde3c2449bda2d6bc25da76b" exitCode=0 Feb 03 08:01:08 crc kubenswrapper[4998]: I0203 08:01:08.878412 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kg5fq" event={"ID":"cfca0965-5565-4f00-bbf6-dad9ba724544","Type":"ContainerDied","Data":"9e82851a53b7318e550cf37fa66455d856c13c22bde3c2449bda2d6bc25da76b"} Feb 03 08:01:08 crc kubenswrapper[4998]: I0203 08:01:08.878435 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kg5fq" event={"ID":"cfca0965-5565-4f00-bbf6-dad9ba724544","Type":"ContainerDied","Data":"5502667e600f9e0b8a531a5bcaf346253252b8492583efa0b8cf1e25d737eb10"} Feb 03 08:01:08 crc kubenswrapper[4998]: I0203 08:01:08.878434 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-kg5fq" Feb 03 08:01:08 crc kubenswrapper[4998]: I0203 08:01:08.878449 4998 scope.go:117] "RemoveContainer" containerID="9e82851a53b7318e550cf37fa66455d856c13c22bde3c2449bda2d6bc25da76b" Feb 03 08:01:08 crc kubenswrapper[4998]: I0203 08:01:08.893382 4998 scope.go:117] "RemoveContainer" containerID="6675a18eecf0e1ead44eaaf6a9f289be1fe16e5d83ab5597679112fa33ca4163" Feb 03 08:01:08 crc kubenswrapper[4998]: I0203 08:01:08.912150 4998 scope.go:117] "RemoveContainer" containerID="7ed0938363b8683b04d33357893e2306db343ddcff61c22cf435a7f0da3e6ac4" Feb 03 08:01:08 crc kubenswrapper[4998]: I0203 08:01:08.930082 4998 scope.go:117] "RemoveContainer" containerID="9e82851a53b7318e550cf37fa66455d856c13c22bde3c2449bda2d6bc25da76b" Feb 03 08:01:08 crc kubenswrapper[4998]: E0203 08:01:08.930514 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e82851a53b7318e550cf37fa66455d856c13c22bde3c2449bda2d6bc25da76b\": container with ID starting with 9e82851a53b7318e550cf37fa66455d856c13c22bde3c2449bda2d6bc25da76b not found: ID does not exist" containerID="9e82851a53b7318e550cf37fa66455d856c13c22bde3c2449bda2d6bc25da76b" Feb 03 08:01:08 crc kubenswrapper[4998]: I0203 08:01:08.930556 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e82851a53b7318e550cf37fa66455d856c13c22bde3c2449bda2d6bc25da76b"} err="failed to get container status \"9e82851a53b7318e550cf37fa66455d856c13c22bde3c2449bda2d6bc25da76b\": rpc error: code = NotFound desc = could not find container \"9e82851a53b7318e550cf37fa66455d856c13c22bde3c2449bda2d6bc25da76b\": container with ID starting with 9e82851a53b7318e550cf37fa66455d856c13c22bde3c2449bda2d6bc25da76b not found: ID does not exist" Feb 03 08:01:08 crc kubenswrapper[4998]: I0203 08:01:08.930584 4998 scope.go:117] "RemoveContainer" containerID="6675a18eecf0e1ead44eaaf6a9f289be1fe16e5d83ab5597679112fa33ca4163" Feb 03 08:01:08 crc kubenswrapper[4998]: E0203 08:01:08.930927 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6675a18eecf0e1ead44eaaf6a9f289be1fe16e5d83ab5597679112fa33ca4163\": container with ID starting with 6675a18eecf0e1ead44eaaf6a9f289be1fe16e5d83ab5597679112fa33ca4163 not found: ID does not exist" containerID="6675a18eecf0e1ead44eaaf6a9f289be1fe16e5d83ab5597679112fa33ca4163" Feb 03 08:01:08 crc kubenswrapper[4998]: I0203 08:01:08.930956 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6675a18eecf0e1ead44eaaf6a9f289be1fe16e5d83ab5597679112fa33ca4163"} err="failed to get container status \"6675a18eecf0e1ead44eaaf6a9f289be1fe16e5d83ab5597679112fa33ca4163\": rpc error: code = NotFound desc = could not find container \"6675a18eecf0e1ead44eaaf6a9f289be1fe16e5d83ab5597679112fa33ca4163\": container with ID starting with 6675a18eecf0e1ead44eaaf6a9f289be1fe16e5d83ab5597679112fa33ca4163 not found: ID does not exist" Feb 03 08:01:08 crc kubenswrapper[4998]: I0203 08:01:08.930971 4998 scope.go:117] "RemoveContainer" containerID="7ed0938363b8683b04d33357893e2306db343ddcff61c22cf435a7f0da3e6ac4" Feb 03 08:01:08 crc kubenswrapper[4998]: E0203 08:01:08.931280 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ed0938363b8683b04d33357893e2306db343ddcff61c22cf435a7f0da3e6ac4\": container with ID starting 
with 7ed0938363b8683b04d33357893e2306db343ddcff61c22cf435a7f0da3e6ac4 not found: ID does not exist" containerID="7ed0938363b8683b04d33357893e2306db343ddcff61c22cf435a7f0da3e6ac4" Feb 03 08:01:08 crc kubenswrapper[4998]: I0203 08:01:08.931305 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ed0938363b8683b04d33357893e2306db343ddcff61c22cf435a7f0da3e6ac4"} err="failed to get container status \"7ed0938363b8683b04d33357893e2306db343ddcff61c22cf435a7f0da3e6ac4\": rpc error: code = NotFound desc = could not find container \"7ed0938363b8683b04d33357893e2306db343ddcff61c22cf435a7f0da3e6ac4\": container with ID starting with 7ed0938363b8683b04d33357893e2306db343ddcff61c22cf435a7f0da3e6ac4 not found: ID does not exist" Feb 03 08:01:08 crc kubenswrapper[4998]: I0203 08:01:08.948101 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cfca0965-5565-4f00-bbf6-dad9ba724544-utilities\") pod \"cfca0965-5565-4f00-bbf6-dad9ba724544\" (UID: \"cfca0965-5565-4f00-bbf6-dad9ba724544\") " Feb 03 08:01:08 crc kubenswrapper[4998]: I0203 08:01:08.948151 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qjz8q\" (UniqueName: \"kubernetes.io/projected/cfca0965-5565-4f00-bbf6-dad9ba724544-kube-api-access-qjz8q\") pod \"cfca0965-5565-4f00-bbf6-dad9ba724544\" (UID: \"cfca0965-5565-4f00-bbf6-dad9ba724544\") " Feb 03 08:01:08 crc kubenswrapper[4998]: I0203 08:01:08.948185 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cfca0965-5565-4f00-bbf6-dad9ba724544-catalog-content\") pod \"cfca0965-5565-4f00-bbf6-dad9ba724544\" (UID: \"cfca0965-5565-4f00-bbf6-dad9ba724544\") " Feb 03 08:01:08 crc kubenswrapper[4998]: I0203 08:01:08.948938 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cfca0965-5565-4f00-bbf6-dad9ba724544-utilities" (OuterVolumeSpecName: "utilities") pod "cfca0965-5565-4f00-bbf6-dad9ba724544" (UID: "cfca0965-5565-4f00-bbf6-dad9ba724544"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:01:08 crc kubenswrapper[4998]: I0203 08:01:08.953659 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cfca0965-5565-4f00-bbf6-dad9ba724544-kube-api-access-qjz8q" (OuterVolumeSpecName: "kube-api-access-qjz8q") pod "cfca0965-5565-4f00-bbf6-dad9ba724544" (UID: "cfca0965-5565-4f00-bbf6-dad9ba724544"). InnerVolumeSpecName "kube-api-access-qjz8q". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:01:09 crc kubenswrapper[4998]: I0203 08:01:09.012673 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cfca0965-5565-4f00-bbf6-dad9ba724544-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cfca0965-5565-4f00-bbf6-dad9ba724544" (UID: "cfca0965-5565-4f00-bbf6-dad9ba724544"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:01:09 crc kubenswrapper[4998]: I0203 08:01:09.049875 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cfca0965-5565-4f00-bbf6-dad9ba724544-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 08:01:09 crc kubenswrapper[4998]: I0203 08:01:09.049917 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qjz8q\" (UniqueName: \"kubernetes.io/projected/cfca0965-5565-4f00-bbf6-dad9ba724544-kube-api-access-qjz8q\") on node \"crc\" DevicePath \"\"" Feb 03 08:01:09 crc kubenswrapper[4998]: I0203 08:01:09.049931 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cfca0965-5565-4f00-bbf6-dad9ba724544-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 08:01:09 crc kubenswrapper[4998]: I0203 08:01:09.206623 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-kg5fq"] Feb 03 08:01:09 crc kubenswrapper[4998]: I0203 08:01:09.213284 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-kg5fq"] Feb 03 08:01:10 crc kubenswrapper[4998]: I0203 08:01:10.437206 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cfca0965-5565-4f00-bbf6-dad9ba724544" path="/var/lib/kubelet/pods/cfca0965-5565-4f00-bbf6-dad9ba724544/volumes" Feb 03 08:01:12 crc kubenswrapper[4998]: I0203 08:01:12.754725 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 08:01:12 crc kubenswrapper[4998]: I0203 08:01:12.755096 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 08:01:12 crc kubenswrapper[4998]: I0203 08:01:12.755133 4998 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" Feb 03 08:01:12 crc kubenswrapper[4998]: I0203 08:01:12.755710 4998 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"59350f56fc83c39f196b6f8d69fba3062b7aab975006aa0be70d4a3ac03a44b6"} pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 03 08:01:12 crc kubenswrapper[4998]: I0203 08:01:12.755773 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" containerID="cri-o://59350f56fc83c39f196b6f8d69fba3062b7aab975006aa0be70d4a3ac03a44b6" gracePeriod=600 Feb 03 08:01:12 crc kubenswrapper[4998]: E0203 08:01:12.886213 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:01:12 crc kubenswrapper[4998]: I0203 08:01:12.917515 4998 generic.go:334] "Generic (PLEG): container finished" podID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerID="59350f56fc83c39f196b6f8d69fba3062b7aab975006aa0be70d4a3ac03a44b6" exitCode=0 Feb 03 08:01:12 crc kubenswrapper[4998]: I0203 08:01:12.917574 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerDied","Data":"59350f56fc83c39f196b6f8d69fba3062b7aab975006aa0be70d4a3ac03a44b6"} Feb 03 08:01:12 crc kubenswrapper[4998]: I0203 08:01:12.917620 4998 scope.go:117] "RemoveContainer" containerID="9026719ad829d92397bd153daa5c6881968d639a8c0ca3ba0951b7a6674c8a7b" Feb 03 08:01:12 crc kubenswrapper[4998]: I0203 08:01:12.917997 4998 scope.go:117] "RemoveContainer" containerID="59350f56fc83c39f196b6f8d69fba3062b7aab975006aa0be70d4a3ac03a44b6" Feb 03 08:01:12 crc kubenswrapper[4998]: E0203 08:01:12.918221 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:01:23 crc kubenswrapper[4998]: I0203 08:01:23.427672 4998 scope.go:117] "RemoveContainer" containerID="59350f56fc83c39f196b6f8d69fba3062b7aab975006aa0be70d4a3ac03a44b6" Feb 03 08:01:23 crc kubenswrapper[4998]: E0203 08:01:23.428469 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:01:36 crc kubenswrapper[4998]: I0203 08:01:36.427524 4998 scope.go:117] "RemoveContainer" containerID="59350f56fc83c39f196b6f8d69fba3062b7aab975006aa0be70d4a3ac03a44b6" Feb 03 08:01:36 crc kubenswrapper[4998]: E0203 08:01:36.428407 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:01:39 crc kubenswrapper[4998]: I0203 08:01:39.286030 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-86pcj"] Feb 03 08:01:39 crc kubenswrapper[4998]: E0203 08:01:39.286956 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfca0965-5565-4f00-bbf6-dad9ba724544" containerName="registry-server" Feb 03 08:01:39 crc kubenswrapper[4998]: I0203 08:01:39.286970 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfca0965-5565-4f00-bbf6-dad9ba724544" 
containerName="registry-server" Feb 03 08:01:39 crc kubenswrapper[4998]: E0203 08:01:39.286992 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfca0965-5565-4f00-bbf6-dad9ba724544" containerName="extract-content" Feb 03 08:01:39 crc kubenswrapper[4998]: I0203 08:01:39.287000 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfca0965-5565-4f00-bbf6-dad9ba724544" containerName="extract-content" Feb 03 08:01:39 crc kubenswrapper[4998]: E0203 08:01:39.287019 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfca0965-5565-4f00-bbf6-dad9ba724544" containerName="extract-utilities" Feb 03 08:01:39 crc kubenswrapper[4998]: I0203 08:01:39.287027 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfca0965-5565-4f00-bbf6-dad9ba724544" containerName="extract-utilities" Feb 03 08:01:39 crc kubenswrapper[4998]: I0203 08:01:39.287193 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfca0965-5565-4f00-bbf6-dad9ba724544" containerName="registry-server" Feb 03 08:01:39 crc kubenswrapper[4998]: I0203 08:01:39.288385 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-86pcj" Feb 03 08:01:39 crc kubenswrapper[4998]: I0203 08:01:39.289677 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-86pcj"] Feb 03 08:01:39 crc kubenswrapper[4998]: I0203 08:01:39.331983 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/26382d8c-d94a-464f-ba0d-8ee6c1309f4f-utilities\") pod \"redhat-marketplace-86pcj\" (UID: \"26382d8c-d94a-464f-ba0d-8ee6c1309f4f\") " pod="openshift-marketplace/redhat-marketplace-86pcj" Feb 03 08:01:39 crc kubenswrapper[4998]: I0203 08:01:39.332080 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dh4gz\" (UniqueName: \"kubernetes.io/projected/26382d8c-d94a-464f-ba0d-8ee6c1309f4f-kube-api-access-dh4gz\") pod \"redhat-marketplace-86pcj\" (UID: \"26382d8c-d94a-464f-ba0d-8ee6c1309f4f\") " pod="openshift-marketplace/redhat-marketplace-86pcj" Feb 03 08:01:39 crc kubenswrapper[4998]: I0203 08:01:39.332132 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/26382d8c-d94a-464f-ba0d-8ee6c1309f4f-catalog-content\") pod \"redhat-marketplace-86pcj\" (UID: \"26382d8c-d94a-464f-ba0d-8ee6c1309f4f\") " pod="openshift-marketplace/redhat-marketplace-86pcj" Feb 03 08:01:39 crc kubenswrapper[4998]: I0203 08:01:39.433183 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/26382d8c-d94a-464f-ba0d-8ee6c1309f4f-utilities\") pod \"redhat-marketplace-86pcj\" (UID: \"26382d8c-d94a-464f-ba0d-8ee6c1309f4f\") " pod="openshift-marketplace/redhat-marketplace-86pcj" Feb 03 08:01:39 crc kubenswrapper[4998]: I0203 08:01:39.433250 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dh4gz\" (UniqueName: \"kubernetes.io/projected/26382d8c-d94a-464f-ba0d-8ee6c1309f4f-kube-api-access-dh4gz\") pod \"redhat-marketplace-86pcj\" (UID: \"26382d8c-d94a-464f-ba0d-8ee6c1309f4f\") " pod="openshift-marketplace/redhat-marketplace-86pcj" Feb 03 08:01:39 crc kubenswrapper[4998]: I0203 08:01:39.433287 4998 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/26382d8c-d94a-464f-ba0d-8ee6c1309f4f-catalog-content\") pod \"redhat-marketplace-86pcj\" (UID: \"26382d8c-d94a-464f-ba0d-8ee6c1309f4f\") " pod="openshift-marketplace/redhat-marketplace-86pcj" Feb 03 08:01:39 crc kubenswrapper[4998]: I0203 08:01:39.433690 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/26382d8c-d94a-464f-ba0d-8ee6c1309f4f-utilities\") pod \"redhat-marketplace-86pcj\" (UID: \"26382d8c-d94a-464f-ba0d-8ee6c1309f4f\") " pod="openshift-marketplace/redhat-marketplace-86pcj" Feb 03 08:01:39 crc kubenswrapper[4998]: I0203 08:01:39.433707 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/26382d8c-d94a-464f-ba0d-8ee6c1309f4f-catalog-content\") pod \"redhat-marketplace-86pcj\" (UID: \"26382d8c-d94a-464f-ba0d-8ee6c1309f4f\") " pod="openshift-marketplace/redhat-marketplace-86pcj" Feb 03 08:01:39 crc kubenswrapper[4998]: I0203 08:01:39.470303 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dh4gz\" (UniqueName: \"kubernetes.io/projected/26382d8c-d94a-464f-ba0d-8ee6c1309f4f-kube-api-access-dh4gz\") pod \"redhat-marketplace-86pcj\" (UID: \"26382d8c-d94a-464f-ba0d-8ee6c1309f4f\") " pod="openshift-marketplace/redhat-marketplace-86pcj" Feb 03 08:01:39 crc kubenswrapper[4998]: I0203 08:01:39.609735 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-86pcj" Feb 03 08:01:40 crc kubenswrapper[4998]: I0203 08:01:40.036672 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-86pcj"] Feb 03 08:01:40 crc kubenswrapper[4998]: I0203 08:01:40.093318 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-86pcj" event={"ID":"26382d8c-d94a-464f-ba0d-8ee6c1309f4f","Type":"ContainerStarted","Data":"15746f1854d42a499c9605c46c19886baf989df6d9d80e98a5367b4f9efe1bfb"} Feb 03 08:01:40 crc kubenswrapper[4998]: E0203 08:01:40.322953 4998 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod26382d8c_d94a_464f_ba0d_8ee6c1309f4f.slice/crio-conmon-e8e010ca5c4577472a8549ebe3554f085b00278aca0ba74f55473fd5dea4a09a.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod26382d8c_d94a_464f_ba0d_8ee6c1309f4f.slice/crio-e8e010ca5c4577472a8549ebe3554f085b00278aca0ba74f55473fd5dea4a09a.scope\": RecentStats: unable to find data in memory cache]" Feb 03 08:01:41 crc kubenswrapper[4998]: I0203 08:01:41.105354 4998 generic.go:334] "Generic (PLEG): container finished" podID="26382d8c-d94a-464f-ba0d-8ee6c1309f4f" containerID="e8e010ca5c4577472a8549ebe3554f085b00278aca0ba74f55473fd5dea4a09a" exitCode=0 Feb 03 08:01:41 crc kubenswrapper[4998]: I0203 08:01:41.105447 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-86pcj" event={"ID":"26382d8c-d94a-464f-ba0d-8ee6c1309f4f","Type":"ContainerDied","Data":"e8e010ca5c4577472a8549ebe3554f085b00278aca0ba74f55473fd5dea4a09a"} Feb 03 08:01:42 crc kubenswrapper[4998]: I0203 08:01:42.113917 4998 generic.go:334] "Generic (PLEG): container finished" podID="26382d8c-d94a-464f-ba0d-8ee6c1309f4f" 
containerID="097bc94ca33c41f0f62164dc397765b4ebfe01665ce9889ee025cee7f2b22512" exitCode=0 Feb 03 08:01:42 crc kubenswrapper[4998]: I0203 08:01:42.114015 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-86pcj" event={"ID":"26382d8c-d94a-464f-ba0d-8ee6c1309f4f","Type":"ContainerDied","Data":"097bc94ca33c41f0f62164dc397765b4ebfe01665ce9889ee025cee7f2b22512"} Feb 03 08:01:43 crc kubenswrapper[4998]: I0203 08:01:43.127106 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-86pcj" event={"ID":"26382d8c-d94a-464f-ba0d-8ee6c1309f4f","Type":"ContainerStarted","Data":"d1a0e6f6404394b63b975b7502d538bafecb237f0ff5b3dcf95c88dd05673361"} Feb 03 08:01:43 crc kubenswrapper[4998]: I0203 08:01:43.153063 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-86pcj" podStartSLOduration=2.705693633 podStartE2EDuration="4.15303561s" podCreationTimestamp="2026-02-03 08:01:39 +0000 UTC" firstStartedPulling="2026-02-03 08:01:41.110832742 +0000 UTC m=+4539.397526548" lastFinishedPulling="2026-02-03 08:01:42.558174719 +0000 UTC m=+4540.844868525" observedRunningTime="2026-02-03 08:01:43.145352001 +0000 UTC m=+4541.432045827" watchObservedRunningTime="2026-02-03 08:01:43.15303561 +0000 UTC m=+4541.439729416" Feb 03 08:01:48 crc kubenswrapper[4998]: I0203 08:01:48.428040 4998 scope.go:117] "RemoveContainer" containerID="59350f56fc83c39f196b6f8d69fba3062b7aab975006aa0be70d4a3ac03a44b6" Feb 03 08:01:48 crc kubenswrapper[4998]: E0203 08:01:48.428616 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:01:49 crc kubenswrapper[4998]: I0203 08:01:49.609925 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-86pcj" Feb 03 08:01:49 crc kubenswrapper[4998]: I0203 08:01:49.611214 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-86pcj" Feb 03 08:01:49 crc kubenswrapper[4998]: I0203 08:01:49.649181 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-86pcj" Feb 03 08:01:50 crc kubenswrapper[4998]: I0203 08:01:50.238463 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-86pcj" Feb 03 08:01:50 crc kubenswrapper[4998]: I0203 08:01:50.282967 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-86pcj"] Feb 03 08:01:52 crc kubenswrapper[4998]: I0203 08:01:52.205491 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-86pcj" podUID="26382d8c-d94a-464f-ba0d-8ee6c1309f4f" containerName="registry-server" containerID="cri-o://d1a0e6f6404394b63b975b7502d538bafecb237f0ff5b3dcf95c88dd05673361" gracePeriod=2 Feb 03 08:01:52 crc kubenswrapper[4998]: I0203 08:01:52.746823 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-86pcj" Feb 03 08:01:52 crc kubenswrapper[4998]: I0203 08:01:52.821197 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/26382d8c-d94a-464f-ba0d-8ee6c1309f4f-utilities\") pod \"26382d8c-d94a-464f-ba0d-8ee6c1309f4f\" (UID: \"26382d8c-d94a-464f-ba0d-8ee6c1309f4f\") " Feb 03 08:01:52 crc kubenswrapper[4998]: I0203 08:01:52.821349 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/26382d8c-d94a-464f-ba0d-8ee6c1309f4f-catalog-content\") pod \"26382d8c-d94a-464f-ba0d-8ee6c1309f4f\" (UID: \"26382d8c-d94a-464f-ba0d-8ee6c1309f4f\") " Feb 03 08:01:52 crc kubenswrapper[4998]: I0203 08:01:52.821440 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dh4gz\" (UniqueName: \"kubernetes.io/projected/26382d8c-d94a-464f-ba0d-8ee6c1309f4f-kube-api-access-dh4gz\") pod \"26382d8c-d94a-464f-ba0d-8ee6c1309f4f\" (UID: \"26382d8c-d94a-464f-ba0d-8ee6c1309f4f\") " Feb 03 08:01:52 crc kubenswrapper[4998]: I0203 08:01:52.822307 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/26382d8c-d94a-464f-ba0d-8ee6c1309f4f-utilities" (OuterVolumeSpecName: "utilities") pod "26382d8c-d94a-464f-ba0d-8ee6c1309f4f" (UID: "26382d8c-d94a-464f-ba0d-8ee6c1309f4f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:01:52 crc kubenswrapper[4998]: I0203 08:01:52.826984 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26382d8c-d94a-464f-ba0d-8ee6c1309f4f-kube-api-access-dh4gz" (OuterVolumeSpecName: "kube-api-access-dh4gz") pod "26382d8c-d94a-464f-ba0d-8ee6c1309f4f" (UID: "26382d8c-d94a-464f-ba0d-8ee6c1309f4f"). InnerVolumeSpecName "kube-api-access-dh4gz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:01:52 crc kubenswrapper[4998]: I0203 08:01:52.848719 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/26382d8c-d94a-464f-ba0d-8ee6c1309f4f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "26382d8c-d94a-464f-ba0d-8ee6c1309f4f" (UID: "26382d8c-d94a-464f-ba0d-8ee6c1309f4f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:01:52 crc kubenswrapper[4998]: I0203 08:01:52.922893 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dh4gz\" (UniqueName: \"kubernetes.io/projected/26382d8c-d94a-464f-ba0d-8ee6c1309f4f-kube-api-access-dh4gz\") on node \"crc\" DevicePath \"\"" Feb 03 08:01:52 crc kubenswrapper[4998]: I0203 08:01:52.922931 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/26382d8c-d94a-464f-ba0d-8ee6c1309f4f-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 08:01:52 crc kubenswrapper[4998]: I0203 08:01:52.922946 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/26382d8c-d94a-464f-ba0d-8ee6c1309f4f-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 08:01:53 crc kubenswrapper[4998]: I0203 08:01:53.216727 4998 generic.go:334] "Generic (PLEG): container finished" podID="26382d8c-d94a-464f-ba0d-8ee6c1309f4f" containerID="d1a0e6f6404394b63b975b7502d538bafecb237f0ff5b3dcf95c88dd05673361" exitCode=0 Feb 03 08:01:53 crc kubenswrapper[4998]: I0203 08:01:53.216800 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-86pcj" event={"ID":"26382d8c-d94a-464f-ba0d-8ee6c1309f4f","Type":"ContainerDied","Data":"d1a0e6f6404394b63b975b7502d538bafecb237f0ff5b3dcf95c88dd05673361"} Feb 03 08:01:53 crc kubenswrapper[4998]: I0203 08:01:53.218427 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-86pcj" event={"ID":"26382d8c-d94a-464f-ba0d-8ee6c1309f4f","Type":"ContainerDied","Data":"15746f1854d42a499c9605c46c19886baf989df6d9d80e98a5367b4f9efe1bfb"} Feb 03 08:01:53 crc kubenswrapper[4998]: I0203 08:01:53.218570 4998 scope.go:117] "RemoveContainer" containerID="d1a0e6f6404394b63b975b7502d538bafecb237f0ff5b3dcf95c88dd05673361" Feb 03 08:01:53 crc kubenswrapper[4998]: I0203 08:01:53.216834 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-86pcj" Feb 03 08:01:53 crc kubenswrapper[4998]: I0203 08:01:53.242720 4998 scope.go:117] "RemoveContainer" containerID="097bc94ca33c41f0f62164dc397765b4ebfe01665ce9889ee025cee7f2b22512" Feb 03 08:01:53 crc kubenswrapper[4998]: I0203 08:01:53.260871 4998 scope.go:117] "RemoveContainer" containerID="e8e010ca5c4577472a8549ebe3554f085b00278aca0ba74f55473fd5dea4a09a" Feb 03 08:01:53 crc kubenswrapper[4998]: I0203 08:01:53.278215 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-86pcj"] Feb 03 08:01:53 crc kubenswrapper[4998]: I0203 08:01:53.284146 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-86pcj"] Feb 03 08:01:53 crc kubenswrapper[4998]: I0203 08:01:53.293753 4998 scope.go:117] "RemoveContainer" containerID="d1a0e6f6404394b63b975b7502d538bafecb237f0ff5b3dcf95c88dd05673361" Feb 03 08:01:53 crc kubenswrapper[4998]: E0203 08:01:53.294177 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d1a0e6f6404394b63b975b7502d538bafecb237f0ff5b3dcf95c88dd05673361\": container with ID starting with d1a0e6f6404394b63b975b7502d538bafecb237f0ff5b3dcf95c88dd05673361 not found: ID does not exist" containerID="d1a0e6f6404394b63b975b7502d538bafecb237f0ff5b3dcf95c88dd05673361" Feb 03 08:01:53 crc kubenswrapper[4998]: I0203 08:01:53.294214 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d1a0e6f6404394b63b975b7502d538bafecb237f0ff5b3dcf95c88dd05673361"} err="failed to get container status \"d1a0e6f6404394b63b975b7502d538bafecb237f0ff5b3dcf95c88dd05673361\": rpc error: code = NotFound desc = could not find container \"d1a0e6f6404394b63b975b7502d538bafecb237f0ff5b3dcf95c88dd05673361\": container with ID starting with d1a0e6f6404394b63b975b7502d538bafecb237f0ff5b3dcf95c88dd05673361 not found: ID does not exist" Feb 03 08:01:53 crc kubenswrapper[4998]: I0203 08:01:53.294239 4998 scope.go:117] "RemoveContainer" containerID="097bc94ca33c41f0f62164dc397765b4ebfe01665ce9889ee025cee7f2b22512" Feb 03 08:01:53 crc kubenswrapper[4998]: E0203 08:01:53.294521 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"097bc94ca33c41f0f62164dc397765b4ebfe01665ce9889ee025cee7f2b22512\": container with ID starting with 097bc94ca33c41f0f62164dc397765b4ebfe01665ce9889ee025cee7f2b22512 not found: ID does not exist" containerID="097bc94ca33c41f0f62164dc397765b4ebfe01665ce9889ee025cee7f2b22512" Feb 03 08:01:53 crc kubenswrapper[4998]: I0203 08:01:53.294555 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"097bc94ca33c41f0f62164dc397765b4ebfe01665ce9889ee025cee7f2b22512"} err="failed to get container status \"097bc94ca33c41f0f62164dc397765b4ebfe01665ce9889ee025cee7f2b22512\": rpc error: code = NotFound desc = could not find container \"097bc94ca33c41f0f62164dc397765b4ebfe01665ce9889ee025cee7f2b22512\": container with ID starting with 097bc94ca33c41f0f62164dc397765b4ebfe01665ce9889ee025cee7f2b22512 not found: ID does not exist" Feb 03 08:01:53 crc kubenswrapper[4998]: I0203 08:01:53.294574 4998 scope.go:117] "RemoveContainer" containerID="e8e010ca5c4577472a8549ebe3554f085b00278aca0ba74f55473fd5dea4a09a" Feb 03 08:01:53 crc kubenswrapper[4998]: E0203 08:01:53.294917 4998 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"e8e010ca5c4577472a8549ebe3554f085b00278aca0ba74f55473fd5dea4a09a\": container with ID starting with e8e010ca5c4577472a8549ebe3554f085b00278aca0ba74f55473fd5dea4a09a not found: ID does not exist" containerID="e8e010ca5c4577472a8549ebe3554f085b00278aca0ba74f55473fd5dea4a09a" Feb 03 08:01:53 crc kubenswrapper[4998]: I0203 08:01:53.294944 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8e010ca5c4577472a8549ebe3554f085b00278aca0ba74f55473fd5dea4a09a"} err="failed to get container status \"e8e010ca5c4577472a8549ebe3554f085b00278aca0ba74f55473fd5dea4a09a\": rpc error: code = NotFound desc = could not find container \"e8e010ca5c4577472a8549ebe3554f085b00278aca0ba74f55473fd5dea4a09a\": container with ID starting with e8e010ca5c4577472a8549ebe3554f085b00278aca0ba74f55473fd5dea4a09a not found: ID does not exist" Feb 03 08:01:54 crc kubenswrapper[4998]: I0203 08:01:54.435749 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="26382d8c-d94a-464f-ba0d-8ee6c1309f4f" path="/var/lib/kubelet/pods/26382d8c-d94a-464f-ba0d-8ee6c1309f4f/volumes" Feb 03 08:01:58 crc kubenswrapper[4998]: I0203 08:01:58.043333 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6kmkh"] Feb 03 08:01:58 crc kubenswrapper[4998]: E0203 08:01:58.043835 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26382d8c-d94a-464f-ba0d-8ee6c1309f4f" containerName="extract-utilities" Feb 03 08:01:58 crc kubenswrapper[4998]: I0203 08:01:58.043846 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="26382d8c-d94a-464f-ba0d-8ee6c1309f4f" containerName="extract-utilities" Feb 03 08:01:58 crc kubenswrapper[4998]: E0203 08:01:58.043895 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26382d8c-d94a-464f-ba0d-8ee6c1309f4f" containerName="extract-content" Feb 03 08:01:58 crc kubenswrapper[4998]: I0203 08:01:58.043901 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="26382d8c-d94a-464f-ba0d-8ee6c1309f4f" containerName="extract-content" Feb 03 08:01:58 crc kubenswrapper[4998]: E0203 08:01:58.043910 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26382d8c-d94a-464f-ba0d-8ee6c1309f4f" containerName="registry-server" Feb 03 08:01:58 crc kubenswrapper[4998]: I0203 08:01:58.043917 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="26382d8c-d94a-464f-ba0d-8ee6c1309f4f" containerName="registry-server" Feb 03 08:01:58 crc kubenswrapper[4998]: I0203 08:01:58.044034 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="26382d8c-d94a-464f-ba0d-8ee6c1309f4f" containerName="registry-server" Feb 03 08:01:58 crc kubenswrapper[4998]: I0203 08:01:58.044929 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6kmkh" Feb 03 08:01:58 crc kubenswrapper[4998]: I0203 08:01:58.057211 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6kmkh"] Feb 03 08:01:58 crc kubenswrapper[4998]: I0203 08:01:58.089509 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60e7dbf3-b326-4358-a8b3-d3780282e84a-catalog-content\") pod \"community-operators-6kmkh\" (UID: \"60e7dbf3-b326-4358-a8b3-d3780282e84a\") " pod="openshift-marketplace/community-operators-6kmkh" Feb 03 08:01:58 crc kubenswrapper[4998]: I0203 08:01:58.089568 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60e7dbf3-b326-4358-a8b3-d3780282e84a-utilities\") pod \"community-operators-6kmkh\" (UID: \"60e7dbf3-b326-4358-a8b3-d3780282e84a\") " pod="openshift-marketplace/community-operators-6kmkh" Feb 03 08:01:58 crc kubenswrapper[4998]: I0203 08:01:58.089621 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crqh7\" (UniqueName: \"kubernetes.io/projected/60e7dbf3-b326-4358-a8b3-d3780282e84a-kube-api-access-crqh7\") pod \"community-operators-6kmkh\" (UID: \"60e7dbf3-b326-4358-a8b3-d3780282e84a\") " pod="openshift-marketplace/community-operators-6kmkh" Feb 03 08:01:58 crc kubenswrapper[4998]: I0203 08:01:58.190985 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60e7dbf3-b326-4358-a8b3-d3780282e84a-catalog-content\") pod \"community-operators-6kmkh\" (UID: \"60e7dbf3-b326-4358-a8b3-d3780282e84a\") " pod="openshift-marketplace/community-operators-6kmkh" Feb 03 08:01:58 crc kubenswrapper[4998]: I0203 08:01:58.191467 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60e7dbf3-b326-4358-a8b3-d3780282e84a-utilities\") pod \"community-operators-6kmkh\" (UID: \"60e7dbf3-b326-4358-a8b3-d3780282e84a\") " pod="openshift-marketplace/community-operators-6kmkh" Feb 03 08:01:58 crc kubenswrapper[4998]: I0203 08:01:58.191575 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60e7dbf3-b326-4358-a8b3-d3780282e84a-catalog-content\") pod \"community-operators-6kmkh\" (UID: \"60e7dbf3-b326-4358-a8b3-d3780282e84a\") " pod="openshift-marketplace/community-operators-6kmkh" Feb 03 08:01:58 crc kubenswrapper[4998]: I0203 08:01:58.191670 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crqh7\" (UniqueName: \"kubernetes.io/projected/60e7dbf3-b326-4358-a8b3-d3780282e84a-kube-api-access-crqh7\") pod \"community-operators-6kmkh\" (UID: \"60e7dbf3-b326-4358-a8b3-d3780282e84a\") " pod="openshift-marketplace/community-operators-6kmkh" Feb 03 08:01:58 crc kubenswrapper[4998]: I0203 08:01:58.191919 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60e7dbf3-b326-4358-a8b3-d3780282e84a-utilities\") pod \"community-operators-6kmkh\" (UID: \"60e7dbf3-b326-4358-a8b3-d3780282e84a\") " pod="openshift-marketplace/community-operators-6kmkh" Feb 03 08:01:58 crc kubenswrapper[4998]: I0203 08:01:58.274673 4998 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-crqh7\" (UniqueName: \"kubernetes.io/projected/60e7dbf3-b326-4358-a8b3-d3780282e84a-kube-api-access-crqh7\") pod \"community-operators-6kmkh\" (UID: \"60e7dbf3-b326-4358-a8b3-d3780282e84a\") " pod="openshift-marketplace/community-operators-6kmkh" Feb 03 08:01:58 crc kubenswrapper[4998]: I0203 08:01:58.373178 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6kmkh" Feb 03 08:01:58 crc kubenswrapper[4998]: I0203 08:01:58.623220 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6kmkh"] Feb 03 08:01:59 crc kubenswrapper[4998]: I0203 08:01:59.260767 4998 generic.go:334] "Generic (PLEG): container finished" podID="60e7dbf3-b326-4358-a8b3-d3780282e84a" containerID="2995366e9a81f0d563b28b6f809ab42d765aa3f1ef7725f26672dae10a14aff1" exitCode=0 Feb 03 08:01:59 crc kubenswrapper[4998]: I0203 08:01:59.260830 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6kmkh" event={"ID":"60e7dbf3-b326-4358-a8b3-d3780282e84a","Type":"ContainerDied","Data":"2995366e9a81f0d563b28b6f809ab42d765aa3f1ef7725f26672dae10a14aff1"} Feb 03 08:01:59 crc kubenswrapper[4998]: I0203 08:01:59.260859 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6kmkh" event={"ID":"60e7dbf3-b326-4358-a8b3-d3780282e84a","Type":"ContainerStarted","Data":"b40346617a0f3036f7ca3f66be648c9a9ed392a9a19e85b5d24cd2ff8950923d"} Feb 03 08:01:59 crc kubenswrapper[4998]: I0203 08:01:59.427698 4998 scope.go:117] "RemoveContainer" containerID="59350f56fc83c39f196b6f8d69fba3062b7aab975006aa0be70d4a3ac03a44b6" Feb 03 08:01:59 crc kubenswrapper[4998]: E0203 08:01:59.428206 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:02:00 crc kubenswrapper[4998]: E0203 08:02:00.659600 4998 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod60e7dbf3_b326_4358_a8b3_d3780282e84a.slice/crio-conmon-861e335a173abbb69c8144906b3ccb6579e6cb158ca1655fe67de3f71a3cd57e.scope\": RecentStats: unable to find data in memory cache]" Feb 03 08:02:01 crc kubenswrapper[4998]: I0203 08:02:01.275950 4998 generic.go:334] "Generic (PLEG): container finished" podID="60e7dbf3-b326-4358-a8b3-d3780282e84a" containerID="861e335a173abbb69c8144906b3ccb6579e6cb158ca1655fe67de3f71a3cd57e" exitCode=0 Feb 03 08:02:01 crc kubenswrapper[4998]: I0203 08:02:01.276005 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6kmkh" event={"ID":"60e7dbf3-b326-4358-a8b3-d3780282e84a","Type":"ContainerDied","Data":"861e335a173abbb69c8144906b3ccb6579e6cb158ca1655fe67de3f71a3cd57e"} Feb 03 08:02:02 crc kubenswrapper[4998]: I0203 08:02:02.285273 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6kmkh" 
event={"ID":"60e7dbf3-b326-4358-a8b3-d3780282e84a","Type":"ContainerStarted","Data":"472530d4699a14b95470012442faab20aa404975a0c503eac7e9b54e176b693b"} Feb 03 08:02:08 crc kubenswrapper[4998]: I0203 08:02:08.374053 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6kmkh" Feb 03 08:02:08 crc kubenswrapper[4998]: I0203 08:02:08.374546 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6kmkh" Feb 03 08:02:08 crc kubenswrapper[4998]: I0203 08:02:08.601910 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6kmkh" Feb 03 08:02:08 crc kubenswrapper[4998]: I0203 08:02:08.621156 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6kmkh" podStartSLOduration=8.195003334 podStartE2EDuration="10.621137816s" podCreationTimestamp="2026-02-03 08:01:58 +0000 UTC" firstStartedPulling="2026-02-03 08:01:59.265010264 +0000 UTC m=+4557.551704070" lastFinishedPulling="2026-02-03 08:02:01.691144726 +0000 UTC m=+4559.977838552" observedRunningTime="2026-02-03 08:02:02.303213397 +0000 UTC m=+4560.589907223" watchObservedRunningTime="2026-02-03 08:02:08.621137816 +0000 UTC m=+4566.907831612" Feb 03 08:02:09 crc kubenswrapper[4998]: I0203 08:02:09.373431 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6kmkh" Feb 03 08:02:09 crc kubenswrapper[4998]: I0203 08:02:09.839468 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6kmkh"] Feb 03 08:02:11 crc kubenswrapper[4998]: I0203 08:02:11.351328 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-6kmkh" podUID="60e7dbf3-b326-4358-a8b3-d3780282e84a" containerName="registry-server" containerID="cri-o://472530d4699a14b95470012442faab20aa404975a0c503eac7e9b54e176b693b" gracePeriod=2 Feb 03 08:02:11 crc kubenswrapper[4998]: I0203 08:02:11.753753 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6kmkh" Feb 03 08:02:11 crc kubenswrapper[4998]: I0203 08:02:11.786722 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-crqh7\" (UniqueName: \"kubernetes.io/projected/60e7dbf3-b326-4358-a8b3-d3780282e84a-kube-api-access-crqh7\") pod \"60e7dbf3-b326-4358-a8b3-d3780282e84a\" (UID: \"60e7dbf3-b326-4358-a8b3-d3780282e84a\") " Feb 03 08:02:11 crc kubenswrapper[4998]: I0203 08:02:11.786809 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60e7dbf3-b326-4358-a8b3-d3780282e84a-catalog-content\") pod \"60e7dbf3-b326-4358-a8b3-d3780282e84a\" (UID: \"60e7dbf3-b326-4358-a8b3-d3780282e84a\") " Feb 03 08:02:11 crc kubenswrapper[4998]: I0203 08:02:11.786854 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60e7dbf3-b326-4358-a8b3-d3780282e84a-utilities\") pod \"60e7dbf3-b326-4358-a8b3-d3780282e84a\" (UID: \"60e7dbf3-b326-4358-a8b3-d3780282e84a\") " Feb 03 08:02:11 crc kubenswrapper[4998]: I0203 08:02:11.788229 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/60e7dbf3-b326-4358-a8b3-d3780282e84a-utilities" (OuterVolumeSpecName: "utilities") pod "60e7dbf3-b326-4358-a8b3-d3780282e84a" (UID: "60e7dbf3-b326-4358-a8b3-d3780282e84a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:02:11 crc kubenswrapper[4998]: I0203 08:02:11.793584 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60e7dbf3-b326-4358-a8b3-d3780282e84a-kube-api-access-crqh7" (OuterVolumeSpecName: "kube-api-access-crqh7") pod "60e7dbf3-b326-4358-a8b3-d3780282e84a" (UID: "60e7dbf3-b326-4358-a8b3-d3780282e84a"). InnerVolumeSpecName "kube-api-access-crqh7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:02:11 crc kubenswrapper[4998]: I0203 08:02:11.849640 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/60e7dbf3-b326-4358-a8b3-d3780282e84a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "60e7dbf3-b326-4358-a8b3-d3780282e84a" (UID: "60e7dbf3-b326-4358-a8b3-d3780282e84a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:02:11 crc kubenswrapper[4998]: I0203 08:02:11.888284 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60e7dbf3-b326-4358-a8b3-d3780282e84a-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 08:02:11 crc kubenswrapper[4998]: I0203 08:02:11.888337 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60e7dbf3-b326-4358-a8b3-d3780282e84a-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 08:02:11 crc kubenswrapper[4998]: I0203 08:02:11.888349 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-crqh7\" (UniqueName: \"kubernetes.io/projected/60e7dbf3-b326-4358-a8b3-d3780282e84a-kube-api-access-crqh7\") on node \"crc\" DevicePath \"\"" Feb 03 08:02:12 crc kubenswrapper[4998]: I0203 08:02:12.361371 4998 generic.go:334] "Generic (PLEG): container finished" podID="60e7dbf3-b326-4358-a8b3-d3780282e84a" containerID="472530d4699a14b95470012442faab20aa404975a0c503eac7e9b54e176b693b" exitCode=0 Feb 03 08:02:12 crc kubenswrapper[4998]: I0203 08:02:12.361434 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6kmkh" event={"ID":"60e7dbf3-b326-4358-a8b3-d3780282e84a","Type":"ContainerDied","Data":"472530d4699a14b95470012442faab20aa404975a0c503eac7e9b54e176b693b"} Feb 03 08:02:12 crc kubenswrapper[4998]: I0203 08:02:12.361469 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6kmkh" event={"ID":"60e7dbf3-b326-4358-a8b3-d3780282e84a","Type":"ContainerDied","Data":"b40346617a0f3036f7ca3f66be648c9a9ed392a9a19e85b5d24cd2ff8950923d"} Feb 03 08:02:12 crc kubenswrapper[4998]: I0203 08:02:12.361476 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6kmkh" Feb 03 08:02:12 crc kubenswrapper[4998]: I0203 08:02:12.361487 4998 scope.go:117] "RemoveContainer" containerID="472530d4699a14b95470012442faab20aa404975a0c503eac7e9b54e176b693b" Feb 03 08:02:12 crc kubenswrapper[4998]: I0203 08:02:12.379489 4998 scope.go:117] "RemoveContainer" containerID="861e335a173abbb69c8144906b3ccb6579e6cb158ca1655fe67de3f71a3cd57e" Feb 03 08:02:12 crc kubenswrapper[4998]: I0203 08:02:12.402605 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6kmkh"] Feb 03 08:02:12 crc kubenswrapper[4998]: I0203 08:02:12.409530 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-6kmkh"] Feb 03 08:02:12 crc kubenswrapper[4998]: I0203 08:02:12.434049 4998 scope.go:117] "RemoveContainer" containerID="2995366e9a81f0d563b28b6f809ab42d765aa3f1ef7725f26672dae10a14aff1" Feb 03 08:02:12 crc kubenswrapper[4998]: I0203 08:02:12.438896 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="60e7dbf3-b326-4358-a8b3-d3780282e84a" path="/var/lib/kubelet/pods/60e7dbf3-b326-4358-a8b3-d3780282e84a/volumes" Feb 03 08:02:12 crc kubenswrapper[4998]: I0203 08:02:12.449475 4998 scope.go:117] "RemoveContainer" containerID="472530d4699a14b95470012442faab20aa404975a0c503eac7e9b54e176b693b" Feb 03 08:02:12 crc kubenswrapper[4998]: E0203 08:02:12.449859 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"472530d4699a14b95470012442faab20aa404975a0c503eac7e9b54e176b693b\": container with ID starting with 472530d4699a14b95470012442faab20aa404975a0c503eac7e9b54e176b693b not found: ID does not exist" containerID="472530d4699a14b95470012442faab20aa404975a0c503eac7e9b54e176b693b" Feb 03 08:02:12 crc kubenswrapper[4998]: I0203 08:02:12.449892 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"472530d4699a14b95470012442faab20aa404975a0c503eac7e9b54e176b693b"} err="failed to get container status \"472530d4699a14b95470012442faab20aa404975a0c503eac7e9b54e176b693b\": rpc error: code = NotFound desc = could not find container \"472530d4699a14b95470012442faab20aa404975a0c503eac7e9b54e176b693b\": container with ID starting with 472530d4699a14b95470012442faab20aa404975a0c503eac7e9b54e176b693b not found: ID does not exist" Feb 03 08:02:12 crc kubenswrapper[4998]: I0203 08:02:12.449910 4998 scope.go:117] "RemoveContainer" containerID="861e335a173abbb69c8144906b3ccb6579e6cb158ca1655fe67de3f71a3cd57e" Feb 03 08:02:12 crc kubenswrapper[4998]: E0203 08:02:12.450140 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"861e335a173abbb69c8144906b3ccb6579e6cb158ca1655fe67de3f71a3cd57e\": container with ID starting with 861e335a173abbb69c8144906b3ccb6579e6cb158ca1655fe67de3f71a3cd57e not found: ID does not exist" containerID="861e335a173abbb69c8144906b3ccb6579e6cb158ca1655fe67de3f71a3cd57e" Feb 03 08:02:12 crc kubenswrapper[4998]: I0203 08:02:12.450161 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"861e335a173abbb69c8144906b3ccb6579e6cb158ca1655fe67de3f71a3cd57e"} err="failed to get container status \"861e335a173abbb69c8144906b3ccb6579e6cb158ca1655fe67de3f71a3cd57e\": rpc error: code = NotFound desc = could not find container 
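The "Observed pod startup duration" entries above decompose consistently: podStartE2EDuration equals watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration appears to be that E2E time minus the image-pull window (lastFinishedPulling minus firstStartedPulling, taken on the monotonic m= offsets). A worked check against the community-operators-6kmkh entry; the decomposition is inferred from the logged values themselves, not from kubelet documentation:

```python
# Values copied from the pod_startup_latency_tracker entry for community-operators-6kmkh.
e2e = 10.621137816          # podStartE2EDuration: 08:02:08.621137816 - 08:01:58 (podCreationTimestamp)
first_pull = 4557.551704070  # firstStartedPulling, monotonic m= offset
last_pull = 4559.977838552   # lastFinishedPulling, monotonic m= offset

slo = e2e - (last_pull - first_pull)  # startup time with the image pull excluded
print(f"{slo:.9f}")                   # 8.195003334, matching podStartSLOduration in the log
```

The redhat-marketplace-86pcj entry checks out the same way: 4.15303561 - (4540.844868525 - 4539.397526548) = 2.705693633.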
\"861e335a173abbb69c8144906b3ccb6579e6cb158ca1655fe67de3f71a3cd57e\": container with ID starting with 861e335a173abbb69c8144906b3ccb6579e6cb158ca1655fe67de3f71a3cd57e not found: ID does not exist" Feb 03 08:02:12 crc kubenswrapper[4998]: I0203 08:02:12.450174 4998 scope.go:117] "RemoveContainer" containerID="2995366e9a81f0d563b28b6f809ab42d765aa3f1ef7725f26672dae10a14aff1" Feb 03 08:02:12 crc kubenswrapper[4998]: E0203 08:02:12.450515 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2995366e9a81f0d563b28b6f809ab42d765aa3f1ef7725f26672dae10a14aff1\": container with ID starting with 2995366e9a81f0d563b28b6f809ab42d765aa3f1ef7725f26672dae10a14aff1 not found: ID does not exist" containerID="2995366e9a81f0d563b28b6f809ab42d765aa3f1ef7725f26672dae10a14aff1" Feb 03 08:02:12 crc kubenswrapper[4998]: I0203 08:02:12.450554 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2995366e9a81f0d563b28b6f809ab42d765aa3f1ef7725f26672dae10a14aff1"} err="failed to get container status \"2995366e9a81f0d563b28b6f809ab42d765aa3f1ef7725f26672dae10a14aff1\": rpc error: code = NotFound desc = could not find container \"2995366e9a81f0d563b28b6f809ab42d765aa3f1ef7725f26672dae10a14aff1\": container with ID starting with 2995366e9a81f0d563b28b6f809ab42d765aa3f1ef7725f26672dae10a14aff1 not found: ID does not exist" Feb 03 08:02:14 crc kubenswrapper[4998]: I0203 08:02:14.427570 4998 scope.go:117] "RemoveContainer" containerID="59350f56fc83c39f196b6f8d69fba3062b7aab975006aa0be70d4a3ac03a44b6" Feb 03 08:02:14 crc kubenswrapper[4998]: E0203 08:02:14.428397 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:02:26 crc kubenswrapper[4998]: I0203 08:02:26.428022 4998 scope.go:117] "RemoveContainer" containerID="59350f56fc83c39f196b6f8d69fba3062b7aab975006aa0be70d4a3ac03a44b6" Feb 03 08:02:26 crc kubenswrapper[4998]: E0203 08:02:26.428582 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:02:37 crc kubenswrapper[4998]: I0203 08:02:37.427987 4998 scope.go:117] "RemoveContainer" containerID="59350f56fc83c39f196b6f8d69fba3062b7aab975006aa0be70d4a3ac03a44b6" Feb 03 08:02:37 crc kubenswrapper[4998]: E0203 08:02:37.428863 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:02:48 crc kubenswrapper[4998]: I0203 08:02:48.427271 4998 scope.go:117] "RemoveContainer" 
containerID="59350f56fc83c39f196b6f8d69fba3062b7aab975006aa0be70d4a3ac03a44b6" Feb 03 08:02:48 crc kubenswrapper[4998]: E0203 08:02:48.428008 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:03:03 crc kubenswrapper[4998]: I0203 08:03:03.427497 4998 scope.go:117] "RemoveContainer" containerID="59350f56fc83c39f196b6f8d69fba3062b7aab975006aa0be70d4a3ac03a44b6" Feb 03 08:03:03 crc kubenswrapper[4998]: E0203 08:03:03.428350 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:03:16 crc kubenswrapper[4998]: I0203 08:03:16.428111 4998 scope.go:117] "RemoveContainer" containerID="59350f56fc83c39f196b6f8d69fba3062b7aab975006aa0be70d4a3ac03a44b6" Feb 03 08:03:16 crc kubenswrapper[4998]: E0203 08:03:16.428760 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:03:29 crc kubenswrapper[4998]: I0203 08:03:29.428068 4998 scope.go:117] "RemoveContainer" containerID="59350f56fc83c39f196b6f8d69fba3062b7aab975006aa0be70d4a3ac03a44b6" Feb 03 08:03:29 crc kubenswrapper[4998]: E0203 08:03:29.428993 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:03:33 crc kubenswrapper[4998]: I0203 08:03:33.472345 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-ffl2q"] Feb 03 08:03:33 crc kubenswrapper[4998]: E0203 08:03:33.473148 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60e7dbf3-b326-4358-a8b3-d3780282e84a" containerName="registry-server" Feb 03 08:03:33 crc kubenswrapper[4998]: I0203 08:03:33.473164 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="60e7dbf3-b326-4358-a8b3-d3780282e84a" containerName="registry-server" Feb 03 08:03:33 crc kubenswrapper[4998]: E0203 08:03:33.473184 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60e7dbf3-b326-4358-a8b3-d3780282e84a" containerName="extract-utilities" Feb 03 08:03:33 crc kubenswrapper[4998]: I0203 08:03:33.473192 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="60e7dbf3-b326-4358-a8b3-d3780282e84a" containerName="extract-utilities" 
Feb 03 08:03:33 crc kubenswrapper[4998]: I0203 08:03:33.472345 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-ffl2q"]
Feb 03 08:03:33 crc kubenswrapper[4998]: E0203 08:03:33.473148 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60e7dbf3-b326-4358-a8b3-d3780282e84a" containerName="registry-server"
Feb 03 08:03:33 crc kubenswrapper[4998]: I0203 08:03:33.473164 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="60e7dbf3-b326-4358-a8b3-d3780282e84a" containerName="registry-server"
Feb 03 08:03:33 crc kubenswrapper[4998]: E0203 08:03:33.473184 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60e7dbf3-b326-4358-a8b3-d3780282e84a" containerName="extract-utilities"
Feb 03 08:03:33 crc kubenswrapper[4998]: I0203 08:03:33.473192 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="60e7dbf3-b326-4358-a8b3-d3780282e84a" containerName="extract-utilities"
Feb 03 08:03:33 crc kubenswrapper[4998]: E0203 08:03:33.473211 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60e7dbf3-b326-4358-a8b3-d3780282e84a" containerName="extract-content"
Feb 03 08:03:33 crc kubenswrapper[4998]: I0203 08:03:33.473219 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="60e7dbf3-b326-4358-a8b3-d3780282e84a" containerName="extract-content"
Feb 03 08:03:33 crc kubenswrapper[4998]: I0203 08:03:33.473373 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="60e7dbf3-b326-4358-a8b3-d3780282e84a" containerName="registry-server"
Feb 03 08:03:33 crc kubenswrapper[4998]: I0203 08:03:33.475862 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ffl2q"
Feb 03 08:03:33 crc kubenswrapper[4998]: I0203 08:03:33.486595 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ffl2q"]
Feb 03 08:03:33 crc kubenswrapper[4998]: I0203 08:03:33.521595 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc-catalog-content\") pod \"redhat-operators-ffl2q\" (UID: \"8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc\") " pod="openshift-marketplace/redhat-operators-ffl2q"
Feb 03 08:03:33 crc kubenswrapper[4998]: I0203 08:03:33.521742 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc-utilities\") pod \"redhat-operators-ffl2q\" (UID: \"8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc\") " pod="openshift-marketplace/redhat-operators-ffl2q"
Feb 03 08:03:33 crc kubenswrapper[4998]: I0203 08:03:33.521850 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8xvw6\" (UniqueName: \"kubernetes.io/projected/8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc-kube-api-access-8xvw6\") pod \"redhat-operators-ffl2q\" (UID: \"8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc\") " pod="openshift-marketplace/redhat-operators-ffl2q"
Feb 03 08:03:33 crc kubenswrapper[4998]: I0203 08:03:33.623354 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc-catalog-content\") pod \"redhat-operators-ffl2q\" (UID: \"8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc\") " pod="openshift-marketplace/redhat-operators-ffl2q"
Feb 03 08:03:33 crc kubenswrapper[4998]: I0203 08:03:33.623412 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc-utilities\") pod \"redhat-operators-ffl2q\" (UID: \"8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc\") " pod="openshift-marketplace/redhat-operators-ffl2q"
Feb 03 08:03:33 crc kubenswrapper[4998]: I0203 08:03:33.623438 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8xvw6\" (UniqueName: \"kubernetes.io/projected/8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc-kube-api-access-8xvw6\") pod \"redhat-operators-ffl2q\" (UID: \"8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc\") " pod="openshift-marketplace/redhat-operators-ffl2q"
Feb 03 08:03:33 crc kubenswrapper[4998]: I0203 08:03:33.624257 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc-catalog-content\") pod \"redhat-operators-ffl2q\" (UID: \"8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc\") " pod="openshift-marketplace/redhat-operators-ffl2q"
Feb 03 08:03:33 crc kubenswrapper[4998]: I0203 08:03:33.624310 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc-utilities\") pod \"redhat-operators-ffl2q\" (UID: \"8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc\") " pod="openshift-marketplace/redhat-operators-ffl2q"
Feb 03 08:03:33 crc kubenswrapper[4998]: I0203 08:03:33.644639 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8xvw6\" (UniqueName: \"kubernetes.io/projected/8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc-kube-api-access-8xvw6\") pod \"redhat-operators-ffl2q\" (UID: \"8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc\") " pod="openshift-marketplace/redhat-operators-ffl2q"
Feb 03 08:03:33 crc kubenswrapper[4998]: I0203 08:03:33.798480 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ffl2q"
Feb 03 08:03:34 crc kubenswrapper[4998]: I0203 08:03:34.110337 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ffl2q"]
Feb 03 08:03:34 crc kubenswrapper[4998]: I0203 08:03:34.914757 4998 generic.go:334] "Generic (PLEG): container finished" podID="8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc" containerID="448b6d960359b027d43eece7521f96f1aac8531f6ba132b18fe2b8cba293fdd6" exitCode=0
Feb 03 08:03:34 crc kubenswrapper[4998]: I0203 08:03:34.914876 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ffl2q" event={"ID":"8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc","Type":"ContainerDied","Data":"448b6d960359b027d43eece7521f96f1aac8531f6ba132b18fe2b8cba293fdd6"}
Feb 03 08:03:34 crc kubenswrapper[4998]: I0203 08:03:34.915398 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ffl2q" event={"ID":"8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc","Type":"ContainerStarted","Data":"ee3e1ca2783dd509c2a9930a4c3fa6567ec170b6249235ca621da8334bf0cae0"}
Feb 03 08:03:35 crc kubenswrapper[4998]: I0203 08:03:35.925962 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ffl2q" event={"ID":"8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc","Type":"ContainerStarted","Data":"03942ac5a4257c330e131e94a2c4477070bd892bf34d516d9da746b37fac105e"}
Feb 03 08:03:36 crc kubenswrapper[4998]: I0203 08:03:36.937988 4998 generic.go:334] "Generic (PLEG): container finished" podID="8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc" containerID="03942ac5a4257c330e131e94a2c4477070bd892bf34d516d9da746b37fac105e" exitCode=0
Feb 03 08:03:36 crc kubenswrapper[4998]: I0203 08:03:36.938128 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ffl2q" event={"ID":"8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc","Type":"ContainerDied","Data":"03942ac5a4257c330e131e94a2c4477070bd892bf34d516d9da746b37fac105e"}
Feb 03 08:03:37 crc kubenswrapper[4998]: I0203 08:03:37.948272 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ffl2q" event={"ID":"8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc","Type":"ContainerStarted","Data":"28e888fad7fdb9749b67fc7e18f6325217dda4690b983633c270ae6fd52e8be9"}
Feb 03 08:03:37 crc kubenswrapper[4998]: I0203 08:03:37.976805 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-ffl2q" podStartSLOduration=2.320792965 podStartE2EDuration="4.976762072s" podCreationTimestamp="2026-02-03 08:03:33 +0000 UTC" firstStartedPulling="2026-02-03 08:03:34.918538119 +0000 UTC m=+4653.205231955" lastFinishedPulling="2026-02-03 08:03:37.574507256 +0000 UTC m=+4655.861201062" observedRunningTime="2026-02-03 08:03:37.973320334 +0000 UTC m=+4656.260014180" watchObservedRunningTime="2026-02-03 08:03:37.976762072 +0000 UTC m=+4656.263455878"
Feb 03 08:03:41 crc kubenswrapper[4998]: I0203 08:03:41.427399 4998 scope.go:117] "RemoveContainer" containerID="59350f56fc83c39f196b6f8d69fba3062b7aab975006aa0be70d4a3ac03a44b6"
Feb 03 08:03:41 crc kubenswrapper[4998]: E0203 08:03:41.427692 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:03:43 crc kubenswrapper[4998]: I0203 08:03:43.799633 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-ffl2q"
Feb 03 08:03:43 crc kubenswrapper[4998]: I0203 08:03:43.800205 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-ffl2q"
Feb 03 08:03:43 crc kubenswrapper[4998]: I0203 08:03:43.858534 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-ffl2q"
Feb 03 08:03:44 crc kubenswrapper[4998]: I0203 08:03:44.046125 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-ffl2q"
Feb 03 08:03:44 crc kubenswrapper[4998]: I0203 08:03:44.105461 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ffl2q"]
Feb 03 08:03:46 crc kubenswrapper[4998]: I0203 08:03:46.017514 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-ffl2q" podUID="8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc" containerName="registry-server" containerID="cri-o://28e888fad7fdb9749b67fc7e18f6325217dda4690b983633c270ae6fd52e8be9" gracePeriod=2
Feb 03 08:03:46 crc kubenswrapper[4998]: I0203 08:03:46.422685 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ffl2q"
Feb 03 08:03:46 crc kubenswrapper[4998]: I0203 08:03:46.529129 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8xvw6\" (UniqueName: \"kubernetes.io/projected/8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc-kube-api-access-8xvw6\") pod \"8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc\" (UID: \"8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc\") "
Feb 03 08:03:46 crc kubenswrapper[4998]: I0203 08:03:46.529231 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc-catalog-content\") pod \"8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc\" (UID: \"8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc\") "
Feb 03 08:03:46 crc kubenswrapper[4998]: I0203 08:03:46.529363 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc-utilities\") pod \"8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc\" (UID: \"8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc\") "
Feb 03 08:03:46 crc kubenswrapper[4998]: I0203 08:03:46.531556 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc-utilities" (OuterVolumeSpecName: "utilities") pod "8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc" (UID: "8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 08:03:46 crc kubenswrapper[4998]: I0203 08:03:46.539318 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc-kube-api-access-8xvw6" (OuterVolumeSpecName: "kube-api-access-8xvw6") pod "8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc" (UID: "8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc"). InnerVolumeSpecName "kube-api-access-8xvw6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 08:03:46 crc kubenswrapper[4998]: I0203 08:03:46.631630 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8xvw6\" (UniqueName: \"kubernetes.io/projected/8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc-kube-api-access-8xvw6\") on node \"crc\" DevicePath \"\""
Feb 03 08:03:46 crc kubenswrapper[4998]: I0203 08:03:46.631674 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc-utilities\") on node \"crc\" DevicePath \"\""
Feb 03 08:03:46 crc kubenswrapper[4998]: I0203 08:03:46.678360 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc" (UID: "8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 08:03:46 crc kubenswrapper[4998]: I0203 08:03:46.732729 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 03 08:03:47 crc kubenswrapper[4998]: I0203 08:03:47.028545 4998 generic.go:334] "Generic (PLEG): container finished" podID="8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc" containerID="28e888fad7fdb9749b67fc7e18f6325217dda4690b983633c270ae6fd52e8be9" exitCode=0
Feb 03 08:03:47 crc kubenswrapper[4998]: I0203 08:03:47.028613 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ffl2q" event={"ID":"8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc","Type":"ContainerDied","Data":"28e888fad7fdb9749b67fc7e18f6325217dda4690b983633c270ae6fd52e8be9"}
Feb 03 08:03:47 crc kubenswrapper[4998]: I0203 08:03:47.028654 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ffl2q" event={"ID":"8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc","Type":"ContainerDied","Data":"ee3e1ca2783dd509c2a9930a4c3fa6567ec170b6249235ca621da8334bf0cae0"}
Feb 03 08:03:47 crc kubenswrapper[4998]: I0203 08:03:47.028656 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ffl2q"
Feb 03 08:03:47 crc kubenswrapper[4998]: I0203 08:03:47.028703 4998 scope.go:117] "RemoveContainer" containerID="28e888fad7fdb9749b67fc7e18f6325217dda4690b983633c270ae6fd52e8be9"
Feb 03 08:03:47 crc kubenswrapper[4998]: I0203 08:03:47.058853 4998 scope.go:117] "RemoveContainer" containerID="03942ac5a4257c330e131e94a2c4477070bd892bf34d516d9da746b37fac105e"
Feb 03 08:03:47 crc kubenswrapper[4998]: I0203 08:03:47.082814 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ffl2q"]
Feb 03 08:03:47 crc kubenswrapper[4998]: I0203 08:03:47.088377 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-ffl2q"]
Feb 03 08:03:47 crc kubenswrapper[4998]: I0203 08:03:47.103616 4998 scope.go:117] "RemoveContainer" containerID="448b6d960359b027d43eece7521f96f1aac8531f6ba132b18fe2b8cba293fdd6"
Feb 03 08:03:47 crc kubenswrapper[4998]: I0203 08:03:47.121132 4998 scope.go:117] "RemoveContainer" containerID="28e888fad7fdb9749b67fc7e18f6325217dda4690b983633c270ae6fd52e8be9"
Feb 03 08:03:47 crc kubenswrapper[4998]: E0203 08:03:47.121702 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28e888fad7fdb9749b67fc7e18f6325217dda4690b983633c270ae6fd52e8be9\": container with ID starting with 28e888fad7fdb9749b67fc7e18f6325217dda4690b983633c270ae6fd52e8be9 not found: ID does not exist" containerID="28e888fad7fdb9749b67fc7e18f6325217dda4690b983633c270ae6fd52e8be9"
Feb 03 08:03:47 crc kubenswrapper[4998]: I0203 08:03:47.121733 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28e888fad7fdb9749b67fc7e18f6325217dda4690b983633c270ae6fd52e8be9"} err="failed to get container status \"28e888fad7fdb9749b67fc7e18f6325217dda4690b983633c270ae6fd52e8be9\": rpc error: code = NotFound desc = could not find container \"28e888fad7fdb9749b67fc7e18f6325217dda4690b983633c270ae6fd52e8be9\": container with ID starting with 28e888fad7fdb9749b67fc7e18f6325217dda4690b983633c270ae6fd52e8be9 not found: ID does not exist"
Feb 03 08:03:47 crc kubenswrapper[4998]: I0203 08:03:47.121791 4998 scope.go:117] "RemoveContainer" containerID="03942ac5a4257c330e131e94a2c4477070bd892bf34d516d9da746b37fac105e"
Feb 03 08:03:47 crc kubenswrapper[4998]: E0203 08:03:47.122159 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"03942ac5a4257c330e131e94a2c4477070bd892bf34d516d9da746b37fac105e\": container with ID starting with 03942ac5a4257c330e131e94a2c4477070bd892bf34d516d9da746b37fac105e not found: ID does not exist" containerID="03942ac5a4257c330e131e94a2c4477070bd892bf34d516d9da746b37fac105e"
Feb 03 08:03:47 crc kubenswrapper[4998]: I0203 08:03:47.122184 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03942ac5a4257c330e131e94a2c4477070bd892bf34d516d9da746b37fac105e"} err="failed to get container status \"03942ac5a4257c330e131e94a2c4477070bd892bf34d516d9da746b37fac105e\": rpc error: code = NotFound desc = could not find container \"03942ac5a4257c330e131e94a2c4477070bd892bf34d516d9da746b37fac105e\": container with ID starting with 03942ac5a4257c330e131e94a2c4477070bd892bf34d516d9da746b37fac105e not found: ID does not exist"
Feb 03 08:03:47 crc kubenswrapper[4998]: I0203 08:03:47.122197 4998 scope.go:117] "RemoveContainer" containerID="448b6d960359b027d43eece7521f96f1aac8531f6ba132b18fe2b8cba293fdd6"
Feb 03 08:03:47 crc kubenswrapper[4998]: E0203 08:03:47.122462 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"448b6d960359b027d43eece7521f96f1aac8531f6ba132b18fe2b8cba293fdd6\": container with ID starting with 448b6d960359b027d43eece7521f96f1aac8531f6ba132b18fe2b8cba293fdd6 not found: ID does not exist" containerID="448b6d960359b027d43eece7521f96f1aac8531f6ba132b18fe2b8cba293fdd6"
Feb 03 08:03:47 crc kubenswrapper[4998]: I0203 08:03:47.122482 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"448b6d960359b027d43eece7521f96f1aac8531f6ba132b18fe2b8cba293fdd6"} err="failed to get container status \"448b6d960359b027d43eece7521f96f1aac8531f6ba132b18fe2b8cba293fdd6\": rpc error: code = NotFound desc = could not find container \"448b6d960359b027d43eece7521f96f1aac8531f6ba132b18fe2b8cba293fdd6\": container with ID starting with 448b6d960359b027d43eece7521f96f1aac8531f6ba132b18fe2b8cba293fdd6 not found: ID does not exist"
Feb 03 08:03:48 crc kubenswrapper[4998]: I0203 08:03:48.442259 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc" path="/var/lib/kubelet/pods/8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc/volumes"
Feb 03 08:03:52 crc kubenswrapper[4998]: I0203 08:03:52.432675 4998 scope.go:117] "RemoveContainer" containerID="59350f56fc83c39f196b6f8d69fba3062b7aab975006aa0be70d4a3ac03a44b6"
Feb 03 08:03:52 crc kubenswrapper[4998]: E0203 08:03:52.433245 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:04:03 crc kubenswrapper[4998]: I0203 08:04:03.427884 4998 scope.go:117] "RemoveContainer" containerID="59350f56fc83c39f196b6f8d69fba3062b7aab975006aa0be70d4a3ac03a44b6"
Feb 03 08:04:03 crc kubenswrapper[4998]: E0203 08:04:03.428773 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:04:14 crc kubenswrapper[4998]: I0203 08:04:14.427775 4998 scope.go:117] "RemoveContainer" containerID="59350f56fc83c39f196b6f8d69fba3062b7aab975006aa0be70d4a3ac03a44b6"
Feb 03 08:04:14 crc kubenswrapper[4998]: E0203 08:04:14.428384 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:04:25 crc kubenswrapper[4998]: I0203 08:04:25.427389 4998 scope.go:117] "RemoveContainer" containerID="59350f56fc83c39f196b6f8d69fba3062b7aab975006aa0be70d4a3ac03a44b6"
Feb 03 08:04:25 crc kubenswrapper[4998]: E0203 08:04:25.428056 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:04:38 crc kubenswrapper[4998]: I0203 08:04:38.429993 4998 scope.go:117] "RemoveContainer" containerID="59350f56fc83c39f196b6f8d69fba3062b7aab975006aa0be70d4a3ac03a44b6"
Feb 03 08:04:38 crc kubenswrapper[4998]: E0203 08:04:38.430832 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:04:53 crc kubenswrapper[4998]: I0203 08:04:53.427836 4998 scope.go:117] "RemoveContainer" containerID="59350f56fc83c39f196b6f8d69fba3062b7aab975006aa0be70d4a3ac03a44b6"
Feb 03 08:04:53 crc kubenswrapper[4998]: E0203 08:04:53.429071 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:05:07 crc kubenswrapper[4998]: I0203 08:05:07.427295 4998 scope.go:117] "RemoveContainer" containerID="59350f56fc83c39f196b6f8d69fba3062b7aab975006aa0be70d4a3ac03a44b6"
Feb 03 08:05:07 crc kubenswrapper[4998]: E0203 08:05:07.428066 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:05:21 crc kubenswrapper[4998]: I0203 08:05:21.427898 4998 scope.go:117] "RemoveContainer" containerID="59350f56fc83c39f196b6f8d69fba3062b7aab975006aa0be70d4a3ac03a44b6"
Feb 03 08:05:21 crc kubenswrapper[4998]: E0203 08:05:21.428555 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:05:32 crc kubenswrapper[4998]: I0203 08:05:32.432395 4998 scope.go:117] "RemoveContainer" containerID="59350f56fc83c39f196b6f8d69fba3062b7aab975006aa0be70d4a3ac03a44b6"
Feb 03 08:05:32 crc kubenswrapper[4998]: E0203 08:05:32.433047 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:05:47 crc kubenswrapper[4998]: I0203 08:05:47.427125 4998 scope.go:117] "RemoveContainer" containerID="59350f56fc83c39f196b6f8d69fba3062b7aab975006aa0be70d4a3ac03a44b6"
Feb 03 08:05:47 crc kubenswrapper[4998]: E0203 08:05:47.427989 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:05:59 crc kubenswrapper[4998]: I0203 08:05:59.427221 4998 scope.go:117] "RemoveContainer" containerID="59350f56fc83c39f196b6f8d69fba3062b7aab975006aa0be70d4a3ac03a44b6"
Feb 03 08:05:59 crc kubenswrapper[4998]: E0203 08:05:59.428210 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:06:10 crc kubenswrapper[4998]: I0203 08:06:10.452195 4998 scope.go:117] "RemoveContainer" containerID="59350f56fc83c39f196b6f8d69fba3062b7aab975006aa0be70d4a3ac03a44b6"
Feb 03 08:06:10 crc kubenswrapper[4998]: E0203 08:06:10.453209 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:06:21 crc kubenswrapper[4998]: I0203 08:06:21.435006 4998 scope.go:117] "RemoveContainer" containerID="59350f56fc83c39f196b6f8d69fba3062b7aab975006aa0be70d4a3ac03a44b6"
Feb 03 08:06:22 crc kubenswrapper[4998]: I0203 08:06:22.103277 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerStarted","Data":"04356c01a375169cbe715e23ad0d292638012c5a0c05bbba9c0b45970c057e0e"}
Feb 03 08:08:42 crc kubenswrapper[4998]: I0203 08:08:42.754104 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 03 08:08:42 crc kubenswrapper[4998]: I0203 08:08:42.754650 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 03 08:09:12 crc kubenswrapper[4998]: I0203 08:09:12.754768 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 03 08:09:12 crc kubenswrapper[4998]: I0203 08:09:12.755390 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 03 08:09:42 crc kubenswrapper[4998]: I0203 08:09:42.755253 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 03 08:09:42 crc kubenswrapper[4998]: I0203 08:09:42.755908 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 03 08:09:42 crc kubenswrapper[4998]: I0203 08:09:42.755962 4998 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x"
Feb 03 08:09:42 crc kubenswrapper[4998]: I0203 08:09:42.756691 4998 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"04356c01a375169cbe715e23ad0d292638012c5a0c05bbba9c0b45970c057e0e"} pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Feb 03 08:09:42 crc kubenswrapper[4998]: I0203 08:09:42.756762 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" containerID="cri-o://04356c01a375169cbe715e23ad0d292638012c5a0c05bbba9c0b45970c057e0e" gracePeriod=600
Feb 03 08:09:43 crc kubenswrapper[4998]: I0203 08:09:43.556579 4998 generic.go:334] "Generic (PLEG): container finished" podID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerID="04356c01a375169cbe715e23ad0d292638012c5a0c05bbba9c0b45970c057e0e" exitCode=0
Feb 03 08:09:43 crc kubenswrapper[4998]: I0203 08:09:43.556625 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerDied","Data":"04356c01a375169cbe715e23ad0d292638012c5a0c05bbba9c0b45970c057e0e"}
Feb 03 08:09:43 crc kubenswrapper[4998]: I0203 08:09:43.556936 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerStarted","Data":"fc8e3c2aff521fcde4877053a4a00fe854e23de31735f2c01ea348ab318f2c26"}
Feb 03 08:09:43 crc kubenswrapper[4998]: I0203 08:09:43.556960 4998 scope.go:117] "RemoveContainer" containerID="59350f56fc83c39f196b6f8d69fba3062b7aab975006aa0be70d4a3ac03a44b6"
Feb 03 08:11:33 crc kubenswrapper[4998]: I0203 08:11:33.991377 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-grzjh"]
Feb 03 08:11:33 crc kubenswrapper[4998]: E0203 08:11:33.992200 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc" containerName="registry-server"
Feb 03 08:11:33 crc kubenswrapper[4998]: I0203 08:11:33.992220 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc" containerName="registry-server"
Feb 03 08:11:33 crc kubenswrapper[4998]: E0203 08:11:33.992239 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc" containerName="extract-utilities"
Feb 03 08:11:33 crc kubenswrapper[4998]: I0203 08:11:33.992249 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc" containerName="extract-utilities"
Feb 03 08:11:33 crc kubenswrapper[4998]: E0203 08:11:33.992261 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc" containerName="extract-content"
Feb 03 08:11:33 crc kubenswrapper[4998]: I0203 08:11:33.992271 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc" containerName="extract-content"
Feb 03 08:11:33 crc kubenswrapper[4998]: I0203 08:11:33.992394 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e9d9a8f-f41a-4e8c-9132-fc1f3870cfbc" containerName="registry-server"
Feb 03 08:11:33 crc kubenswrapper[4998]: I0203 08:11:33.993398 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-grzjh"
Feb 03 08:11:34 crc kubenswrapper[4998]: I0203 08:11:34.006016 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-grzjh"]
Feb 03 08:11:34 crc kubenswrapper[4998]: I0203 08:11:34.117900 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4dabe7f-164c-4c8d-a1a2-5d8e74066966-utilities\") pod \"certified-operators-grzjh\" (UID: \"c4dabe7f-164c-4c8d-a1a2-5d8e74066966\") " pod="openshift-marketplace/certified-operators-grzjh"
Feb 03 08:11:34 crc kubenswrapper[4998]: I0203 08:11:34.117961 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4dabe7f-164c-4c8d-a1a2-5d8e74066966-catalog-content\") pod \"certified-operators-grzjh\" (UID: \"c4dabe7f-164c-4c8d-a1a2-5d8e74066966\") " pod="openshift-marketplace/certified-operators-grzjh"
Feb 03 08:11:34 crc kubenswrapper[4998]: I0203 08:11:34.118050 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2mwp8\" (UniqueName: \"kubernetes.io/projected/c4dabe7f-164c-4c8d-a1a2-5d8e74066966-kube-api-access-2mwp8\") pod \"certified-operators-grzjh\" (UID: \"c4dabe7f-164c-4c8d-a1a2-5d8e74066966\") " pod="openshift-marketplace/certified-operators-grzjh"
Feb 03 08:11:34 crc kubenswrapper[4998]: I0203 08:11:34.219132 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2mwp8\" (UniqueName: \"kubernetes.io/projected/c4dabe7f-164c-4c8d-a1a2-5d8e74066966-kube-api-access-2mwp8\") pod \"certified-operators-grzjh\" (UID: \"c4dabe7f-164c-4c8d-a1a2-5d8e74066966\") " pod="openshift-marketplace/certified-operators-grzjh"
Feb 03 08:11:34 crc kubenswrapper[4998]: I0203 08:11:34.219222 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4dabe7f-164c-4c8d-a1a2-5d8e74066966-utilities\") pod \"certified-operators-grzjh\" (UID: \"c4dabe7f-164c-4c8d-a1a2-5d8e74066966\") " pod="openshift-marketplace/certified-operators-grzjh"
Feb 03 08:11:34 crc kubenswrapper[4998]: I0203 08:11:34.219243 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4dabe7f-164c-4c8d-a1a2-5d8e74066966-catalog-content\") pod \"certified-operators-grzjh\" (UID: \"c4dabe7f-164c-4c8d-a1a2-5d8e74066966\") " pod="openshift-marketplace/certified-operators-grzjh"
Feb 03 08:11:34 crc kubenswrapper[4998]: I0203 08:11:34.219710 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4dabe7f-164c-4c8d-a1a2-5d8e74066966-catalog-content\") pod \"certified-operators-grzjh\" (UID: \"c4dabe7f-164c-4c8d-a1a2-5d8e74066966\") " pod="openshift-marketplace/certified-operators-grzjh"
Feb 03 08:11:34 crc kubenswrapper[4998]: I0203 08:11:34.219752 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4dabe7f-164c-4c8d-a1a2-5d8e74066966-utilities\") pod \"certified-operators-grzjh\" (UID: \"c4dabe7f-164c-4c8d-a1a2-5d8e74066966\") " pod="openshift-marketplace/certified-operators-grzjh"
Feb 03 08:11:34 crc kubenswrapper[4998]: I0203 08:11:34.240590 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2mwp8\" (UniqueName: \"kubernetes.io/projected/c4dabe7f-164c-4c8d-a1a2-5d8e74066966-kube-api-access-2mwp8\") pod \"certified-operators-grzjh\" (UID: \"c4dabe7f-164c-4c8d-a1a2-5d8e74066966\") " pod="openshift-marketplace/certified-operators-grzjh"
Feb 03 08:11:34 crc kubenswrapper[4998]: I0203 08:11:34.325575 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-grzjh"
Feb 03 08:11:34 crc kubenswrapper[4998]: I0203 08:11:34.803349 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-grzjh"]
Feb 03 08:11:35 crc kubenswrapper[4998]: I0203 08:11:35.299322 4998 generic.go:334] "Generic (PLEG): container finished" podID="c4dabe7f-164c-4c8d-a1a2-5d8e74066966" containerID="fb31ad07f74a83f0c727bf250aa6828d89daf7ef4d761e0ea271dcf7148d9f31" exitCode=0
Feb 03 08:11:35 crc kubenswrapper[4998]: I0203 08:11:35.299446 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-grzjh" event={"ID":"c4dabe7f-164c-4c8d-a1a2-5d8e74066966","Type":"ContainerDied","Data":"fb31ad07f74a83f0c727bf250aa6828d89daf7ef4d761e0ea271dcf7148d9f31"}
Feb 03 08:11:35 crc kubenswrapper[4998]: I0203 08:11:35.299635 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-grzjh" event={"ID":"c4dabe7f-164c-4c8d-a1a2-5d8e74066966","Type":"ContainerStarted","Data":"6388fe8aca54be9256ea6c7c8a436c18e03e12135d8fe5f497ffd6b319111604"}
Feb 03 08:11:35 crc kubenswrapper[4998]: I0203 08:11:35.301722 4998 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Feb 03 08:11:36 crc kubenswrapper[4998]: I0203 08:11:36.330327 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-grzjh" event={"ID":"c4dabe7f-164c-4c8d-a1a2-5d8e74066966","Type":"ContainerStarted","Data":"a1b0b06b5400ea291d2e28925c5c3b82074b3a18075267c4ce6799dc5bdc52d9"}
Feb 03 08:11:37 crc kubenswrapper[4998]: I0203 08:11:37.338359 4998 generic.go:334] "Generic (PLEG): container finished" podID="c4dabe7f-164c-4c8d-a1a2-5d8e74066966" containerID="a1b0b06b5400ea291d2e28925c5c3b82074b3a18075267c4ce6799dc5bdc52d9" exitCode=0
Feb 03 08:11:37 crc kubenswrapper[4998]: I0203 08:11:37.338403 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-grzjh" event={"ID":"c4dabe7f-164c-4c8d-a1a2-5d8e74066966","Type":"ContainerDied","Data":"a1b0b06b5400ea291d2e28925c5c3b82074b3a18075267c4ce6799dc5bdc52d9"}
Feb 03 08:11:37 crc kubenswrapper[4998]: I0203 08:11:37.338429 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-grzjh" event={"ID":"c4dabe7f-164c-4c8d-a1a2-5d8e74066966","Type":"ContainerStarted","Data":"958dad34d1c63264cd2103824e5c4bb06aeb7feaf1363b28aa6c5ceb5e527954"}
Feb 03 08:11:37 crc kubenswrapper[4998]: I0203 08:11:37.357505 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-grzjh" podStartSLOduration=2.816001568 podStartE2EDuration="4.357485393s" podCreationTimestamp="2026-02-03 08:11:33 +0000 UTC" firstStartedPulling="2026-02-03 08:11:35.301428778 +0000 UTC m=+5133.588122584" lastFinishedPulling="2026-02-03 08:11:36.842912603 +0000 UTC m=+5135.129606409" observedRunningTime="2026-02-03 08:11:37.353062277 +0000 UTC m=+5135.639756093" watchObservedRunningTime="2026-02-03 08:11:37.357485393 +0000 UTC m=+5135.644179199"
Feb 03 08:11:44 crc kubenswrapper[4998]: I0203 08:11:44.325837 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-grzjh"
Feb 03 08:11:44 crc kubenswrapper[4998]: I0203 08:11:44.326444 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-grzjh"
Feb 03 08:11:44 crc kubenswrapper[4998]: I0203 08:11:44.377243 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-grzjh"
Feb 03 08:11:44 crc kubenswrapper[4998]: I0203 08:11:44.425326 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-grzjh"
Feb 03 08:11:44 crc kubenswrapper[4998]: I0203 08:11:44.611269 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-grzjh"]
Feb 03 08:11:46 crc kubenswrapper[4998]: I0203 08:11:46.399576 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-grzjh" podUID="c4dabe7f-164c-4c8d-a1a2-5d8e74066966" containerName="registry-server" containerID="cri-o://958dad34d1c63264cd2103824e5c4bb06aeb7feaf1363b28aa6c5ceb5e527954" gracePeriod=2
Feb 03 08:11:46 crc kubenswrapper[4998]: I0203 08:11:46.776627 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-grzjh"
Feb 03 08:11:46 crc kubenswrapper[4998]: I0203 08:11:46.859444 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4dabe7f-164c-4c8d-a1a2-5d8e74066966-catalog-content\") pod \"c4dabe7f-164c-4c8d-a1a2-5d8e74066966\" (UID: \"c4dabe7f-164c-4c8d-a1a2-5d8e74066966\") "
Feb 03 08:11:46 crc kubenswrapper[4998]: I0203 08:11:46.859494 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2mwp8\" (UniqueName: \"kubernetes.io/projected/c4dabe7f-164c-4c8d-a1a2-5d8e74066966-kube-api-access-2mwp8\") pod \"c4dabe7f-164c-4c8d-a1a2-5d8e74066966\" (UID: \"c4dabe7f-164c-4c8d-a1a2-5d8e74066966\") "
Feb 03 08:11:46 crc kubenswrapper[4998]: I0203 08:11:46.859580 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4dabe7f-164c-4c8d-a1a2-5d8e74066966-utilities\") pod \"c4dabe7f-164c-4c8d-a1a2-5d8e74066966\" (UID: \"c4dabe7f-164c-4c8d-a1a2-5d8e74066966\") "
Feb 03 08:11:46 crc kubenswrapper[4998]: I0203 08:11:46.860595 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c4dabe7f-164c-4c8d-a1a2-5d8e74066966-utilities" (OuterVolumeSpecName: "utilities") pod "c4dabe7f-164c-4c8d-a1a2-5d8e74066966" (UID: "c4dabe7f-164c-4c8d-a1a2-5d8e74066966"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 08:11:46 crc kubenswrapper[4998]: I0203 08:11:46.865715 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4dabe7f-164c-4c8d-a1a2-5d8e74066966-kube-api-access-2mwp8" (OuterVolumeSpecName: "kube-api-access-2mwp8") pod "c4dabe7f-164c-4c8d-a1a2-5d8e74066966" (UID: "c4dabe7f-164c-4c8d-a1a2-5d8e74066966"). InnerVolumeSpecName "kube-api-access-2mwp8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 08:11:46 crc kubenswrapper[4998]: I0203 08:11:46.962009 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2mwp8\" (UniqueName: \"kubernetes.io/projected/c4dabe7f-164c-4c8d-a1a2-5d8e74066966-kube-api-access-2mwp8\") on node \"crc\" DevicePath \"\""
Feb 03 08:11:46 crc kubenswrapper[4998]: I0203 08:11:46.962045 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4dabe7f-164c-4c8d-a1a2-5d8e74066966-utilities\") on node \"crc\" DevicePath \"\""
Feb 03 08:11:47 crc kubenswrapper[4998]: I0203 08:11:47.409412 4998 generic.go:334] "Generic (PLEG): container finished" podID="c4dabe7f-164c-4c8d-a1a2-5d8e74066966" containerID="958dad34d1c63264cd2103824e5c4bb06aeb7feaf1363b28aa6c5ceb5e527954" exitCode=0
Feb 03 08:11:47 crc kubenswrapper[4998]: I0203 08:11:47.409477 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-grzjh" event={"ID":"c4dabe7f-164c-4c8d-a1a2-5d8e74066966","Type":"ContainerDied","Data":"958dad34d1c63264cd2103824e5c4bb06aeb7feaf1363b28aa6c5ceb5e527954"}
Feb 03 08:11:47 crc kubenswrapper[4998]: I0203 08:11:47.409523 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-grzjh"
Feb 03 08:11:47 crc kubenswrapper[4998]: I0203 08:11:47.409899 4998 scope.go:117] "RemoveContainer" containerID="958dad34d1c63264cd2103824e5c4bb06aeb7feaf1363b28aa6c5ceb5e527954"
Feb 03 08:11:47 crc kubenswrapper[4998]: I0203 08:11:47.410156 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-grzjh" event={"ID":"c4dabe7f-164c-4c8d-a1a2-5d8e74066966","Type":"ContainerDied","Data":"6388fe8aca54be9256ea6c7c8a436c18e03e12135d8fe5f497ffd6b319111604"}
Feb 03 08:11:47 crc kubenswrapper[4998]: I0203 08:11:47.430117 4998 scope.go:117] "RemoveContainer" containerID="a1b0b06b5400ea291d2e28925c5c3b82074b3a18075267c4ce6799dc5bdc52d9"
Feb 03 08:11:47 crc kubenswrapper[4998]: I0203 08:11:47.448479 4998 scope.go:117] "RemoveContainer" containerID="fb31ad07f74a83f0c727bf250aa6828d89daf7ef4d761e0ea271dcf7148d9f31"
Feb 03 08:11:47 crc kubenswrapper[4998]: I0203 08:11:47.470750 4998 scope.go:117] "RemoveContainer" containerID="958dad34d1c63264cd2103824e5c4bb06aeb7feaf1363b28aa6c5ceb5e527954"
Feb 03 08:11:47 crc kubenswrapper[4998]: E0203 08:11:47.471286 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"958dad34d1c63264cd2103824e5c4bb06aeb7feaf1363b28aa6c5ceb5e527954\": container with ID starting with 958dad34d1c63264cd2103824e5c4bb06aeb7feaf1363b28aa6c5ceb5e527954 not found: ID does not exist" containerID="958dad34d1c63264cd2103824e5c4bb06aeb7feaf1363b28aa6c5ceb5e527954"
Feb 03 08:11:47 crc kubenswrapper[4998]: I0203 08:11:47.471330 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"958dad34d1c63264cd2103824e5c4bb06aeb7feaf1363b28aa6c5ceb5e527954"} err="failed to get container status \"958dad34d1c63264cd2103824e5c4bb06aeb7feaf1363b28aa6c5ceb5e527954\": rpc error: code = NotFound desc = could not find container \"958dad34d1c63264cd2103824e5c4bb06aeb7feaf1363b28aa6c5ceb5e527954\": container with ID starting with 958dad34d1c63264cd2103824e5c4bb06aeb7feaf1363b28aa6c5ceb5e527954 not found: ID does not exist"
Feb 03 08:11:47 crc kubenswrapper[4998]: I0203 08:11:47.471354 4998 scope.go:117] "RemoveContainer" containerID="a1b0b06b5400ea291d2e28925c5c3b82074b3a18075267c4ce6799dc5bdc52d9"
Feb 03 08:11:47 crc kubenswrapper[4998]: E0203 08:11:47.471728 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a1b0b06b5400ea291d2e28925c5c3b82074b3a18075267c4ce6799dc5bdc52d9\": container with ID starting with a1b0b06b5400ea291d2e28925c5c3b82074b3a18075267c4ce6799dc5bdc52d9 not found: ID does not exist" containerID="a1b0b06b5400ea291d2e28925c5c3b82074b3a18075267c4ce6799dc5bdc52d9"
Feb 03 08:11:47 crc kubenswrapper[4998]: I0203 08:11:47.471763 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1b0b06b5400ea291d2e28925c5c3b82074b3a18075267c4ce6799dc5bdc52d9"} err="failed to get container status \"a1b0b06b5400ea291d2e28925c5c3b82074b3a18075267c4ce6799dc5bdc52d9\": rpc error: code = NotFound desc = could not find container \"a1b0b06b5400ea291d2e28925c5c3b82074b3a18075267c4ce6799dc5bdc52d9\": container with ID starting with a1b0b06b5400ea291d2e28925c5c3b82074b3a18075267c4ce6799dc5bdc52d9 not found: ID does not exist"
Feb 03 08:11:47 crc kubenswrapper[4998]: I0203 08:11:47.471808 4998 scope.go:117] "RemoveContainer" containerID="fb31ad07f74a83f0c727bf250aa6828d89daf7ef4d761e0ea271dcf7148d9f31"
Feb 03 08:11:47 crc kubenswrapper[4998]: E0203 08:11:47.472085 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb31ad07f74a83f0c727bf250aa6828d89daf7ef4d761e0ea271dcf7148d9f31\": container with ID starting with fb31ad07f74a83f0c727bf250aa6828d89daf7ef4d761e0ea271dcf7148d9f31 not found: ID does not exist" containerID="fb31ad07f74a83f0c727bf250aa6828d89daf7ef4d761e0ea271dcf7148d9f31"
Feb 03 08:11:47 crc kubenswrapper[4998]: I0203 08:11:47.472107 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb31ad07f74a83f0c727bf250aa6828d89daf7ef4d761e0ea271dcf7148d9f31"} err="failed to get container status \"fb31ad07f74a83f0c727bf250aa6828d89daf7ef4d761e0ea271dcf7148d9f31\": rpc error: code = NotFound desc = could not find container \"fb31ad07f74a83f0c727bf250aa6828d89daf7ef4d761e0ea271dcf7148d9f31\": container with ID starting with fb31ad07f74a83f0c727bf250aa6828d89daf7ef4d761e0ea271dcf7148d9f31 not found: ID does not exist"
Feb 03 08:11:47 crc kubenswrapper[4998]: I0203 08:11:47.525128 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c4dabe7f-164c-4c8d-a1a2-5d8e74066966-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c4dabe7f-164c-4c8d-a1a2-5d8e74066966" (UID: "c4dabe7f-164c-4c8d-a1a2-5d8e74066966"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 08:11:47 crc kubenswrapper[4998]: I0203 08:11:47.571553 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4dabe7f-164c-4c8d-a1a2-5d8e74066966-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 03 08:11:47 crc kubenswrapper[4998]: I0203 08:11:47.745202 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-grzjh"]
Feb 03 08:11:47 crc kubenswrapper[4998]: I0203 08:11:47.750442 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-grzjh"]
Feb 03 08:11:48 crc kubenswrapper[4998]: I0203 08:11:48.435668 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4dabe7f-164c-4c8d-a1a2-5d8e74066966" path="/var/lib/kubelet/pods/c4dabe7f-164c-4c8d-a1a2-5d8e74066966/volumes"
Feb 03 08:11:58 crc kubenswrapper[4998]: I0203 08:11:58.618048 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-wq8ln"]
Feb 03 08:11:58 crc kubenswrapper[4998]: E0203 08:11:58.618830 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4dabe7f-164c-4c8d-a1a2-5d8e74066966" containerName="registry-server"
Feb 03 08:11:58 crc kubenswrapper[4998]: I0203 08:11:58.618849 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4dabe7f-164c-4c8d-a1a2-5d8e74066966" containerName="registry-server"
Feb 03 08:11:58 crc kubenswrapper[4998]: E0203 08:11:58.618878 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4dabe7f-164c-4c8d-a1a2-5d8e74066966" containerName="extract-content"
Feb 03 08:11:58 crc kubenswrapper[4998]: I0203 08:11:58.618885 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4dabe7f-164c-4c8d-a1a2-5d8e74066966" containerName="extract-content"
Feb 03 08:11:58 crc kubenswrapper[4998]: E0203 08:11:58.618899 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4dabe7f-164c-4c8d-a1a2-5d8e74066966" containerName="extract-utilities"
Feb 03 08:11:58 crc kubenswrapper[4998]: I0203 08:11:58.618908 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4dabe7f-164c-4c8d-a1a2-5d8e74066966" containerName="extract-utilities"
Feb 03 08:11:58 crc kubenswrapper[4998]: I0203 08:11:58.619114 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4dabe7f-164c-4c8d-a1a2-5d8e74066966" containerName="registry-server"
Feb 03 08:11:58 crc kubenswrapper[4998]: I0203 08:11:58.629156 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wq8ln"
Feb 03 08:11:58 crc kubenswrapper[4998]: I0203 08:11:58.631978 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wq8ln"]
Feb 03 08:11:58 crc kubenswrapper[4998]: I0203 08:11:58.733452 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6ps8\" (UniqueName: \"kubernetes.io/projected/6d4d3002-0580-4d8a-8cce-3f172b202d95-kube-api-access-b6ps8\") pod \"community-operators-wq8ln\" (UID: \"6d4d3002-0580-4d8a-8cce-3f172b202d95\") " pod="openshift-marketplace/community-operators-wq8ln"
Feb 03 08:11:58 crc kubenswrapper[4998]: I0203 08:11:58.733564 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6d4d3002-0580-4d8a-8cce-3f172b202d95-catalog-content\") pod \"community-operators-wq8ln\" (UID: \"6d4d3002-0580-4d8a-8cce-3f172b202d95\") " pod="openshift-marketplace/community-operators-wq8ln"
Feb 03 08:11:58 crc kubenswrapper[4998]: I0203 08:11:58.733585 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6d4d3002-0580-4d8a-8cce-3f172b202d95-utilities\") pod \"community-operators-wq8ln\" (UID: \"6d4d3002-0580-4d8a-8cce-3f172b202d95\") " pod="openshift-marketplace/community-operators-wq8ln"
Feb 03 08:11:58 crc kubenswrapper[4998]: I0203 08:11:58.835429 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6d4d3002-0580-4d8a-8cce-3f172b202d95-catalog-content\") pod \"community-operators-wq8ln\" (UID: \"6d4d3002-0580-4d8a-8cce-3f172b202d95\") " pod="openshift-marketplace/community-operators-wq8ln"
Feb 03 08:11:58 crc kubenswrapper[4998]: I0203 08:11:58.835473 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6d4d3002-0580-4d8a-8cce-3f172b202d95-utilities\") pod \"community-operators-wq8ln\" (UID: \"6d4d3002-0580-4d8a-8cce-3f172b202d95\") " pod="openshift-marketplace/community-operators-wq8ln"
Feb 03 08:11:58 crc kubenswrapper[4998]: I0203 08:11:58.835547 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6ps8\" (UniqueName: \"kubernetes.io/projected/6d4d3002-0580-4d8a-8cce-3f172b202d95-kube-api-access-b6ps8\") pod \"community-operators-wq8ln\" (UID: \"6d4d3002-0580-4d8a-8cce-3f172b202d95\") " pod="openshift-marketplace/community-operators-wq8ln"
Feb 03 08:11:58 crc kubenswrapper[4998]: I0203 08:11:58.835992 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6d4d3002-0580-4d8a-8cce-3f172b202d95-catalog-content\") pod \"community-operators-wq8ln\" (UID: \"6d4d3002-0580-4d8a-8cce-3f172b202d95\") " pod="openshift-marketplace/community-operators-wq8ln"
Feb 03 08:11:58 crc kubenswrapper[4998]: I0203 08:11:58.836072 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6d4d3002-0580-4d8a-8cce-3f172b202d95-utilities\") pod \"community-operators-wq8ln\" (UID: \"6d4d3002-0580-4d8a-8cce-3f172b202d95\") " pod="openshift-marketplace/community-operators-wq8ln"
Feb 03 08:11:58 crc kubenswrapper[4998]: I0203 08:11:58.854225 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6ps8\" (UniqueName: \"kubernetes.io/projected/6d4d3002-0580-4d8a-8cce-3f172b202d95-kube-api-access-b6ps8\") pod \"community-operators-wq8ln\" (UID: \"6d4d3002-0580-4d8a-8cce-3f172b202d95\") " pod="openshift-marketplace/community-operators-wq8ln"
Feb 03 08:11:58 crc kubenswrapper[4998]: I0203 08:11:58.952107 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wq8ln"
Feb 03 08:11:59 crc kubenswrapper[4998]: I0203 08:11:59.407334 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wq8ln"]
Feb 03 08:11:59 crc kubenswrapper[4998]: I0203 08:11:59.523178 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wq8ln" event={"ID":"6d4d3002-0580-4d8a-8cce-3f172b202d95","Type":"ContainerStarted","Data":"a3bfbaa14d0709457fca8e9dba06d4b883d4e86ce1376c05db1f355cb0118984"}
Feb 03 08:12:00 crc kubenswrapper[4998]: I0203 08:12:00.531183 4998 generic.go:334] "Generic (PLEG): container finished" podID="6d4d3002-0580-4d8a-8cce-3f172b202d95" containerID="f22cc3d80dd9bfaf5d8684c1b8f1871782d2429b21f1e8ff6c4cf53ce65a3abe" exitCode=0
Feb 03 08:12:00 crc kubenswrapper[4998]: I0203 08:12:00.531233 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wq8ln" event={"ID":"6d4d3002-0580-4d8a-8cce-3f172b202d95","Type":"ContainerDied","Data":"f22cc3d80dd9bfaf5d8684c1b8f1871782d2429b21f1e8ff6c4cf53ce65a3abe"}
Feb 03 08:12:00 crc kubenswrapper[4998]: I0203 08:12:00.824610 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-gkmnw"]
Feb 03 08:12:00 crc kubenswrapper[4998]: I0203 08:12:00.826492 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gkmnw"
Feb 03 08:12:00 crc kubenswrapper[4998]: I0203 08:12:00.835744 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gkmnw"]
Feb 03 08:12:00 crc kubenswrapper[4998]: I0203 08:12:00.885906 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x942l\" (UniqueName: \"kubernetes.io/projected/e1ceb31c-bbeb-4c30-af41-1f0696b3a670-kube-api-access-x942l\") pod \"redhat-marketplace-gkmnw\" (UID: \"e1ceb31c-bbeb-4c30-af41-1f0696b3a670\") " pod="openshift-marketplace/redhat-marketplace-gkmnw"
Feb 03 08:12:00 crc kubenswrapper[4998]: I0203 08:12:00.886285 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1ceb31c-bbeb-4c30-af41-1f0696b3a670-utilities\") pod \"redhat-marketplace-gkmnw\" (UID: \"e1ceb31c-bbeb-4c30-af41-1f0696b3a670\") " pod="openshift-marketplace/redhat-marketplace-gkmnw"
Feb 03 08:12:00 crc kubenswrapper[4998]: I0203 08:12:00.886444 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1ceb31c-bbeb-4c30-af41-1f0696b3a670-catalog-content\") pod \"redhat-marketplace-gkmnw\" (UID: \"e1ceb31c-bbeb-4c30-af41-1f0696b3a670\") " pod="openshift-marketplace/redhat-marketplace-gkmnw"
Feb 03 08:12:00 crc kubenswrapper[4998]: I0203 08:12:00.987671 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x942l\" (UniqueName: \"kubernetes.io/projected/e1ceb31c-bbeb-4c30-af41-1f0696b3a670-kube-api-access-x942l\") pod \"redhat-marketplace-gkmnw\" (UID: \"e1ceb31c-bbeb-4c30-af41-1f0696b3a670\") " pod="openshift-marketplace/redhat-marketplace-gkmnw"
Feb 03 08:12:00 crc kubenswrapper[4998]: I0203 08:12:00.988087 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1ceb31c-bbeb-4c30-af41-1f0696b3a670-utilities\") pod \"redhat-marketplace-gkmnw\" (UID: \"e1ceb31c-bbeb-4c30-af41-1f0696b3a670\") " pod="openshift-marketplace/redhat-marketplace-gkmnw"
Feb 03 08:12:00 crc kubenswrapper[4998]: I0203 08:12:00.988201 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1ceb31c-bbeb-4c30-af41-1f0696b3a670-catalog-content\") pod \"redhat-marketplace-gkmnw\" (UID: \"e1ceb31c-bbeb-4c30-af41-1f0696b3a670\") " pod="openshift-marketplace/redhat-marketplace-gkmnw"
Feb 03 08:12:00 crc kubenswrapper[4998]: I0203 08:12:00.988504 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1ceb31c-bbeb-4c30-af41-1f0696b3a670-utilities\") pod \"redhat-marketplace-gkmnw\" (UID: \"e1ceb31c-bbeb-4c30-af41-1f0696b3a670\") " pod="openshift-marketplace/redhat-marketplace-gkmnw"
Feb 03 08:12:00 crc kubenswrapper[4998]: I0203 08:12:00.988564 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1ceb31c-bbeb-4c30-af41-1f0696b3a670-catalog-content\") pod \"redhat-marketplace-gkmnw\" (UID: \"e1ceb31c-bbeb-4c30-af41-1f0696b3a670\") " pod="openshift-marketplace/redhat-marketplace-gkmnw"
Feb 03 08:12:01 crc kubenswrapper[4998]: I0203 08:12:01.010884 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x942l\" (UniqueName: \"kubernetes.io/projected/e1ceb31c-bbeb-4c30-af41-1f0696b3a670-kube-api-access-x942l\") pod \"redhat-marketplace-gkmnw\" (UID: \"e1ceb31c-bbeb-4c30-af41-1f0696b3a670\") " pod="openshift-marketplace/redhat-marketplace-gkmnw"
Feb 03 08:12:01 crc kubenswrapper[4998]: I0203 08:12:01.159605 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gkmnw"
Feb 03 08:12:01 crc kubenswrapper[4998]: I0203 08:12:01.426473 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gkmnw"]
Feb 03 08:12:01 crc kubenswrapper[4998]: I0203 08:12:01.539009 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gkmnw" event={"ID":"e1ceb31c-bbeb-4c30-af41-1f0696b3a670","Type":"ContainerStarted","Data":"33aabcdc86cf7b2fdd03f7813250711bc01578d0e911738f2854d64c3bf40267"}
Feb 03 08:12:02 crc kubenswrapper[4998]: I0203 08:12:02.549944 4998 generic.go:334] "Generic (PLEG): container finished" podID="6d4d3002-0580-4d8a-8cce-3f172b202d95" containerID="6b215faa0ddb21ac65c95a1371b0476a1c3c6d94a09e1cdd70d7b2b248239614" exitCode=0
Feb 03 08:12:02 crc kubenswrapper[4998]: I0203 08:12:02.550071 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wq8ln" event={"ID":"6d4d3002-0580-4d8a-8cce-3f172b202d95","Type":"ContainerDied","Data":"6b215faa0ddb21ac65c95a1371b0476a1c3c6d94a09e1cdd70d7b2b248239614"}
Feb 03 08:12:02 crc kubenswrapper[4998]: I0203 08:12:02.553709 4998 generic.go:334] "Generic (PLEG): container finished" podID="e1ceb31c-bbeb-4c30-af41-1f0696b3a670" containerID="359f5d7600e4016e9fff5541827f3cd48a47e4299d13cbdbb9bcef03654bdb10" exitCode=0
Feb 03 08:12:02 crc kubenswrapper[4998]: I0203 08:12:02.553768 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gkmnw" event={"ID":"e1ceb31c-bbeb-4c30-af41-1f0696b3a670","Type":"ContainerDied","Data":"359f5d7600e4016e9fff5541827f3cd48a47e4299d13cbdbb9bcef03654bdb10"}
Feb 03 08:12:03 crc kubenswrapper[4998]: I0203 08:12:03.561596 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wq8ln" event={"ID":"6d4d3002-0580-4d8a-8cce-3f172b202d95","Type":"ContainerStarted","Data":"1df5cb19376a7f638dceb8697887ae458348ff36a0e601bc8f0c10653cc41dd8"}
Feb 03 08:12:04 crc kubenswrapper[4998]: I0203 08:12:04.569021 4998 generic.go:334] "Generic (PLEG): container finished" podID="e1ceb31c-bbeb-4c30-af41-1f0696b3a670" containerID="322f7fe367b94fbc385f19350dc171158db9a651d6550d85c8f79523f2a2d03c" exitCode=0
Feb 03 08:12:04 crc kubenswrapper[4998]: I0203 08:12:04.569176 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gkmnw" event={"ID":"e1ceb31c-bbeb-4c30-af41-1f0696b3a670","Type":"ContainerDied","Data":"322f7fe367b94fbc385f19350dc171158db9a651d6550d85c8f79523f2a2d03c"}
Feb 03 08:12:04 crc kubenswrapper[4998]: I0203 08:12:04.593596 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-wq8ln" podStartSLOduration=4.110936672 podStartE2EDuration="6.593572058s" podCreationTimestamp="2026-02-03 08:11:58 +0000 UTC" firstStartedPulling="2026-02-03 08:12:00.533287997 +0000 UTC m=+5158.819981803" lastFinishedPulling="2026-02-03 08:12:03.015923373 +0000 UTC m=+5161.302617189" observedRunningTime="2026-02-03 08:12:03.586728245 +0000 UTC m=+5161.873422051" watchObservedRunningTime="2026-02-03 08:12:04.593572058 +0000 UTC m=+5162.880265884"
Feb 03 08:12:05 crc kubenswrapper[4998]: I0203 08:12:05.580438 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gkmnw" event={"ID":"e1ceb31c-bbeb-4c30-af41-1f0696b3a670","Type":"ContainerStarted","Data":"69233abde20c7b5ac3534fc17ff5c2d1338a7a1a8c45463cbed826696b4feeb8"}
Feb 03 08:12:05 crc kubenswrapper[4998]: I0203 08:12:05.609541 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-gkmnw" podStartSLOduration=2.983335226 podStartE2EDuration="5.609490521s" podCreationTimestamp="2026-02-03 08:12:00 +0000 UTC" firstStartedPulling="2026-02-03 08:12:02.558606026 +0000 UTC m=+5160.845299832" lastFinishedPulling="2026-02-03 08:12:05.184761311 +0000 UTC m=+5163.471455127" observedRunningTime="2026-02-03 08:12:05.6006827 +0000 UTC m=+5163.887376526" watchObservedRunningTime="2026-02-03 08:12:05.609490521 +0000 UTC m=+5163.896184327"
Feb 03 08:12:08 crc kubenswrapper[4998]: I0203 08:12:08.953284 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-wq8ln"
Feb 03 08:12:08 crc kubenswrapper[4998]: I0203 08:12:08.953597 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-wq8ln"
Feb 03 08:12:08 crc kubenswrapper[4998]: I0203 08:12:08.995869 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-wq8ln"
Feb 03 08:12:09 crc kubenswrapper[4998]: I0203 08:12:09.642904 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-wq8ln"
Feb 03 08:12:10 crc kubenswrapper[4998]: I0203 08:12:10.405412 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wq8ln"]
Feb 03 08:12:11 crc kubenswrapper[4998]: I0203 08:12:11.159835 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-gkmnw"
Feb 03 08:12:11 crc kubenswrapper[4998]: I0203 08:12:11.159899 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-gkmnw"
Feb 03 08:12:11 crc kubenswrapper[4998]: I0203 08:12:11.228083 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-gkmnw"
Feb 03 08:12:11 crc kubenswrapper[4998]: I0203 08:12:11.618027 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-wq8ln" podUID="6d4d3002-0580-4d8a-8cce-3f172b202d95" containerName="registry-server" containerID="cri-o://1df5cb19376a7f638dceb8697887ae458348ff36a0e601bc8f0c10653cc41dd8" gracePeriod=2
Feb 03 08:12:11 crc kubenswrapper[4998]: I0203 08:12:11.658026 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-gkmnw"
Feb 03 08:12:12 crc kubenswrapper[4998]: I0203 08:12:12.001817 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wq8ln"
Feb 03 08:12:12 crc kubenswrapper[4998]: I0203 08:12:12.047733 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6d4d3002-0580-4d8a-8cce-3f172b202d95-catalog-content\") pod \"6d4d3002-0580-4d8a-8cce-3f172b202d95\" (UID: \"6d4d3002-0580-4d8a-8cce-3f172b202d95\") "
Feb 03 08:12:12 crc kubenswrapper[4998]: I0203 08:12:12.048030 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6d4d3002-0580-4d8a-8cce-3f172b202d95-utilities\") pod \"6d4d3002-0580-4d8a-8cce-3f172b202d95\" (UID: \"6d4d3002-0580-4d8a-8cce-3f172b202d95\") "
Feb 03 08:12:12 crc kubenswrapper[4998]: I0203 08:12:12.048913 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6d4d3002-0580-4d8a-8cce-3f172b202d95-utilities" (OuterVolumeSpecName: "utilities") pod "6d4d3002-0580-4d8a-8cce-3f172b202d95" (UID: "6d4d3002-0580-4d8a-8cce-3f172b202d95"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 08:12:12 crc kubenswrapper[4998]: I0203 08:12:12.049048 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b6ps8\" (UniqueName: \"kubernetes.io/projected/6d4d3002-0580-4d8a-8cce-3f172b202d95-kube-api-access-b6ps8\") pod \"6d4d3002-0580-4d8a-8cce-3f172b202d95\" (UID: \"6d4d3002-0580-4d8a-8cce-3f172b202d95\") "
Feb 03 08:12:12 crc kubenswrapper[4998]: I0203 08:12:12.050342 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6d4d3002-0580-4d8a-8cce-3f172b202d95-utilities\") on node \"crc\" DevicePath \"\""
Feb 03 08:12:12 crc kubenswrapper[4998]: I0203 08:12:12.056909 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d4d3002-0580-4d8a-8cce-3f172b202d95-kube-api-access-b6ps8" (OuterVolumeSpecName: "kube-api-access-b6ps8") pod "6d4d3002-0580-4d8a-8cce-3f172b202d95" (UID: "6d4d3002-0580-4d8a-8cce-3f172b202d95"). InnerVolumeSpecName "kube-api-access-b6ps8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 08:12:12 crc kubenswrapper[4998]: I0203 08:12:12.101852 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6d4d3002-0580-4d8a-8cce-3f172b202d95-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6d4d3002-0580-4d8a-8cce-3f172b202d95" (UID: "6d4d3002-0580-4d8a-8cce-3f172b202d95"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 08:12:12 crc kubenswrapper[4998]: I0203 08:12:12.151355 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b6ps8\" (UniqueName: \"kubernetes.io/projected/6d4d3002-0580-4d8a-8cce-3f172b202d95-kube-api-access-b6ps8\") on node \"crc\" DevicePath \"\""
Feb 03 08:12:12 crc kubenswrapper[4998]: I0203 08:12:12.151419 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6d4d3002-0580-4d8a-8cce-3f172b202d95-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 03 08:12:12 crc kubenswrapper[4998]: I0203 08:12:12.628129 4998 generic.go:334] "Generic (PLEG): container finished" podID="6d4d3002-0580-4d8a-8cce-3f172b202d95" containerID="1df5cb19376a7f638dceb8697887ae458348ff36a0e601bc8f0c10653cc41dd8" exitCode=0
Feb 03 08:12:12 crc kubenswrapper[4998]: I0203 08:12:12.628200 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wq8ln" event={"ID":"6d4d3002-0580-4d8a-8cce-3f172b202d95","Type":"ContainerDied","Data":"1df5cb19376a7f638dceb8697887ae458348ff36a0e601bc8f0c10653cc41dd8"}
Feb 03 08:12:12 crc kubenswrapper[4998]: I0203 08:12:12.628234 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wq8ln"
Feb 03 08:12:12 crc kubenswrapper[4998]: I0203 08:12:12.628252 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wq8ln" event={"ID":"6d4d3002-0580-4d8a-8cce-3f172b202d95","Type":"ContainerDied","Data":"a3bfbaa14d0709457fca8e9dba06d4b883d4e86ce1376c05db1f355cb0118984"}
Feb 03 08:12:12 crc kubenswrapper[4998]: I0203 08:12:12.628274 4998 scope.go:117] "RemoveContainer" containerID="1df5cb19376a7f638dceb8697887ae458348ff36a0e601bc8f0c10653cc41dd8"
Feb 03 08:12:12 crc kubenswrapper[4998]: I0203 08:12:12.653727 4998 scope.go:117] "RemoveContainer" containerID="6b215faa0ddb21ac65c95a1371b0476a1c3c6d94a09e1cdd70d7b2b248239614"
Feb 03 08:12:12 crc kubenswrapper[4998]: I0203 08:12:12.653895 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wq8ln"]
Feb 03 08:12:12 crc kubenswrapper[4998]: I0203 08:12:12.659317 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-wq8ln"]
Feb 03 08:12:12 crc kubenswrapper[4998]: I0203 08:12:12.675988 4998 scope.go:117] "RemoveContainer" containerID="f22cc3d80dd9bfaf5d8684c1b8f1871782d2429b21f1e8ff6c4cf53ce65a3abe"
Feb 03 08:12:12 crc kubenswrapper[4998]: I0203 08:12:12.691989 4998 scope.go:117] "RemoveContainer" containerID="1df5cb19376a7f638dceb8697887ae458348ff36a0e601bc8f0c10653cc41dd8"
Feb 03 08:12:12 crc kubenswrapper[4998]: E0203 08:12:12.692431 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1df5cb19376a7f638dceb8697887ae458348ff36a0e601bc8f0c10653cc41dd8\": container with ID starting with 1df5cb19376a7f638dceb8697887ae458348ff36a0e601bc8f0c10653cc41dd8 not found: ID does not exist" containerID="1df5cb19376a7f638dceb8697887ae458348ff36a0e601bc8f0c10653cc41dd8"
Feb 03 08:12:12 crc kubenswrapper[4998]: I0203 08:12:12.692472 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1df5cb19376a7f638dceb8697887ae458348ff36a0e601bc8f0c10653cc41dd8"} err="failed to get container status \"1df5cb19376a7f638dceb8697887ae458348ff36a0e601bc8f0c10653cc41dd8\": rpc error: code = NotFound desc = could not find container \"1df5cb19376a7f638dceb8697887ae458348ff36a0e601bc8f0c10653cc41dd8\": container with ID starting with 1df5cb19376a7f638dceb8697887ae458348ff36a0e601bc8f0c10653cc41dd8 not found: ID does not exist"
Feb 03 08:12:12 crc kubenswrapper[4998]: I0203 08:12:12.692497 4998 scope.go:117] "RemoveContainer" containerID="6b215faa0ddb21ac65c95a1371b0476a1c3c6d94a09e1cdd70d7b2b248239614"
Feb 03 08:12:12 crc kubenswrapper[4998]: E0203 08:12:12.692736 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b215faa0ddb21ac65c95a1371b0476a1c3c6d94a09e1cdd70d7b2b248239614\": container with ID starting with 6b215faa0ddb21ac65c95a1371b0476a1c3c6d94a09e1cdd70d7b2b248239614 not found: ID does not exist" containerID="6b215faa0ddb21ac65c95a1371b0476a1c3c6d94a09e1cdd70d7b2b248239614"
Feb 03 08:12:12 crc kubenswrapper[4998]: I0203 08:12:12.692756 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b215faa0ddb21ac65c95a1371b0476a1c3c6d94a09e1cdd70d7b2b248239614"} err="failed to get container status \"6b215faa0ddb21ac65c95a1371b0476a1c3c6d94a09e1cdd70d7b2b248239614\": rpc error: code = NotFound desc = could not find container \"6b215faa0ddb21ac65c95a1371b0476a1c3c6d94a09e1cdd70d7b2b248239614\": container with ID starting with 6b215faa0ddb21ac65c95a1371b0476a1c3c6d94a09e1cdd70d7b2b248239614 not found: ID does not exist"
Feb 03 08:12:12 crc kubenswrapper[4998]: I0203 08:12:12.692773 4998 scope.go:117] "RemoveContainer" containerID="f22cc3d80dd9bfaf5d8684c1b8f1871782d2429b21f1e8ff6c4cf53ce65a3abe"
Feb 03 08:12:12 crc kubenswrapper[4998]: E0203 08:12:12.693057 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f22cc3d80dd9bfaf5d8684c1b8f1871782d2429b21f1e8ff6c4cf53ce65a3abe\": container with ID starting with f22cc3d80dd9bfaf5d8684c1b8f1871782d2429b21f1e8ff6c4cf53ce65a3abe not found: ID does not exist" containerID="f22cc3d80dd9bfaf5d8684c1b8f1871782d2429b21f1e8ff6c4cf53ce65a3abe"
Feb 03 08:12:12 crc kubenswrapper[4998]: I0203 08:12:12.693081 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f22cc3d80dd9bfaf5d8684c1b8f1871782d2429b21f1e8ff6c4cf53ce65a3abe"} err="failed to get container status \"f22cc3d80dd9bfaf5d8684c1b8f1871782d2429b21f1e8ff6c4cf53ce65a3abe\": rpc error: code = NotFound desc = could not find container \"f22cc3d80dd9bfaf5d8684c1b8f1871782d2429b21f1e8ff6c4cf53ce65a3abe\": container with ID starting with f22cc3d80dd9bfaf5d8684c1b8f1871782d2429b21f1e8ff6c4cf53ce65a3abe not found: ID does not exist"
Feb 03 08:12:12 crc kubenswrapper[4998]: I0203 08:12:12.754026 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 03 08:12:12 crc kubenswrapper[4998]: I0203 08:12:12.754095 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 03
08:12:13 crc kubenswrapper[4998]: I0203 08:12:13.405951 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gkmnw"] Feb 03 08:12:13 crc kubenswrapper[4998]: I0203 08:12:13.636017 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-gkmnw" podUID="e1ceb31c-bbeb-4c30-af41-1f0696b3a670" containerName="registry-server" containerID="cri-o://69233abde20c7b5ac3534fc17ff5c2d1338a7a1a8c45463cbed826696b4feeb8" gracePeriod=2 Feb 03 08:12:14 crc kubenswrapper[4998]: I0203 08:12:14.034179 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gkmnw" Feb 03 08:12:14 crc kubenswrapper[4998]: I0203 08:12:14.079843 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1ceb31c-bbeb-4c30-af41-1f0696b3a670-utilities\") pod \"e1ceb31c-bbeb-4c30-af41-1f0696b3a670\" (UID: \"e1ceb31c-bbeb-4c30-af41-1f0696b3a670\") " Feb 03 08:12:14 crc kubenswrapper[4998]: I0203 08:12:14.079937 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x942l\" (UniqueName: \"kubernetes.io/projected/e1ceb31c-bbeb-4c30-af41-1f0696b3a670-kube-api-access-x942l\") pod \"e1ceb31c-bbeb-4c30-af41-1f0696b3a670\" (UID: \"e1ceb31c-bbeb-4c30-af41-1f0696b3a670\") " Feb 03 08:12:14 crc kubenswrapper[4998]: I0203 08:12:14.080064 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1ceb31c-bbeb-4c30-af41-1f0696b3a670-catalog-content\") pod \"e1ceb31c-bbeb-4c30-af41-1f0696b3a670\" (UID: \"e1ceb31c-bbeb-4c30-af41-1f0696b3a670\") " Feb 03 08:12:14 crc kubenswrapper[4998]: I0203 08:12:14.080990 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1ceb31c-bbeb-4c30-af41-1f0696b3a670-utilities" (OuterVolumeSpecName: "utilities") pod "e1ceb31c-bbeb-4c30-af41-1f0696b3a670" (UID: "e1ceb31c-bbeb-4c30-af41-1f0696b3a670"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:12:14 crc kubenswrapper[4998]: I0203 08:12:14.085665 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1ceb31c-bbeb-4c30-af41-1f0696b3a670-kube-api-access-x942l" (OuterVolumeSpecName: "kube-api-access-x942l") pod "e1ceb31c-bbeb-4c30-af41-1f0696b3a670" (UID: "e1ceb31c-bbeb-4c30-af41-1f0696b3a670"). InnerVolumeSpecName "kube-api-access-x942l". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:12:14 crc kubenswrapper[4998]: I0203 08:12:14.106165 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1ceb31c-bbeb-4c30-af41-1f0696b3a670-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e1ceb31c-bbeb-4c30-af41-1f0696b3a670" (UID: "e1ceb31c-bbeb-4c30-af41-1f0696b3a670"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:12:14 crc kubenswrapper[4998]: I0203 08:12:14.182179 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1ceb31c-bbeb-4c30-af41-1f0696b3a670-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 08:12:14 crc kubenswrapper[4998]: I0203 08:12:14.182226 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1ceb31c-bbeb-4c30-af41-1f0696b3a670-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 08:12:14 crc kubenswrapper[4998]: I0203 08:12:14.182239 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x942l\" (UniqueName: \"kubernetes.io/projected/e1ceb31c-bbeb-4c30-af41-1f0696b3a670-kube-api-access-x942l\") on node \"crc\" DevicePath \"\"" Feb 03 08:12:14 crc kubenswrapper[4998]: I0203 08:12:14.437480 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6d4d3002-0580-4d8a-8cce-3f172b202d95" path="/var/lib/kubelet/pods/6d4d3002-0580-4d8a-8cce-3f172b202d95/volumes" Feb 03 08:12:14 crc kubenswrapper[4998]: I0203 08:12:14.643267 4998 generic.go:334] "Generic (PLEG): container finished" podID="e1ceb31c-bbeb-4c30-af41-1f0696b3a670" containerID="69233abde20c7b5ac3534fc17ff5c2d1338a7a1a8c45463cbed826696b4feeb8" exitCode=0 Feb 03 08:12:14 crc kubenswrapper[4998]: I0203 08:12:14.643313 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gkmnw" Feb 03 08:12:14 crc kubenswrapper[4998]: I0203 08:12:14.643328 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gkmnw" event={"ID":"e1ceb31c-bbeb-4c30-af41-1f0696b3a670","Type":"ContainerDied","Data":"69233abde20c7b5ac3534fc17ff5c2d1338a7a1a8c45463cbed826696b4feeb8"} Feb 03 08:12:14 crc kubenswrapper[4998]: I0203 08:12:14.643396 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gkmnw" event={"ID":"e1ceb31c-bbeb-4c30-af41-1f0696b3a670","Type":"ContainerDied","Data":"33aabcdc86cf7b2fdd03f7813250711bc01578d0e911738f2854d64c3bf40267"} Feb 03 08:12:14 crc kubenswrapper[4998]: I0203 08:12:14.643419 4998 scope.go:117] "RemoveContainer" containerID="69233abde20c7b5ac3534fc17ff5c2d1338a7a1a8c45463cbed826696b4feeb8" Feb 03 08:12:14 crc kubenswrapper[4998]: I0203 08:12:14.660678 4998 scope.go:117] "RemoveContainer" containerID="322f7fe367b94fbc385f19350dc171158db9a651d6550d85c8f79523f2a2d03c" Feb 03 08:12:14 crc kubenswrapper[4998]: I0203 08:12:14.669380 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gkmnw"] Feb 03 08:12:14 crc kubenswrapper[4998]: I0203 08:12:14.677267 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-gkmnw"] Feb 03 08:12:14 crc kubenswrapper[4998]: I0203 08:12:14.687560 4998 scope.go:117] "RemoveContainer" containerID="359f5d7600e4016e9fff5541827f3cd48a47e4299d13cbdbb9bcef03654bdb10" Feb 03 08:12:14 crc kubenswrapper[4998]: I0203 08:12:14.707264 4998 scope.go:117] "RemoveContainer" containerID="69233abde20c7b5ac3534fc17ff5c2d1338a7a1a8c45463cbed826696b4feeb8" Feb 03 08:12:14 crc kubenswrapper[4998]: E0203 08:12:14.707704 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"69233abde20c7b5ac3534fc17ff5c2d1338a7a1a8c45463cbed826696b4feeb8\": container with ID 
starting with 69233abde20c7b5ac3534fc17ff5c2d1338a7a1a8c45463cbed826696b4feeb8 not found: ID does not exist" containerID="69233abde20c7b5ac3534fc17ff5c2d1338a7a1a8c45463cbed826696b4feeb8" Feb 03 08:12:14 crc kubenswrapper[4998]: I0203 08:12:14.707758 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69233abde20c7b5ac3534fc17ff5c2d1338a7a1a8c45463cbed826696b4feeb8"} err="failed to get container status \"69233abde20c7b5ac3534fc17ff5c2d1338a7a1a8c45463cbed826696b4feeb8\": rpc error: code = NotFound desc = could not find container \"69233abde20c7b5ac3534fc17ff5c2d1338a7a1a8c45463cbed826696b4feeb8\": container with ID starting with 69233abde20c7b5ac3534fc17ff5c2d1338a7a1a8c45463cbed826696b4feeb8 not found: ID does not exist" Feb 03 08:12:14 crc kubenswrapper[4998]: I0203 08:12:14.707834 4998 scope.go:117] "RemoveContainer" containerID="322f7fe367b94fbc385f19350dc171158db9a651d6550d85c8f79523f2a2d03c" Feb 03 08:12:14 crc kubenswrapper[4998]: E0203 08:12:14.708101 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"322f7fe367b94fbc385f19350dc171158db9a651d6550d85c8f79523f2a2d03c\": container with ID starting with 322f7fe367b94fbc385f19350dc171158db9a651d6550d85c8f79523f2a2d03c not found: ID does not exist" containerID="322f7fe367b94fbc385f19350dc171158db9a651d6550d85c8f79523f2a2d03c" Feb 03 08:12:14 crc kubenswrapper[4998]: I0203 08:12:14.708255 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"322f7fe367b94fbc385f19350dc171158db9a651d6550d85c8f79523f2a2d03c"} err="failed to get container status \"322f7fe367b94fbc385f19350dc171158db9a651d6550d85c8f79523f2a2d03c\": rpc error: code = NotFound desc = could not find container \"322f7fe367b94fbc385f19350dc171158db9a651d6550d85c8f79523f2a2d03c\": container with ID starting with 322f7fe367b94fbc385f19350dc171158db9a651d6550d85c8f79523f2a2d03c not found: ID does not exist" Feb 03 08:12:14 crc kubenswrapper[4998]: I0203 08:12:14.708365 4998 scope.go:117] "RemoveContainer" containerID="359f5d7600e4016e9fff5541827f3cd48a47e4299d13cbdbb9bcef03654bdb10" Feb 03 08:12:14 crc kubenswrapper[4998]: E0203 08:12:14.708659 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"359f5d7600e4016e9fff5541827f3cd48a47e4299d13cbdbb9bcef03654bdb10\": container with ID starting with 359f5d7600e4016e9fff5541827f3cd48a47e4299d13cbdbb9bcef03654bdb10 not found: ID does not exist" containerID="359f5d7600e4016e9fff5541827f3cd48a47e4299d13cbdbb9bcef03654bdb10" Feb 03 08:12:14 crc kubenswrapper[4998]: I0203 08:12:14.708773 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"359f5d7600e4016e9fff5541827f3cd48a47e4299d13cbdbb9bcef03654bdb10"} err="failed to get container status \"359f5d7600e4016e9fff5541827f3cd48a47e4299d13cbdbb9bcef03654bdb10\": rpc error: code = NotFound desc = could not find container \"359f5d7600e4016e9fff5541827f3cd48a47e4299d13cbdbb9bcef03654bdb10\": container with ID starting with 359f5d7600e4016e9fff5541827f3cd48a47e4299d13cbdbb9bcef03654bdb10 not found: ID does not exist" Feb 03 08:12:16 crc kubenswrapper[4998]: I0203 08:12:16.442482 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1ceb31c-bbeb-4c30-af41-1f0696b3a670" path="/var/lib/kubelet/pods/e1ceb31c-bbeb-4c30-af41-1f0696b3a670/volumes" Feb 03 08:12:42 crc kubenswrapper[4998]: I0203 
08:12:42.754567 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 08:12:42 crc kubenswrapper[4998]: I0203 08:12:42.756490 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 08:13:12 crc kubenswrapper[4998]: I0203 08:13:12.754052 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 08:13:12 crc kubenswrapper[4998]: I0203 08:13:12.754584 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 08:13:12 crc kubenswrapper[4998]: I0203 08:13:12.754622 4998 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" Feb 03 08:13:12 crc kubenswrapper[4998]: I0203 08:13:12.755235 4998 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fc8e3c2aff521fcde4877053a4a00fe854e23de31735f2c01ea348ab318f2c26"} pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 03 08:13:12 crc kubenswrapper[4998]: I0203 08:13:12.755295 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" containerID="cri-o://fc8e3c2aff521fcde4877053a4a00fe854e23de31735f2c01ea348ab318f2c26" gracePeriod=600 Feb 03 08:13:12 crc kubenswrapper[4998]: E0203 08:13:12.883445 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:13:13 crc kubenswrapper[4998]: I0203 08:13:13.045342 4998 generic.go:334] "Generic (PLEG): container finished" podID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerID="fc8e3c2aff521fcde4877053a4a00fe854e23de31735f2c01ea348ab318f2c26" exitCode=0 Feb 03 08:13:13 crc kubenswrapper[4998]: I0203 08:13:13.045409 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" 
event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerDied","Data":"fc8e3c2aff521fcde4877053a4a00fe854e23de31735f2c01ea348ab318f2c26"} Feb 03 08:13:13 crc kubenswrapper[4998]: I0203 08:13:13.045473 4998 scope.go:117] "RemoveContainer" containerID="04356c01a375169cbe715e23ad0d292638012c5a0c05bbba9c0b45970c057e0e" Feb 03 08:13:13 crc kubenswrapper[4998]: I0203 08:13:13.046076 4998 scope.go:117] "RemoveContainer" containerID="fc8e3c2aff521fcde4877053a4a00fe854e23de31735f2c01ea348ab318f2c26" Feb 03 08:13:13 crc kubenswrapper[4998]: E0203 08:13:13.046372 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:13:27 crc kubenswrapper[4998]: I0203 08:13:27.428630 4998 scope.go:117] "RemoveContainer" containerID="fc8e3c2aff521fcde4877053a4a00fe854e23de31735f2c01ea348ab318f2c26" Feb 03 08:13:27 crc kubenswrapper[4998]: E0203 08:13:27.429487 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:13:42 crc kubenswrapper[4998]: I0203 08:13:42.431573 4998 scope.go:117] "RemoveContainer" containerID="fc8e3c2aff521fcde4877053a4a00fe854e23de31735f2c01ea348ab318f2c26" Feb 03 08:13:42 crc kubenswrapper[4998]: E0203 08:13:42.432286 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:13:54 crc kubenswrapper[4998]: I0203 08:13:54.427681 4998 scope.go:117] "RemoveContainer" containerID="fc8e3c2aff521fcde4877053a4a00fe854e23de31735f2c01ea348ab318f2c26" Feb 03 08:13:54 crc kubenswrapper[4998]: E0203 08:13:54.428300 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:14:09 crc kubenswrapper[4998]: I0203 08:14:09.429152 4998 scope.go:117] "RemoveContainer" containerID="fc8e3c2aff521fcde4877053a4a00fe854e23de31735f2c01ea348ab318f2c26" Feb 03 08:14:09 crc kubenswrapper[4998]: E0203 08:14:09.429801 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:14:24 crc kubenswrapper[4998]: I0203 08:14:24.428144 4998 scope.go:117] "RemoveContainer" containerID="fc8e3c2aff521fcde4877053a4a00fe854e23de31735f2c01ea348ab318f2c26" Feb 03 08:14:24 crc kubenswrapper[4998]: E0203 08:14:24.429131 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:14:29 crc kubenswrapper[4998]: I0203 08:14:29.233382 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-wkbsj"] Feb 03 08:14:29 crc kubenswrapper[4998]: E0203 08:14:29.235352 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1ceb31c-bbeb-4c30-af41-1f0696b3a670" containerName="extract-utilities" Feb 03 08:14:29 crc kubenswrapper[4998]: I0203 08:14:29.235481 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1ceb31c-bbeb-4c30-af41-1f0696b3a670" containerName="extract-utilities" Feb 03 08:14:29 crc kubenswrapper[4998]: E0203 08:14:29.235588 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d4d3002-0580-4d8a-8cce-3f172b202d95" containerName="extract-utilities" Feb 03 08:14:29 crc kubenswrapper[4998]: I0203 08:14:29.235673 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d4d3002-0580-4d8a-8cce-3f172b202d95" containerName="extract-utilities" Feb 03 08:14:29 crc kubenswrapper[4998]: E0203 08:14:29.235806 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1ceb31c-bbeb-4c30-af41-1f0696b3a670" containerName="extract-content" Feb 03 08:14:29 crc kubenswrapper[4998]: I0203 08:14:29.235881 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1ceb31c-bbeb-4c30-af41-1f0696b3a670" containerName="extract-content" Feb 03 08:14:29 crc kubenswrapper[4998]: E0203 08:14:29.235961 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d4d3002-0580-4d8a-8cce-3f172b202d95" containerName="extract-content" Feb 03 08:14:29 crc kubenswrapper[4998]: I0203 08:14:29.236039 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d4d3002-0580-4d8a-8cce-3f172b202d95" containerName="extract-content" Feb 03 08:14:29 crc kubenswrapper[4998]: E0203 08:14:29.236124 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1ceb31c-bbeb-4c30-af41-1f0696b3a670" containerName="registry-server" Feb 03 08:14:29 crc kubenswrapper[4998]: I0203 08:14:29.236211 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1ceb31c-bbeb-4c30-af41-1f0696b3a670" containerName="registry-server" Feb 03 08:14:29 crc kubenswrapper[4998]: E0203 08:14:29.236304 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d4d3002-0580-4d8a-8cce-3f172b202d95" containerName="registry-server" Feb 03 08:14:29 crc kubenswrapper[4998]: I0203 08:14:29.236385 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d4d3002-0580-4d8a-8cce-3f172b202d95" containerName="registry-server" Feb 03 08:14:29 crc kubenswrapper[4998]: I0203 08:14:29.236618 4998 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="e1ceb31c-bbeb-4c30-af41-1f0696b3a670" containerName="registry-server" Feb 03 08:14:29 crc kubenswrapper[4998]: I0203 08:14:29.236737 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d4d3002-0580-4d8a-8cce-3f172b202d95" containerName="registry-server" Feb 03 08:14:29 crc kubenswrapper[4998]: I0203 08:14:29.237881 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wkbsj" Feb 03 08:14:29 crc kubenswrapper[4998]: I0203 08:14:29.253678 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wkbsj"] Feb 03 08:14:29 crc kubenswrapper[4998]: I0203 08:14:29.357217 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae1e9817-fa8c-461b-b483-913f397dbe95-catalog-content\") pod \"redhat-operators-wkbsj\" (UID: \"ae1e9817-fa8c-461b-b483-913f397dbe95\") " pod="openshift-marketplace/redhat-operators-wkbsj" Feb 03 08:14:29 crc kubenswrapper[4998]: I0203 08:14:29.357517 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tjccl\" (UniqueName: \"kubernetes.io/projected/ae1e9817-fa8c-461b-b483-913f397dbe95-kube-api-access-tjccl\") pod \"redhat-operators-wkbsj\" (UID: \"ae1e9817-fa8c-461b-b483-913f397dbe95\") " pod="openshift-marketplace/redhat-operators-wkbsj" Feb 03 08:14:29 crc kubenswrapper[4998]: I0203 08:14:29.357738 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae1e9817-fa8c-461b-b483-913f397dbe95-utilities\") pod \"redhat-operators-wkbsj\" (UID: \"ae1e9817-fa8c-461b-b483-913f397dbe95\") " pod="openshift-marketplace/redhat-operators-wkbsj" Feb 03 08:14:29 crc kubenswrapper[4998]: I0203 08:14:29.459323 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae1e9817-fa8c-461b-b483-913f397dbe95-utilities\") pod \"redhat-operators-wkbsj\" (UID: \"ae1e9817-fa8c-461b-b483-913f397dbe95\") " pod="openshift-marketplace/redhat-operators-wkbsj" Feb 03 08:14:29 crc kubenswrapper[4998]: I0203 08:14:29.459682 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae1e9817-fa8c-461b-b483-913f397dbe95-catalog-content\") pod \"redhat-operators-wkbsj\" (UID: \"ae1e9817-fa8c-461b-b483-913f397dbe95\") " pod="openshift-marketplace/redhat-operators-wkbsj" Feb 03 08:14:29 crc kubenswrapper[4998]: I0203 08:14:29.459843 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tjccl\" (UniqueName: \"kubernetes.io/projected/ae1e9817-fa8c-461b-b483-913f397dbe95-kube-api-access-tjccl\") pod \"redhat-operators-wkbsj\" (UID: \"ae1e9817-fa8c-461b-b483-913f397dbe95\") " pod="openshift-marketplace/redhat-operators-wkbsj" Feb 03 08:14:29 crc kubenswrapper[4998]: I0203 08:14:29.459960 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae1e9817-fa8c-461b-b483-913f397dbe95-utilities\") pod \"redhat-operators-wkbsj\" (UID: \"ae1e9817-fa8c-461b-b483-913f397dbe95\") " pod="openshift-marketplace/redhat-operators-wkbsj" Feb 03 08:14:29 crc kubenswrapper[4998]: I0203 08:14:29.460192 4998 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae1e9817-fa8c-461b-b483-913f397dbe95-catalog-content\") pod \"redhat-operators-wkbsj\" (UID: \"ae1e9817-fa8c-461b-b483-913f397dbe95\") " pod="openshift-marketplace/redhat-operators-wkbsj" Feb 03 08:14:29 crc kubenswrapper[4998]: I0203 08:14:29.484653 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tjccl\" (UniqueName: \"kubernetes.io/projected/ae1e9817-fa8c-461b-b483-913f397dbe95-kube-api-access-tjccl\") pod \"redhat-operators-wkbsj\" (UID: \"ae1e9817-fa8c-461b-b483-913f397dbe95\") " pod="openshift-marketplace/redhat-operators-wkbsj" Feb 03 08:14:29 crc kubenswrapper[4998]: I0203 08:14:29.561766 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wkbsj" Feb 03 08:14:30 crc kubenswrapper[4998]: I0203 08:14:30.050997 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wkbsj"] Feb 03 08:14:30 crc kubenswrapper[4998]: I0203 08:14:30.582136 4998 generic.go:334] "Generic (PLEG): container finished" podID="ae1e9817-fa8c-461b-b483-913f397dbe95" containerID="8d3e680e5cbba5db183ffafb4e91f8da9a48640d180585a20aa579eb924506b2" exitCode=0 Feb 03 08:14:30 crc kubenswrapper[4998]: I0203 08:14:30.582238 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wkbsj" event={"ID":"ae1e9817-fa8c-461b-b483-913f397dbe95","Type":"ContainerDied","Data":"8d3e680e5cbba5db183ffafb4e91f8da9a48640d180585a20aa579eb924506b2"} Feb 03 08:14:30 crc kubenswrapper[4998]: I0203 08:14:30.582453 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wkbsj" event={"ID":"ae1e9817-fa8c-461b-b483-913f397dbe95","Type":"ContainerStarted","Data":"ac06d0d54b75597429995b3af31fb10bd9d49ecb2019a6bd78f3462ba9877ecf"} Feb 03 08:14:38 crc kubenswrapper[4998]: I0203 08:14:38.428201 4998 scope.go:117] "RemoveContainer" containerID="fc8e3c2aff521fcde4877053a4a00fe854e23de31735f2c01ea348ab318f2c26" Feb 03 08:14:38 crc kubenswrapper[4998]: E0203 08:14:38.430115 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:14:39 crc kubenswrapper[4998]: I0203 08:14:39.668925 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wkbsj" event={"ID":"ae1e9817-fa8c-461b-b483-913f397dbe95","Type":"ContainerStarted","Data":"d8f37e1fc8567672a7bf01ea86f12b07b540a46a701d6b63084a70df812d7f14"} Feb 03 08:14:40 crc kubenswrapper[4998]: I0203 08:14:40.677335 4998 generic.go:334] "Generic (PLEG): container finished" podID="ae1e9817-fa8c-461b-b483-913f397dbe95" containerID="d8f37e1fc8567672a7bf01ea86f12b07b540a46a701d6b63084a70df812d7f14" exitCode=0 Feb 03 08:14:40 crc kubenswrapper[4998]: I0203 08:14:40.677379 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wkbsj" event={"ID":"ae1e9817-fa8c-461b-b483-913f397dbe95","Type":"ContainerDied","Data":"d8f37e1fc8567672a7bf01ea86f12b07b540a46a701d6b63084a70df812d7f14"} Feb 03 08:14:41 crc kubenswrapper[4998]: I0203 
08:14:41.688258 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wkbsj" event={"ID":"ae1e9817-fa8c-461b-b483-913f397dbe95","Type":"ContainerStarted","Data":"b86dccd81c669dabdaf6ef300f7543726d4a8aff24cd43d62bb95efee4af8ef7"} Feb 03 08:14:41 crc kubenswrapper[4998]: I0203 08:14:41.711528 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-wkbsj" podStartSLOduration=1.9967439439999999 podStartE2EDuration="12.711503667s" podCreationTimestamp="2026-02-03 08:14:29 +0000 UTC" firstStartedPulling="2026-02-03 08:14:30.583729118 +0000 UTC m=+5308.870422924" lastFinishedPulling="2026-02-03 08:14:41.298488841 +0000 UTC m=+5319.585182647" observedRunningTime="2026-02-03 08:14:41.703040086 +0000 UTC m=+5319.989733912" watchObservedRunningTime="2026-02-03 08:14:41.711503667 +0000 UTC m=+5319.998197513" Feb 03 08:14:49 crc kubenswrapper[4998]: I0203 08:14:49.562108 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-wkbsj" Feb 03 08:14:49 crc kubenswrapper[4998]: I0203 08:14:49.562765 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-wkbsj" Feb 03 08:14:49 crc kubenswrapper[4998]: I0203 08:14:49.603551 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-wkbsj" Feb 03 08:14:49 crc kubenswrapper[4998]: I0203 08:14:49.780517 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-wkbsj" Feb 03 08:14:49 crc kubenswrapper[4998]: I0203 08:14:49.850232 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wkbsj"] Feb 03 08:14:49 crc kubenswrapper[4998]: I0203 08:14:49.885427 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jcps7"] Feb 03 08:14:49 crc kubenswrapper[4998]: I0203 08:14:49.886018 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-jcps7" podUID="99d0d95b-7ead-4827-bdcb-70656ad70707" containerName="registry-server" containerID="cri-o://ff39885b9b255e358ba7542261ce909af80e0e6cef1edd1b4bea6b1bd5bea8e2" gracePeriod=2 Feb 03 08:14:51 crc kubenswrapper[4998]: I0203 08:14:51.761266 4998 generic.go:334] "Generic (PLEG): container finished" podID="99d0d95b-7ead-4827-bdcb-70656ad70707" containerID="ff39885b9b255e358ba7542261ce909af80e0e6cef1edd1b4bea6b1bd5bea8e2" exitCode=0 Feb 03 08:14:51 crc kubenswrapper[4998]: I0203 08:14:51.761440 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jcps7" event={"ID":"99d0d95b-7ead-4827-bdcb-70656ad70707","Type":"ContainerDied","Data":"ff39885b9b255e358ba7542261ce909af80e0e6cef1edd1b4bea6b1bd5bea8e2"} Feb 03 08:14:53 crc kubenswrapper[4998]: I0203 08:14:53.427825 4998 scope.go:117] "RemoveContainer" containerID="fc8e3c2aff521fcde4877053a4a00fe854e23de31735f2c01ea348ab318f2c26" Feb 03 08:14:53 crc kubenswrapper[4998]: E0203 08:14:53.428083 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:14:53 crc kubenswrapper[4998]: I0203 08:14:53.851925 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jcps7" Feb 03 08:14:54 crc kubenswrapper[4998]: I0203 08:14:54.000437 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r69rv\" (UniqueName: \"kubernetes.io/projected/99d0d95b-7ead-4827-bdcb-70656ad70707-kube-api-access-r69rv\") pod \"99d0d95b-7ead-4827-bdcb-70656ad70707\" (UID: \"99d0d95b-7ead-4827-bdcb-70656ad70707\") " Feb 03 08:14:54 crc kubenswrapper[4998]: I0203 08:14:54.000838 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99d0d95b-7ead-4827-bdcb-70656ad70707-catalog-content\") pod \"99d0d95b-7ead-4827-bdcb-70656ad70707\" (UID: \"99d0d95b-7ead-4827-bdcb-70656ad70707\") " Feb 03 08:14:54 crc kubenswrapper[4998]: I0203 08:14:54.000953 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99d0d95b-7ead-4827-bdcb-70656ad70707-utilities\") pod \"99d0d95b-7ead-4827-bdcb-70656ad70707\" (UID: \"99d0d95b-7ead-4827-bdcb-70656ad70707\") " Feb 03 08:14:54 crc kubenswrapper[4998]: I0203 08:14:54.001322 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/99d0d95b-7ead-4827-bdcb-70656ad70707-utilities" (OuterVolumeSpecName: "utilities") pod "99d0d95b-7ead-4827-bdcb-70656ad70707" (UID: "99d0d95b-7ead-4827-bdcb-70656ad70707"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:14:54 crc kubenswrapper[4998]: I0203 08:14:54.006444 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99d0d95b-7ead-4827-bdcb-70656ad70707-kube-api-access-r69rv" (OuterVolumeSpecName: "kube-api-access-r69rv") pod "99d0d95b-7ead-4827-bdcb-70656ad70707" (UID: "99d0d95b-7ead-4827-bdcb-70656ad70707"). InnerVolumeSpecName "kube-api-access-r69rv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:14:54 crc kubenswrapper[4998]: I0203 08:14:54.102587 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99d0d95b-7ead-4827-bdcb-70656ad70707-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 08:14:54 crc kubenswrapper[4998]: I0203 08:14:54.102641 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r69rv\" (UniqueName: \"kubernetes.io/projected/99d0d95b-7ead-4827-bdcb-70656ad70707-kube-api-access-r69rv\") on node \"crc\" DevicePath \"\"" Feb 03 08:14:54 crc kubenswrapper[4998]: I0203 08:14:54.113207 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/99d0d95b-7ead-4827-bdcb-70656ad70707-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "99d0d95b-7ead-4827-bdcb-70656ad70707" (UID: "99d0d95b-7ead-4827-bdcb-70656ad70707"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:14:54 crc kubenswrapper[4998]: I0203 08:14:54.203693 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99d0d95b-7ead-4827-bdcb-70656ad70707-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 08:14:54 crc kubenswrapper[4998]: I0203 08:14:54.791530 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jcps7" event={"ID":"99d0d95b-7ead-4827-bdcb-70656ad70707","Type":"ContainerDied","Data":"fc54690b0ea5b6788bada296b35cb5867acbdb405b52cc09b1d2b4fdc304dd4a"} Feb 03 08:14:54 crc kubenswrapper[4998]: I0203 08:14:54.791591 4998 scope.go:117] "RemoveContainer" containerID="ff39885b9b255e358ba7542261ce909af80e0e6cef1edd1b4bea6b1bd5bea8e2" Feb 03 08:14:54 crc kubenswrapper[4998]: I0203 08:14:54.791592 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jcps7" Feb 03 08:14:54 crc kubenswrapper[4998]: I0203 08:14:54.812089 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jcps7"] Feb 03 08:14:54 crc kubenswrapper[4998]: I0203 08:14:54.823559 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-jcps7"] Feb 03 08:14:54 crc kubenswrapper[4998]: I0203 08:14:54.825709 4998 scope.go:117] "RemoveContainer" containerID="6e5a31f733637e7892c467af40a0a19a86cb22a7a12ae9bc5704804a481d0d83" Feb 03 08:14:54 crc kubenswrapper[4998]: I0203 08:14:54.847072 4998 scope.go:117] "RemoveContainer" containerID="5ec8fc6e7d1b5b84a41fff8a705b1399ad9502c5710bf039cce33f76b4f1a1ad" Feb 03 08:14:56 crc kubenswrapper[4998]: I0203 08:14:56.438557 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99d0d95b-7ead-4827-bdcb-70656ad70707" path="/var/lib/kubelet/pods/99d0d95b-7ead-4827-bdcb-70656ad70707/volumes" Feb 03 08:15:00 crc kubenswrapper[4998]: I0203 08:15:00.150540 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501775-4t7h2"] Feb 03 08:15:00 crc kubenswrapper[4998]: E0203 08:15:00.151180 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99d0d95b-7ead-4827-bdcb-70656ad70707" containerName="registry-server" Feb 03 08:15:00 crc kubenswrapper[4998]: I0203 08:15:00.151198 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="99d0d95b-7ead-4827-bdcb-70656ad70707" containerName="registry-server" Feb 03 08:15:00 crc kubenswrapper[4998]: E0203 08:15:00.151215 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99d0d95b-7ead-4827-bdcb-70656ad70707" containerName="extract-utilities" Feb 03 08:15:00 crc kubenswrapper[4998]: I0203 08:15:00.151223 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="99d0d95b-7ead-4827-bdcb-70656ad70707" containerName="extract-utilities" Feb 03 08:15:00 crc kubenswrapper[4998]: E0203 08:15:00.151256 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99d0d95b-7ead-4827-bdcb-70656ad70707" containerName="extract-content" Feb 03 08:15:00 crc kubenswrapper[4998]: I0203 08:15:00.151263 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="99d0d95b-7ead-4827-bdcb-70656ad70707" containerName="extract-content" Feb 03 08:15:00 crc kubenswrapper[4998]: I0203 08:15:00.151456 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="99d0d95b-7ead-4827-bdcb-70656ad70707" 
containerName="registry-server" Feb 03 08:15:00 crc kubenswrapper[4998]: I0203 08:15:00.152953 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501775-4t7h2" Feb 03 08:15:00 crc kubenswrapper[4998]: I0203 08:15:00.155151 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 03 08:15:00 crc kubenswrapper[4998]: I0203 08:15:00.155532 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 03 08:15:00 crc kubenswrapper[4998]: I0203 08:15:00.169808 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501775-4t7h2"] Feb 03 08:15:00 crc kubenswrapper[4998]: I0203 08:15:00.304965 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f5980e16-37d6-44ff-83d8-2e37675b6cab-secret-volume\") pod \"collect-profiles-29501775-4t7h2\" (UID: \"f5980e16-37d6-44ff-83d8-2e37675b6cab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501775-4t7h2" Feb 03 08:15:00 crc kubenswrapper[4998]: I0203 08:15:00.305094 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldfsr\" (UniqueName: \"kubernetes.io/projected/f5980e16-37d6-44ff-83d8-2e37675b6cab-kube-api-access-ldfsr\") pod \"collect-profiles-29501775-4t7h2\" (UID: \"f5980e16-37d6-44ff-83d8-2e37675b6cab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501775-4t7h2" Feb 03 08:15:00 crc kubenswrapper[4998]: I0203 08:15:00.305253 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f5980e16-37d6-44ff-83d8-2e37675b6cab-config-volume\") pod \"collect-profiles-29501775-4t7h2\" (UID: \"f5980e16-37d6-44ff-83d8-2e37675b6cab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501775-4t7h2" Feb 03 08:15:00 crc kubenswrapper[4998]: I0203 08:15:00.406864 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f5980e16-37d6-44ff-83d8-2e37675b6cab-config-volume\") pod \"collect-profiles-29501775-4t7h2\" (UID: \"f5980e16-37d6-44ff-83d8-2e37675b6cab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501775-4t7h2" Feb 03 08:15:00 crc kubenswrapper[4998]: I0203 08:15:00.406975 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f5980e16-37d6-44ff-83d8-2e37675b6cab-secret-volume\") pod \"collect-profiles-29501775-4t7h2\" (UID: \"f5980e16-37d6-44ff-83d8-2e37675b6cab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501775-4t7h2" Feb 03 08:15:00 crc kubenswrapper[4998]: I0203 08:15:00.407044 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldfsr\" (UniqueName: \"kubernetes.io/projected/f5980e16-37d6-44ff-83d8-2e37675b6cab-kube-api-access-ldfsr\") pod \"collect-profiles-29501775-4t7h2\" (UID: \"f5980e16-37d6-44ff-83d8-2e37675b6cab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501775-4t7h2" Feb 03 08:15:00 crc kubenswrapper[4998]: I0203 08:15:00.408844 4998 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f5980e16-37d6-44ff-83d8-2e37675b6cab-config-volume\") pod \"collect-profiles-29501775-4t7h2\" (UID: \"f5980e16-37d6-44ff-83d8-2e37675b6cab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501775-4t7h2" Feb 03 08:15:00 crc kubenswrapper[4998]: I0203 08:15:00.413160 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f5980e16-37d6-44ff-83d8-2e37675b6cab-secret-volume\") pod \"collect-profiles-29501775-4t7h2\" (UID: \"f5980e16-37d6-44ff-83d8-2e37675b6cab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501775-4t7h2" Feb 03 08:15:00 crc kubenswrapper[4998]: I0203 08:15:00.426198 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldfsr\" (UniqueName: \"kubernetes.io/projected/f5980e16-37d6-44ff-83d8-2e37675b6cab-kube-api-access-ldfsr\") pod \"collect-profiles-29501775-4t7h2\" (UID: \"f5980e16-37d6-44ff-83d8-2e37675b6cab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501775-4t7h2" Feb 03 08:15:00 crc kubenswrapper[4998]: I0203 08:15:00.473355 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501775-4t7h2" Feb 03 08:15:00 crc kubenswrapper[4998]: I0203 08:15:00.736669 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501775-4t7h2"] Feb 03 08:15:00 crc kubenswrapper[4998]: I0203 08:15:00.835605 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501775-4t7h2" event={"ID":"f5980e16-37d6-44ff-83d8-2e37675b6cab","Type":"ContainerStarted","Data":"95e3f63974ad972861287f963d87ccfd03f0ff7730bb8d378b7667e7209bdff1"} Feb 03 08:15:01 crc kubenswrapper[4998]: I0203 08:15:01.844467 4998 generic.go:334] "Generic (PLEG): container finished" podID="f5980e16-37d6-44ff-83d8-2e37675b6cab" containerID="3fdd715487c2f9c409ba5da00903fd1c23189cc9db3450baf3962ec0bed6fbe5" exitCode=0 Feb 03 08:15:01 crc kubenswrapper[4998]: I0203 08:15:01.844527 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501775-4t7h2" event={"ID":"f5980e16-37d6-44ff-83d8-2e37675b6cab","Type":"ContainerDied","Data":"3fdd715487c2f9c409ba5da00903fd1c23189cc9db3450baf3962ec0bed6fbe5"} Feb 03 08:15:03 crc kubenswrapper[4998]: I0203 08:15:03.103133 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501775-4t7h2" Feb 03 08:15:03 crc kubenswrapper[4998]: I0203 08:15:03.252368 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ldfsr\" (UniqueName: \"kubernetes.io/projected/f5980e16-37d6-44ff-83d8-2e37675b6cab-kube-api-access-ldfsr\") pod \"f5980e16-37d6-44ff-83d8-2e37675b6cab\" (UID: \"f5980e16-37d6-44ff-83d8-2e37675b6cab\") " Feb 03 08:15:03 crc kubenswrapper[4998]: I0203 08:15:03.252708 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f5980e16-37d6-44ff-83d8-2e37675b6cab-config-volume\") pod \"f5980e16-37d6-44ff-83d8-2e37675b6cab\" (UID: \"f5980e16-37d6-44ff-83d8-2e37675b6cab\") " Feb 03 08:15:03 crc kubenswrapper[4998]: I0203 08:15:03.252792 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f5980e16-37d6-44ff-83d8-2e37675b6cab-secret-volume\") pod \"f5980e16-37d6-44ff-83d8-2e37675b6cab\" (UID: \"f5980e16-37d6-44ff-83d8-2e37675b6cab\") " Feb 03 08:15:03 crc kubenswrapper[4998]: I0203 08:15:03.253268 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5980e16-37d6-44ff-83d8-2e37675b6cab-config-volume" (OuterVolumeSpecName: "config-volume") pod "f5980e16-37d6-44ff-83d8-2e37675b6cab" (UID: "f5980e16-37d6-44ff-83d8-2e37675b6cab"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:15:03 crc kubenswrapper[4998]: I0203 08:15:03.257616 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5980e16-37d6-44ff-83d8-2e37675b6cab-kube-api-access-ldfsr" (OuterVolumeSpecName: "kube-api-access-ldfsr") pod "f5980e16-37d6-44ff-83d8-2e37675b6cab" (UID: "f5980e16-37d6-44ff-83d8-2e37675b6cab"). InnerVolumeSpecName "kube-api-access-ldfsr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:15:03 crc kubenswrapper[4998]: I0203 08:15:03.257667 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5980e16-37d6-44ff-83d8-2e37675b6cab-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "f5980e16-37d6-44ff-83d8-2e37675b6cab" (UID: "f5980e16-37d6-44ff-83d8-2e37675b6cab"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:15:03 crc kubenswrapper[4998]: I0203 08:15:03.353917 4998 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f5980e16-37d6-44ff-83d8-2e37675b6cab-config-volume\") on node \"crc\" DevicePath \"\"" Feb 03 08:15:03 crc kubenswrapper[4998]: I0203 08:15:03.353954 4998 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f5980e16-37d6-44ff-83d8-2e37675b6cab-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 03 08:15:03 crc kubenswrapper[4998]: I0203 08:15:03.353964 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ldfsr\" (UniqueName: \"kubernetes.io/projected/f5980e16-37d6-44ff-83d8-2e37675b6cab-kube-api-access-ldfsr\") on node \"crc\" DevicePath \"\"" Feb 03 08:15:03 crc kubenswrapper[4998]: I0203 08:15:03.861497 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501775-4t7h2" event={"ID":"f5980e16-37d6-44ff-83d8-2e37675b6cab","Type":"ContainerDied","Data":"95e3f63974ad972861287f963d87ccfd03f0ff7730bb8d378b7667e7209bdff1"} Feb 03 08:15:03 crc kubenswrapper[4998]: I0203 08:15:03.861536 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="95e3f63974ad972861287f963d87ccfd03f0ff7730bb8d378b7667e7209bdff1" Feb 03 08:15:03 crc kubenswrapper[4998]: I0203 08:15:03.861549 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501775-4t7h2" Feb 03 08:15:04 crc kubenswrapper[4998]: I0203 08:15:04.171081 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501730-vx9gx"] Feb 03 08:15:04 crc kubenswrapper[4998]: I0203 08:15:04.177538 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501730-vx9gx"] Feb 03 08:15:04 crc kubenswrapper[4998]: I0203 08:15:04.435469 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a8f35dd-01c5-48a0-b17a-b681dc68312e" path="/var/lib/kubelet/pods/9a8f35dd-01c5-48a0-b17a-b681dc68312e/volumes" Feb 03 08:15:07 crc kubenswrapper[4998]: I0203 08:15:07.302089 4998 scope.go:117] "RemoveContainer" containerID="ea1bc8df95dce708e8d415e92ba0124af8a4f0e8a037ca907febd6cd298a6395" Feb 03 08:15:08 crc kubenswrapper[4998]: I0203 08:15:08.428029 4998 scope.go:117] "RemoveContainer" containerID="fc8e3c2aff521fcde4877053a4a00fe854e23de31735f2c01ea348ab318f2c26" Feb 03 08:15:08 crc kubenswrapper[4998]: E0203 08:15:08.428507 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:15:23 crc kubenswrapper[4998]: I0203 08:15:23.427506 4998 scope.go:117] "RemoveContainer" containerID="fc8e3c2aff521fcde4877053a4a00fe854e23de31735f2c01ea348ab318f2c26" Feb 03 08:15:23 crc kubenswrapper[4998]: E0203 08:15:23.428271 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:15:38 crc kubenswrapper[4998]: I0203 08:15:38.431638 4998 scope.go:117] "RemoveContainer" containerID="fc8e3c2aff521fcde4877053a4a00fe854e23de31735f2c01ea348ab318f2c26" Feb 03 08:15:38 crc kubenswrapper[4998]: E0203 08:15:38.432406 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:15:51 crc kubenswrapper[4998]: I0203 08:15:51.428830 4998 scope.go:117] "RemoveContainer" containerID="fc8e3c2aff521fcde4877053a4a00fe854e23de31735f2c01ea348ab318f2c26" Feb 03 08:15:51 crc kubenswrapper[4998]: E0203 08:15:51.429662 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:16:03 crc kubenswrapper[4998]: I0203 08:16:03.427475 4998 scope.go:117] "RemoveContainer" containerID="fc8e3c2aff521fcde4877053a4a00fe854e23de31735f2c01ea348ab318f2c26" Feb 03 08:16:03 crc kubenswrapper[4998]: E0203 08:16:03.428242 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:16:14 crc kubenswrapper[4998]: I0203 08:16:14.427541 4998 scope.go:117] "RemoveContainer" containerID="fc8e3c2aff521fcde4877053a4a00fe854e23de31735f2c01ea348ab318f2c26" Feb 03 08:16:14 crc kubenswrapper[4998]: E0203 08:16:14.428372 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:16:25 crc kubenswrapper[4998]: I0203 08:16:25.428372 4998 scope.go:117] "RemoveContainer" containerID="fc8e3c2aff521fcde4877053a4a00fe854e23de31735f2c01ea348ab318f2c26" Feb 03 08:16:25 crc kubenswrapper[4998]: E0203 08:16:25.429234 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:16:39 crc kubenswrapper[4998]: I0203 08:16:39.427612 4998 scope.go:117] "RemoveContainer" containerID="fc8e3c2aff521fcde4877053a4a00fe854e23de31735f2c01ea348ab318f2c26" Feb 03 08:16:39 crc kubenswrapper[4998]: E0203 08:16:39.429483 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:16:51 crc kubenswrapper[4998]: I0203 08:16:51.427629 4998 scope.go:117] "RemoveContainer" containerID="fc8e3c2aff521fcde4877053a4a00fe854e23de31735f2c01ea348ab318f2c26" Feb 03 08:16:51 crc kubenswrapper[4998]: E0203 08:16:51.428632 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:17:05 crc kubenswrapper[4998]: I0203 08:17:05.427948 4998 scope.go:117] "RemoveContainer" containerID="fc8e3c2aff521fcde4877053a4a00fe854e23de31735f2c01ea348ab318f2c26" Feb 03 08:17:05 crc kubenswrapper[4998]: E0203 08:17:05.428938 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:17:18 crc kubenswrapper[4998]: I0203 08:17:18.429270 4998 scope.go:117] "RemoveContainer" containerID="fc8e3c2aff521fcde4877053a4a00fe854e23de31735f2c01ea348ab318f2c26" Feb 03 08:17:18 crc kubenswrapper[4998]: E0203 08:17:18.430279 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:17:29 crc kubenswrapper[4998]: I0203 08:17:29.428929 4998 scope.go:117] "RemoveContainer" containerID="fc8e3c2aff521fcde4877053a4a00fe854e23de31735f2c01ea348ab318f2c26" Feb 03 08:17:29 crc kubenswrapper[4998]: E0203 08:17:29.430712 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:17:42 crc kubenswrapper[4998]: I0203 08:17:42.431330 4998 
scope.go:117] "RemoveContainer" containerID="fc8e3c2aff521fcde4877053a4a00fe854e23de31735f2c01ea348ab318f2c26" Feb 03 08:17:42 crc kubenswrapper[4998]: E0203 08:17:42.432121 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:17:53 crc kubenswrapper[4998]: I0203 08:17:53.427027 4998 scope.go:117] "RemoveContainer" containerID="fc8e3c2aff521fcde4877053a4a00fe854e23de31735f2c01ea348ab318f2c26" Feb 03 08:17:53 crc kubenswrapper[4998]: E0203 08:17:53.427878 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:18:06 crc kubenswrapper[4998]: I0203 08:18:06.427732 4998 scope.go:117] "RemoveContainer" containerID="fc8e3c2aff521fcde4877053a4a00fe854e23de31735f2c01ea348ab318f2c26" Feb 03 08:18:06 crc kubenswrapper[4998]: E0203 08:18:06.428700 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:18:20 crc kubenswrapper[4998]: I0203 08:18:20.428066 4998 scope.go:117] "RemoveContainer" containerID="fc8e3c2aff521fcde4877053a4a00fe854e23de31735f2c01ea348ab318f2c26" Feb 03 08:18:21 crc kubenswrapper[4998]: I0203 08:18:21.302401 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerStarted","Data":"41ad7bac26c2a9d11cf1a118bd1e922df62ba805774b610a67cdfe89ccdb9592"} Feb 03 08:20:42 crc kubenswrapper[4998]: I0203 08:20:42.754242 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 08:20:42 crc kubenswrapper[4998]: I0203 08:20:42.754994 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 08:21:12 crc kubenswrapper[4998]: I0203 08:21:12.754868 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
start-of-body= Feb 03 08:21:12 crc kubenswrapper[4998]: I0203 08:21:12.755510 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 08:21:42 crc kubenswrapper[4998]: I0203 08:21:42.754036 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 08:21:42 crc kubenswrapper[4998]: I0203 08:21:42.754630 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 08:21:42 crc kubenswrapper[4998]: I0203 08:21:42.754676 4998 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" Feb 03 08:21:42 crc kubenswrapper[4998]: I0203 08:21:42.755439 4998 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"41ad7bac26c2a9d11cf1a118bd1e922df62ba805774b610a67cdfe89ccdb9592"} pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 03 08:21:42 crc kubenswrapper[4998]: I0203 08:21:42.755493 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" containerID="cri-o://41ad7bac26c2a9d11cf1a118bd1e922df62ba805774b610a67cdfe89ccdb9592" gracePeriod=600 Feb 03 08:21:43 crc kubenswrapper[4998]: I0203 08:21:43.236876 4998 generic.go:334] "Generic (PLEG): container finished" podID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerID="41ad7bac26c2a9d11cf1a118bd1e922df62ba805774b610a67cdfe89ccdb9592" exitCode=0 Feb 03 08:21:43 crc kubenswrapper[4998]: I0203 08:21:43.236910 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerDied","Data":"41ad7bac26c2a9d11cf1a118bd1e922df62ba805774b610a67cdfe89ccdb9592"} Feb 03 08:21:43 crc kubenswrapper[4998]: I0203 08:21:43.237171 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerStarted","Data":"898cd48af269365b509b96f6cec85784df08eaf849650d4d9d793db3638ff97d"} Feb 03 08:21:43 crc kubenswrapper[4998]: I0203 08:21:43.237190 4998 scope.go:117] "RemoveContainer" containerID="fc8e3c2aff521fcde4877053a4a00fe854e23de31735f2c01ea348ab318f2c26" Feb 03 08:21:45 crc kubenswrapper[4998]: I0203 08:21:45.552197 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-ksdjm"] Feb 03 08:21:45 crc kubenswrapper[4998]: E0203 08:21:45.553226 4998 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="f5980e16-37d6-44ff-83d8-2e37675b6cab" containerName="collect-profiles" Feb 03 08:21:45 crc kubenswrapper[4998]: I0203 08:21:45.553243 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5980e16-37d6-44ff-83d8-2e37675b6cab" containerName="collect-profiles" Feb 03 08:21:45 crc kubenswrapper[4998]: I0203 08:21:45.553430 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5980e16-37d6-44ff-83d8-2e37675b6cab" containerName="collect-profiles" Feb 03 08:21:45 crc kubenswrapper[4998]: I0203 08:21:45.554691 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ksdjm" Feb 03 08:21:45 crc kubenswrapper[4998]: I0203 08:21:45.556665 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9fk69\" (UniqueName: \"kubernetes.io/projected/41017d0c-c69b-4907-b3dc-f99f64201dc4-kube-api-access-9fk69\") pod \"certified-operators-ksdjm\" (UID: \"41017d0c-c69b-4907-b3dc-f99f64201dc4\") " pod="openshift-marketplace/certified-operators-ksdjm" Feb 03 08:21:45 crc kubenswrapper[4998]: I0203 08:21:45.556760 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41017d0c-c69b-4907-b3dc-f99f64201dc4-catalog-content\") pod \"certified-operators-ksdjm\" (UID: \"41017d0c-c69b-4907-b3dc-f99f64201dc4\") " pod="openshift-marketplace/certified-operators-ksdjm" Feb 03 08:21:45 crc kubenswrapper[4998]: I0203 08:21:45.556802 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41017d0c-c69b-4907-b3dc-f99f64201dc4-utilities\") pod \"certified-operators-ksdjm\" (UID: \"41017d0c-c69b-4907-b3dc-f99f64201dc4\") " pod="openshift-marketplace/certified-operators-ksdjm" Feb 03 08:21:45 crc kubenswrapper[4998]: I0203 08:21:45.558285 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ksdjm"] Feb 03 08:21:45 crc kubenswrapper[4998]: I0203 08:21:45.658074 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9fk69\" (UniqueName: \"kubernetes.io/projected/41017d0c-c69b-4907-b3dc-f99f64201dc4-kube-api-access-9fk69\") pod \"certified-operators-ksdjm\" (UID: \"41017d0c-c69b-4907-b3dc-f99f64201dc4\") " pod="openshift-marketplace/certified-operators-ksdjm" Feb 03 08:21:45 crc kubenswrapper[4998]: I0203 08:21:45.658527 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41017d0c-c69b-4907-b3dc-f99f64201dc4-catalog-content\") pod \"certified-operators-ksdjm\" (UID: \"41017d0c-c69b-4907-b3dc-f99f64201dc4\") " pod="openshift-marketplace/certified-operators-ksdjm" Feb 03 08:21:45 crc kubenswrapper[4998]: I0203 08:21:45.658588 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41017d0c-c69b-4907-b3dc-f99f64201dc4-utilities\") pod \"certified-operators-ksdjm\" (UID: \"41017d0c-c69b-4907-b3dc-f99f64201dc4\") " pod="openshift-marketplace/certified-operators-ksdjm" Feb 03 08:21:45 crc kubenswrapper[4998]: I0203 08:21:45.659223 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/41017d0c-c69b-4907-b3dc-f99f64201dc4-catalog-content\") pod \"certified-operators-ksdjm\" (UID: \"41017d0c-c69b-4907-b3dc-f99f64201dc4\") " pod="openshift-marketplace/certified-operators-ksdjm" Feb 03 08:21:45 crc kubenswrapper[4998]: I0203 08:21:45.659252 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41017d0c-c69b-4907-b3dc-f99f64201dc4-utilities\") pod \"certified-operators-ksdjm\" (UID: \"41017d0c-c69b-4907-b3dc-f99f64201dc4\") " pod="openshift-marketplace/certified-operators-ksdjm" Feb 03 08:21:45 crc kubenswrapper[4998]: I0203 08:21:45.678005 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9fk69\" (UniqueName: \"kubernetes.io/projected/41017d0c-c69b-4907-b3dc-f99f64201dc4-kube-api-access-9fk69\") pod \"certified-operators-ksdjm\" (UID: \"41017d0c-c69b-4907-b3dc-f99f64201dc4\") " pod="openshift-marketplace/certified-operators-ksdjm" Feb 03 08:21:45 crc kubenswrapper[4998]: I0203 08:21:45.875835 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ksdjm" Feb 03 08:21:46 crc kubenswrapper[4998]: W0203 08:21:46.398373 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod41017d0c_c69b_4907_b3dc_f99f64201dc4.slice/crio-eb6551a1695e5f683e57a39c22639000e47c5a695584a6174ed59d8dd467f9b7 WatchSource:0}: Error finding container eb6551a1695e5f683e57a39c22639000e47c5a695584a6174ed59d8dd467f9b7: Status 404 returned error can't find the container with id eb6551a1695e5f683e57a39c22639000e47c5a695584a6174ed59d8dd467f9b7 Feb 03 08:21:46 crc kubenswrapper[4998]: I0203 08:21:46.409104 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ksdjm"] Feb 03 08:21:47 crc kubenswrapper[4998]: I0203 08:21:47.276264 4998 generic.go:334] "Generic (PLEG): container finished" podID="41017d0c-c69b-4907-b3dc-f99f64201dc4" containerID="3a0c0866be9a7133aaa1c3fe59a66216aca086dcb83bc5fc9806d4077f3551c0" exitCode=0 Feb 03 08:21:47 crc kubenswrapper[4998]: I0203 08:21:47.276337 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ksdjm" event={"ID":"41017d0c-c69b-4907-b3dc-f99f64201dc4","Type":"ContainerDied","Data":"3a0c0866be9a7133aaa1c3fe59a66216aca086dcb83bc5fc9806d4077f3551c0"} Feb 03 08:21:47 crc kubenswrapper[4998]: I0203 08:21:47.276820 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ksdjm" event={"ID":"41017d0c-c69b-4907-b3dc-f99f64201dc4","Type":"ContainerStarted","Data":"eb6551a1695e5f683e57a39c22639000e47c5a695584a6174ed59d8dd467f9b7"} Feb 03 08:21:47 crc kubenswrapper[4998]: I0203 08:21:47.278885 4998 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 03 08:21:51 crc kubenswrapper[4998]: I0203 08:21:51.306260 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ksdjm" event={"ID":"41017d0c-c69b-4907-b3dc-f99f64201dc4","Type":"ContainerStarted","Data":"101ef09da83d17924ad2723a385b6d3cd23d7b4f8f8f5b13724da95ecd1a6df7"} Feb 03 08:21:51 crc kubenswrapper[4998]: E0203 08:21:51.597725 4998 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod41017d0c_c69b_4907_b3dc_f99f64201dc4.slice/crio-101ef09da83d17924ad2723a385b6d3cd23d7b4f8f8f5b13724da95ecd1a6df7.scope\": RecentStats: unable to find data in memory cache]" Feb 03 08:21:52 crc kubenswrapper[4998]: I0203 08:21:52.316135 4998 generic.go:334] "Generic (PLEG): container finished" podID="41017d0c-c69b-4907-b3dc-f99f64201dc4" containerID="101ef09da83d17924ad2723a385b6d3cd23d7b4f8f8f5b13724da95ecd1a6df7" exitCode=0 Feb 03 08:21:52 crc kubenswrapper[4998]: I0203 08:21:52.316320 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ksdjm" event={"ID":"41017d0c-c69b-4907-b3dc-f99f64201dc4","Type":"ContainerDied","Data":"101ef09da83d17924ad2723a385b6d3cd23d7b4f8f8f5b13724da95ecd1a6df7"} Feb 03 08:21:53 crc kubenswrapper[4998]: I0203 08:21:53.327827 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ksdjm" event={"ID":"41017d0c-c69b-4907-b3dc-f99f64201dc4","Type":"ContainerStarted","Data":"0c031788bf0f26c45ff7ac8ac4763395b64f3f7aed21146bc0869360858a6d78"} Feb 03 08:21:53 crc kubenswrapper[4998]: I0203 08:21:53.354940 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-ksdjm" podStartSLOduration=2.7797405939999997 podStartE2EDuration="8.354920243s" podCreationTimestamp="2026-02-03 08:21:45 +0000 UTC" firstStartedPulling="2026-02-03 08:21:47.278250264 +0000 UTC m=+5745.564944070" lastFinishedPulling="2026-02-03 08:21:52.853429913 +0000 UTC m=+5751.140123719" observedRunningTime="2026-02-03 08:21:53.350423505 +0000 UTC m=+5751.637117321" watchObservedRunningTime="2026-02-03 08:21:53.354920243 +0000 UTC m=+5751.641614059" Feb 03 08:21:55 crc kubenswrapper[4998]: I0203 08:21:55.877074 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-ksdjm" Feb 03 08:21:55 crc kubenswrapper[4998]: I0203 08:21:55.877540 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-ksdjm" Feb 03 08:21:55 crc kubenswrapper[4998]: I0203 08:21:55.929983 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-ksdjm" Feb 03 08:22:05 crc kubenswrapper[4998]: I0203 08:22:05.928549 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-ksdjm" Feb 03 08:22:05 crc kubenswrapper[4998]: I0203 08:22:05.992816 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ksdjm"] Feb 03 08:22:06 crc kubenswrapper[4998]: I0203 08:22:06.036958 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-x6mnf"] Feb 03 08:22:06 crc kubenswrapper[4998]: I0203 08:22:06.037198 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-x6mnf" podUID="c196c5c8-06df-4911-a73d-7ba33f57af97" containerName="registry-server" containerID="cri-o://322b19f8965428be2fac4f136fe61479e9dcfbdf8176f3b6b96bed6afe4416c9" gracePeriod=2 Feb 03 08:22:06 crc kubenswrapper[4998]: I0203 08:22:06.436169 4998 generic.go:334] "Generic (PLEG): container finished" podID="c196c5c8-06df-4911-a73d-7ba33f57af97" containerID="322b19f8965428be2fac4f136fe61479e9dcfbdf8176f3b6b96bed6afe4416c9" exitCode=0 Feb 03 08:22:06 crc 
kubenswrapper[4998]: I0203 08:22:06.437125 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x6mnf" event={"ID":"c196c5c8-06df-4911-a73d-7ba33f57af97","Type":"ContainerDied","Data":"322b19f8965428be2fac4f136fe61479e9dcfbdf8176f3b6b96bed6afe4416c9"} Feb 03 08:22:06 crc kubenswrapper[4998]: I0203 08:22:06.437179 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x6mnf" event={"ID":"c196c5c8-06df-4911-a73d-7ba33f57af97","Type":"ContainerDied","Data":"f5e86394bfa9cf27b3aa42be4fcae61049668e61fb0800e491c62ce2398ef429"} Feb 03 08:22:06 crc kubenswrapper[4998]: I0203 08:22:06.437192 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f5e86394bfa9cf27b3aa42be4fcae61049668e61fb0800e491c62ce2398ef429" Feb 03 08:22:06 crc kubenswrapper[4998]: I0203 08:22:06.454758 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x6mnf" Feb 03 08:22:06 crc kubenswrapper[4998]: I0203 08:22:06.630580 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lhdmx\" (UniqueName: \"kubernetes.io/projected/c196c5c8-06df-4911-a73d-7ba33f57af97-kube-api-access-lhdmx\") pod \"c196c5c8-06df-4911-a73d-7ba33f57af97\" (UID: \"c196c5c8-06df-4911-a73d-7ba33f57af97\") " Feb 03 08:22:06 crc kubenswrapper[4998]: I0203 08:22:06.630730 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c196c5c8-06df-4911-a73d-7ba33f57af97-catalog-content\") pod \"c196c5c8-06df-4911-a73d-7ba33f57af97\" (UID: \"c196c5c8-06df-4911-a73d-7ba33f57af97\") " Feb 03 08:22:06 crc kubenswrapper[4998]: I0203 08:22:06.630880 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c196c5c8-06df-4911-a73d-7ba33f57af97-utilities\") pod \"c196c5c8-06df-4911-a73d-7ba33f57af97\" (UID: \"c196c5c8-06df-4911-a73d-7ba33f57af97\") " Feb 03 08:22:06 crc kubenswrapper[4998]: I0203 08:22:06.631497 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c196c5c8-06df-4911-a73d-7ba33f57af97-utilities" (OuterVolumeSpecName: "utilities") pod "c196c5c8-06df-4911-a73d-7ba33f57af97" (UID: "c196c5c8-06df-4911-a73d-7ba33f57af97"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:22:06 crc kubenswrapper[4998]: I0203 08:22:06.645331 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c196c5c8-06df-4911-a73d-7ba33f57af97-kube-api-access-lhdmx" (OuterVolumeSpecName: "kube-api-access-lhdmx") pod "c196c5c8-06df-4911-a73d-7ba33f57af97" (UID: "c196c5c8-06df-4911-a73d-7ba33f57af97"). InnerVolumeSpecName "kube-api-access-lhdmx". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:22:06 crc kubenswrapper[4998]: I0203 08:22:06.683812 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c196c5c8-06df-4911-a73d-7ba33f57af97-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c196c5c8-06df-4911-a73d-7ba33f57af97" (UID: "c196c5c8-06df-4911-a73d-7ba33f57af97"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:22:06 crc kubenswrapper[4998]: I0203 08:22:06.743381 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c196c5c8-06df-4911-a73d-7ba33f57af97-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 08:22:06 crc kubenswrapper[4998]: I0203 08:22:06.743448 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c196c5c8-06df-4911-a73d-7ba33f57af97-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 08:22:06 crc kubenswrapper[4998]: I0203 08:22:06.743465 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lhdmx\" (UniqueName: \"kubernetes.io/projected/c196c5c8-06df-4911-a73d-7ba33f57af97-kube-api-access-lhdmx\") on node \"crc\" DevicePath \"\"" Feb 03 08:22:07 crc kubenswrapper[4998]: I0203 08:22:07.442805 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x6mnf" Feb 03 08:22:07 crc kubenswrapper[4998]: I0203 08:22:07.479043 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-x6mnf"] Feb 03 08:22:07 crc kubenswrapper[4998]: I0203 08:22:07.492689 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-x6mnf"] Feb 03 08:22:07 crc kubenswrapper[4998]: I0203 08:22:07.496590 4998 scope.go:117] "RemoveContainer" containerID="322b19f8965428be2fac4f136fe61479e9dcfbdf8176f3b6b96bed6afe4416c9" Feb 03 08:22:07 crc kubenswrapper[4998]: I0203 08:22:07.518074 4998 scope.go:117] "RemoveContainer" containerID="a24cb0bc329dcde85b4ba03675e8bebcab28f815987b17bfb7914d1e40f0c6a3" Feb 03 08:22:07 crc kubenswrapper[4998]: I0203 08:22:07.539104 4998 scope.go:117] "RemoveContainer" containerID="54e5eed017ed1827111f1e52c3b5fdf77dada26480ba1750344092b6e9f0fcda" Feb 03 08:22:08 crc kubenswrapper[4998]: I0203 08:22:08.440495 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c196c5c8-06df-4911-a73d-7ba33f57af97" path="/var/lib/kubelet/pods/c196c5c8-06df-4911-a73d-7ba33f57af97/volumes" Feb 03 08:22:56 crc kubenswrapper[4998]: I0203 08:22:56.395733 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-v6lgt"] Feb 03 08:22:56 crc kubenswrapper[4998]: E0203 08:22:56.397549 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c196c5c8-06df-4911-a73d-7ba33f57af97" containerName="extract-content" Feb 03 08:22:56 crc kubenswrapper[4998]: I0203 08:22:56.397632 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="c196c5c8-06df-4911-a73d-7ba33f57af97" containerName="extract-content" Feb 03 08:22:56 crc kubenswrapper[4998]: E0203 08:22:56.397698 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c196c5c8-06df-4911-a73d-7ba33f57af97" containerName="registry-server" Feb 03 08:22:56 crc kubenswrapper[4998]: I0203 08:22:56.397761 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="c196c5c8-06df-4911-a73d-7ba33f57af97" containerName="registry-server" Feb 03 08:22:56 crc kubenswrapper[4998]: E0203 08:22:56.397867 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c196c5c8-06df-4911-a73d-7ba33f57af97" containerName="extract-utilities" Feb 03 08:22:56 crc kubenswrapper[4998]: I0203 08:22:56.397929 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="c196c5c8-06df-4911-a73d-7ba33f57af97" 
containerName="extract-utilities" Feb 03 08:22:56 crc kubenswrapper[4998]: I0203 08:22:56.398130 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="c196c5c8-06df-4911-a73d-7ba33f57af97" containerName="registry-server" Feb 03 08:22:56 crc kubenswrapper[4998]: I0203 08:22:56.399188 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v6lgt" Feb 03 08:22:56 crc kubenswrapper[4998]: I0203 08:22:56.424318 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v6lgt"] Feb 03 08:22:56 crc kubenswrapper[4998]: I0203 08:22:56.557270 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a2ed578-4b04-4add-bfdd-83ebec4ee446-utilities\") pod \"community-operators-v6lgt\" (UID: \"6a2ed578-4b04-4add-bfdd-83ebec4ee446\") " pod="openshift-marketplace/community-operators-v6lgt" Feb 03 08:22:56 crc kubenswrapper[4998]: I0203 08:22:56.557363 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxc89\" (UniqueName: \"kubernetes.io/projected/6a2ed578-4b04-4add-bfdd-83ebec4ee446-kube-api-access-fxc89\") pod \"community-operators-v6lgt\" (UID: \"6a2ed578-4b04-4add-bfdd-83ebec4ee446\") " pod="openshift-marketplace/community-operators-v6lgt" Feb 03 08:22:56 crc kubenswrapper[4998]: I0203 08:22:56.557429 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a2ed578-4b04-4add-bfdd-83ebec4ee446-catalog-content\") pod \"community-operators-v6lgt\" (UID: \"6a2ed578-4b04-4add-bfdd-83ebec4ee446\") " pod="openshift-marketplace/community-operators-v6lgt" Feb 03 08:22:56 crc kubenswrapper[4998]: I0203 08:22:56.659279 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a2ed578-4b04-4add-bfdd-83ebec4ee446-utilities\") pod \"community-operators-v6lgt\" (UID: \"6a2ed578-4b04-4add-bfdd-83ebec4ee446\") " pod="openshift-marketplace/community-operators-v6lgt" Feb 03 08:22:56 crc kubenswrapper[4998]: I0203 08:22:56.659359 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxc89\" (UniqueName: \"kubernetes.io/projected/6a2ed578-4b04-4add-bfdd-83ebec4ee446-kube-api-access-fxc89\") pod \"community-operators-v6lgt\" (UID: \"6a2ed578-4b04-4add-bfdd-83ebec4ee446\") " pod="openshift-marketplace/community-operators-v6lgt" Feb 03 08:22:56 crc kubenswrapper[4998]: I0203 08:22:56.659396 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a2ed578-4b04-4add-bfdd-83ebec4ee446-catalog-content\") pod \"community-operators-v6lgt\" (UID: \"6a2ed578-4b04-4add-bfdd-83ebec4ee446\") " pod="openshift-marketplace/community-operators-v6lgt" Feb 03 08:22:56 crc kubenswrapper[4998]: I0203 08:22:56.659877 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a2ed578-4b04-4add-bfdd-83ebec4ee446-utilities\") pod \"community-operators-v6lgt\" (UID: \"6a2ed578-4b04-4add-bfdd-83ebec4ee446\") " pod="openshift-marketplace/community-operators-v6lgt" Feb 03 08:22:56 crc kubenswrapper[4998]: I0203 08:22:56.659896 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a2ed578-4b04-4add-bfdd-83ebec4ee446-catalog-content\") pod \"community-operators-v6lgt\" (UID: \"6a2ed578-4b04-4add-bfdd-83ebec4ee446\") " pod="openshift-marketplace/community-operators-v6lgt" Feb 03 08:22:56 crc kubenswrapper[4998]: I0203 08:22:56.689220 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fxc89\" (UniqueName: \"kubernetes.io/projected/6a2ed578-4b04-4add-bfdd-83ebec4ee446-kube-api-access-fxc89\") pod \"community-operators-v6lgt\" (UID: \"6a2ed578-4b04-4add-bfdd-83ebec4ee446\") " pod="openshift-marketplace/community-operators-v6lgt" Feb 03 08:22:56 crc kubenswrapper[4998]: I0203 08:22:56.724083 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v6lgt" Feb 03 08:22:57 crc kubenswrapper[4998]: I0203 08:22:57.220383 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v6lgt"] Feb 03 08:22:57 crc kubenswrapper[4998]: I0203 08:22:57.864740 4998 generic.go:334] "Generic (PLEG): container finished" podID="6a2ed578-4b04-4add-bfdd-83ebec4ee446" containerID="8e9954521394778b88cf923474223575935bed3c8781d12c62daf70462cde730" exitCode=0 Feb 03 08:22:57 crc kubenswrapper[4998]: I0203 08:22:57.865066 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v6lgt" event={"ID":"6a2ed578-4b04-4add-bfdd-83ebec4ee446","Type":"ContainerDied","Data":"8e9954521394778b88cf923474223575935bed3c8781d12c62daf70462cde730"} Feb 03 08:22:57 crc kubenswrapper[4998]: I0203 08:22:57.865097 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v6lgt" event={"ID":"6a2ed578-4b04-4add-bfdd-83ebec4ee446","Type":"ContainerStarted","Data":"b30e77e072144b27480e22998fcb947cdc739b055f77eb2e6ca61532cf9d2316"} Feb 03 08:22:58 crc kubenswrapper[4998]: I0203 08:22:58.873510 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v6lgt" event={"ID":"6a2ed578-4b04-4add-bfdd-83ebec4ee446","Type":"ContainerStarted","Data":"b25fbb1ba29964fc9d3b7b1aa549f61c6a259460a25e621bc158702a049b102c"} Feb 03 08:22:59 crc kubenswrapper[4998]: I0203 08:22:59.883965 4998 generic.go:334] "Generic (PLEG): container finished" podID="6a2ed578-4b04-4add-bfdd-83ebec4ee446" containerID="b25fbb1ba29964fc9d3b7b1aa549f61c6a259460a25e621bc158702a049b102c" exitCode=0 Feb 03 08:22:59 crc kubenswrapper[4998]: I0203 08:22:59.884031 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v6lgt" event={"ID":"6a2ed578-4b04-4add-bfdd-83ebec4ee446","Type":"ContainerDied","Data":"b25fbb1ba29964fc9d3b7b1aa549f61c6a259460a25e621bc158702a049b102c"} Feb 03 08:23:00 crc kubenswrapper[4998]: I0203 08:23:00.896329 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v6lgt" event={"ID":"6a2ed578-4b04-4add-bfdd-83ebec4ee446","Type":"ContainerStarted","Data":"1cb8d93bb8c7df58edaf1d0c1951a3e4f5eb22fb31b3fe290b41e9508899b669"} Feb 03 08:23:00 crc kubenswrapper[4998]: I0203 08:23:00.929549 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-v6lgt" podStartSLOduration=2.445068403 podStartE2EDuration="4.929525999s" podCreationTimestamp="2026-02-03 08:22:56 +0000 UTC" firstStartedPulling="2026-02-03 08:22:57.867579735 +0000 UTC m=+5816.154273541" 
lastFinishedPulling="2026-02-03 08:23:00.352037331 +0000 UTC m=+5818.638731137" observedRunningTime="2026-02-03 08:23:00.919368089 +0000 UTC m=+5819.206061915" watchObservedRunningTime="2026-02-03 08:23:00.929525999 +0000 UTC m=+5819.216219825" Feb 03 08:23:06 crc kubenswrapper[4998]: I0203 08:23:06.724313 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-v6lgt" Feb 03 08:23:06 crc kubenswrapper[4998]: I0203 08:23:06.724913 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-v6lgt" Feb 03 08:23:06 crc kubenswrapper[4998]: I0203 08:23:06.763653 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-v6lgt" Feb 03 08:23:06 crc kubenswrapper[4998]: I0203 08:23:06.984143 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-v6lgt" Feb 03 08:23:07 crc kubenswrapper[4998]: I0203 08:23:07.035815 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-v6lgt"] Feb 03 08:23:08 crc kubenswrapper[4998]: I0203 08:23:08.953214 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-v6lgt" podUID="6a2ed578-4b04-4add-bfdd-83ebec4ee446" containerName="registry-server" containerID="cri-o://1cb8d93bb8c7df58edaf1d0c1951a3e4f5eb22fb31b3fe290b41e9508899b669" gracePeriod=2 Feb 03 08:23:09 crc kubenswrapper[4998]: I0203 08:23:09.329696 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v6lgt" Feb 03 08:23:09 crc kubenswrapper[4998]: I0203 08:23:09.479635 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a2ed578-4b04-4add-bfdd-83ebec4ee446-catalog-content\") pod \"6a2ed578-4b04-4add-bfdd-83ebec4ee446\" (UID: \"6a2ed578-4b04-4add-bfdd-83ebec4ee446\") " Feb 03 08:23:09 crc kubenswrapper[4998]: I0203 08:23:09.479733 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a2ed578-4b04-4add-bfdd-83ebec4ee446-utilities\") pod \"6a2ed578-4b04-4add-bfdd-83ebec4ee446\" (UID: \"6a2ed578-4b04-4add-bfdd-83ebec4ee446\") " Feb 03 08:23:09 crc kubenswrapper[4998]: I0203 08:23:09.479821 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fxc89\" (UniqueName: \"kubernetes.io/projected/6a2ed578-4b04-4add-bfdd-83ebec4ee446-kube-api-access-fxc89\") pod \"6a2ed578-4b04-4add-bfdd-83ebec4ee446\" (UID: \"6a2ed578-4b04-4add-bfdd-83ebec4ee446\") " Feb 03 08:23:09 crc kubenswrapper[4998]: I0203 08:23:09.481257 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6a2ed578-4b04-4add-bfdd-83ebec4ee446-utilities" (OuterVolumeSpecName: "utilities") pod "6a2ed578-4b04-4add-bfdd-83ebec4ee446" (UID: "6a2ed578-4b04-4add-bfdd-83ebec4ee446"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:23:09 crc kubenswrapper[4998]: I0203 08:23:09.485950 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a2ed578-4b04-4add-bfdd-83ebec4ee446-kube-api-access-fxc89" (OuterVolumeSpecName: "kube-api-access-fxc89") pod "6a2ed578-4b04-4add-bfdd-83ebec4ee446" (UID: "6a2ed578-4b04-4add-bfdd-83ebec4ee446"). InnerVolumeSpecName "kube-api-access-fxc89". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:23:09 crc kubenswrapper[4998]: I0203 08:23:09.582468 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a2ed578-4b04-4add-bfdd-83ebec4ee446-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 08:23:09 crc kubenswrapper[4998]: I0203 08:23:09.582508 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fxc89\" (UniqueName: \"kubernetes.io/projected/6a2ed578-4b04-4add-bfdd-83ebec4ee446-kube-api-access-fxc89\") on node \"crc\" DevicePath \"\"" Feb 03 08:23:09 crc kubenswrapper[4998]: I0203 08:23:09.867341 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6a2ed578-4b04-4add-bfdd-83ebec4ee446-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6a2ed578-4b04-4add-bfdd-83ebec4ee446" (UID: "6a2ed578-4b04-4add-bfdd-83ebec4ee446"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:23:09 crc kubenswrapper[4998]: I0203 08:23:09.886816 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a2ed578-4b04-4add-bfdd-83ebec4ee446-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 08:23:09 crc kubenswrapper[4998]: I0203 08:23:09.964254 4998 generic.go:334] "Generic (PLEG): container finished" podID="6a2ed578-4b04-4add-bfdd-83ebec4ee446" containerID="1cb8d93bb8c7df58edaf1d0c1951a3e4f5eb22fb31b3fe290b41e9508899b669" exitCode=0 Feb 03 08:23:09 crc kubenswrapper[4998]: I0203 08:23:09.964310 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v6lgt" event={"ID":"6a2ed578-4b04-4add-bfdd-83ebec4ee446","Type":"ContainerDied","Data":"1cb8d93bb8c7df58edaf1d0c1951a3e4f5eb22fb31b3fe290b41e9508899b669"} Feb 03 08:23:09 crc kubenswrapper[4998]: I0203 08:23:09.964345 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v6lgt" event={"ID":"6a2ed578-4b04-4add-bfdd-83ebec4ee446","Type":"ContainerDied","Data":"b30e77e072144b27480e22998fcb947cdc739b055f77eb2e6ca61532cf9d2316"} Feb 03 08:23:09 crc kubenswrapper[4998]: I0203 08:23:09.964367 4998 scope.go:117] "RemoveContainer" containerID="1cb8d93bb8c7df58edaf1d0c1951a3e4f5eb22fb31b3fe290b41e9508899b669" Feb 03 08:23:09 crc kubenswrapper[4998]: I0203 08:23:09.964534 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-v6lgt" Feb 03 08:23:09 crc kubenswrapper[4998]: I0203 08:23:09.985261 4998 scope.go:117] "RemoveContainer" containerID="b25fbb1ba29964fc9d3b7b1aa549f61c6a259460a25e621bc158702a049b102c" Feb 03 08:23:10 crc kubenswrapper[4998]: I0203 08:23:10.009081 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-v6lgt"] Feb 03 08:23:10 crc kubenswrapper[4998]: I0203 08:23:10.016855 4998 scope.go:117] "RemoveContainer" containerID="8e9954521394778b88cf923474223575935bed3c8781d12c62daf70462cde730" Feb 03 08:23:10 crc kubenswrapper[4998]: I0203 08:23:10.017177 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-v6lgt"] Feb 03 08:23:10 crc kubenswrapper[4998]: I0203 08:23:10.034658 4998 scope.go:117] "RemoveContainer" containerID="1cb8d93bb8c7df58edaf1d0c1951a3e4f5eb22fb31b3fe290b41e9508899b669" Feb 03 08:23:10 crc kubenswrapper[4998]: E0203 08:23:10.035144 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1cb8d93bb8c7df58edaf1d0c1951a3e4f5eb22fb31b3fe290b41e9508899b669\": container with ID starting with 1cb8d93bb8c7df58edaf1d0c1951a3e4f5eb22fb31b3fe290b41e9508899b669 not found: ID does not exist" containerID="1cb8d93bb8c7df58edaf1d0c1951a3e4f5eb22fb31b3fe290b41e9508899b669" Feb 03 08:23:10 crc kubenswrapper[4998]: I0203 08:23:10.035180 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1cb8d93bb8c7df58edaf1d0c1951a3e4f5eb22fb31b3fe290b41e9508899b669"} err="failed to get container status \"1cb8d93bb8c7df58edaf1d0c1951a3e4f5eb22fb31b3fe290b41e9508899b669\": rpc error: code = NotFound desc = could not find container \"1cb8d93bb8c7df58edaf1d0c1951a3e4f5eb22fb31b3fe290b41e9508899b669\": container with ID starting with 1cb8d93bb8c7df58edaf1d0c1951a3e4f5eb22fb31b3fe290b41e9508899b669 not found: ID does not exist" Feb 03 08:23:10 crc kubenswrapper[4998]: I0203 08:23:10.035205 4998 scope.go:117] "RemoveContainer" containerID="b25fbb1ba29964fc9d3b7b1aa549f61c6a259460a25e621bc158702a049b102c" Feb 03 08:23:10 crc kubenswrapper[4998]: E0203 08:23:10.035518 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b25fbb1ba29964fc9d3b7b1aa549f61c6a259460a25e621bc158702a049b102c\": container with ID starting with b25fbb1ba29964fc9d3b7b1aa549f61c6a259460a25e621bc158702a049b102c not found: ID does not exist" containerID="b25fbb1ba29964fc9d3b7b1aa549f61c6a259460a25e621bc158702a049b102c" Feb 03 08:23:10 crc kubenswrapper[4998]: I0203 08:23:10.035564 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b25fbb1ba29964fc9d3b7b1aa549f61c6a259460a25e621bc158702a049b102c"} err="failed to get container status \"b25fbb1ba29964fc9d3b7b1aa549f61c6a259460a25e621bc158702a049b102c\": rpc error: code = NotFound desc = could not find container \"b25fbb1ba29964fc9d3b7b1aa549f61c6a259460a25e621bc158702a049b102c\": container with ID starting with b25fbb1ba29964fc9d3b7b1aa549f61c6a259460a25e621bc158702a049b102c not found: ID does not exist" Feb 03 08:23:10 crc kubenswrapper[4998]: I0203 08:23:10.035597 4998 scope.go:117] "RemoveContainer" containerID="8e9954521394778b88cf923474223575935bed3c8781d12c62daf70462cde730" Feb 03 08:23:10 crc kubenswrapper[4998]: E0203 08:23:10.036224 4998 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"8e9954521394778b88cf923474223575935bed3c8781d12c62daf70462cde730\": container with ID starting with 8e9954521394778b88cf923474223575935bed3c8781d12c62daf70462cde730 not found: ID does not exist" containerID="8e9954521394778b88cf923474223575935bed3c8781d12c62daf70462cde730" Feb 03 08:23:10 crc kubenswrapper[4998]: I0203 08:23:10.036357 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e9954521394778b88cf923474223575935bed3c8781d12c62daf70462cde730"} err="failed to get container status \"8e9954521394778b88cf923474223575935bed3c8781d12c62daf70462cde730\": rpc error: code = NotFound desc = could not find container \"8e9954521394778b88cf923474223575935bed3c8781d12c62daf70462cde730\": container with ID starting with 8e9954521394778b88cf923474223575935bed3c8781d12c62daf70462cde730 not found: ID does not exist" Feb 03 08:23:10 crc kubenswrapper[4998]: I0203 08:23:10.439230 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a2ed578-4b04-4add-bfdd-83ebec4ee446" path="/var/lib/kubelet/pods/6a2ed578-4b04-4add-bfdd-83ebec4ee446/volumes" Feb 03 08:23:25 crc kubenswrapper[4998]: I0203 08:23:25.620093 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-gl68q"] Feb 03 08:23:25 crc kubenswrapper[4998]: E0203 08:23:25.621968 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a2ed578-4b04-4add-bfdd-83ebec4ee446" containerName="registry-server" Feb 03 08:23:25 crc kubenswrapper[4998]: I0203 08:23:25.621991 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a2ed578-4b04-4add-bfdd-83ebec4ee446" containerName="registry-server" Feb 03 08:23:25 crc kubenswrapper[4998]: E0203 08:23:25.622032 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a2ed578-4b04-4add-bfdd-83ebec4ee446" containerName="extract-content" Feb 03 08:23:25 crc kubenswrapper[4998]: I0203 08:23:25.622040 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a2ed578-4b04-4add-bfdd-83ebec4ee446" containerName="extract-content" Feb 03 08:23:25 crc kubenswrapper[4998]: E0203 08:23:25.622051 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a2ed578-4b04-4add-bfdd-83ebec4ee446" containerName="extract-utilities" Feb 03 08:23:25 crc kubenswrapper[4998]: I0203 08:23:25.622061 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a2ed578-4b04-4add-bfdd-83ebec4ee446" containerName="extract-utilities" Feb 03 08:23:25 crc kubenswrapper[4998]: I0203 08:23:25.622309 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a2ed578-4b04-4add-bfdd-83ebec4ee446" containerName="registry-server" Feb 03 08:23:25 crc kubenswrapper[4998]: I0203 08:23:25.623719 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gl68q" Feb 03 08:23:25 crc kubenswrapper[4998]: I0203 08:23:25.629868 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gl68q"] Feb 03 08:23:25 crc kubenswrapper[4998]: I0203 08:23:25.731203 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c023fe8e-e5d7-4be8-b967-b95715528cb8-catalog-content\") pod \"redhat-marketplace-gl68q\" (UID: \"c023fe8e-e5d7-4be8-b967-b95715528cb8\") " pod="openshift-marketplace/redhat-marketplace-gl68q" Feb 03 08:23:25 crc kubenswrapper[4998]: I0203 08:23:25.731264 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c023fe8e-e5d7-4be8-b967-b95715528cb8-utilities\") pod \"redhat-marketplace-gl68q\" (UID: \"c023fe8e-e5d7-4be8-b967-b95715528cb8\") " pod="openshift-marketplace/redhat-marketplace-gl68q" Feb 03 08:23:25 crc kubenswrapper[4998]: I0203 08:23:25.731405 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhlnk\" (UniqueName: \"kubernetes.io/projected/c023fe8e-e5d7-4be8-b967-b95715528cb8-kube-api-access-jhlnk\") pod \"redhat-marketplace-gl68q\" (UID: \"c023fe8e-e5d7-4be8-b967-b95715528cb8\") " pod="openshift-marketplace/redhat-marketplace-gl68q" Feb 03 08:23:25 crc kubenswrapper[4998]: I0203 08:23:25.832247 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhlnk\" (UniqueName: \"kubernetes.io/projected/c023fe8e-e5d7-4be8-b967-b95715528cb8-kube-api-access-jhlnk\") pod \"redhat-marketplace-gl68q\" (UID: \"c023fe8e-e5d7-4be8-b967-b95715528cb8\") " pod="openshift-marketplace/redhat-marketplace-gl68q" Feb 03 08:23:25 crc kubenswrapper[4998]: I0203 08:23:25.832333 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c023fe8e-e5d7-4be8-b967-b95715528cb8-catalog-content\") pod \"redhat-marketplace-gl68q\" (UID: \"c023fe8e-e5d7-4be8-b967-b95715528cb8\") " pod="openshift-marketplace/redhat-marketplace-gl68q" Feb 03 08:23:25 crc kubenswrapper[4998]: I0203 08:23:25.832385 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c023fe8e-e5d7-4be8-b967-b95715528cb8-utilities\") pod \"redhat-marketplace-gl68q\" (UID: \"c023fe8e-e5d7-4be8-b967-b95715528cb8\") " pod="openshift-marketplace/redhat-marketplace-gl68q" Feb 03 08:23:25 crc kubenswrapper[4998]: I0203 08:23:25.832960 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c023fe8e-e5d7-4be8-b967-b95715528cb8-utilities\") pod \"redhat-marketplace-gl68q\" (UID: \"c023fe8e-e5d7-4be8-b967-b95715528cb8\") " pod="openshift-marketplace/redhat-marketplace-gl68q" Feb 03 08:23:25 crc kubenswrapper[4998]: I0203 08:23:25.833036 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c023fe8e-e5d7-4be8-b967-b95715528cb8-catalog-content\") pod \"redhat-marketplace-gl68q\" (UID: \"c023fe8e-e5d7-4be8-b967-b95715528cb8\") " pod="openshift-marketplace/redhat-marketplace-gl68q" Feb 03 08:23:25 crc kubenswrapper[4998]: I0203 08:23:25.853688 4998 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-jhlnk\" (UniqueName: \"kubernetes.io/projected/c023fe8e-e5d7-4be8-b967-b95715528cb8-kube-api-access-jhlnk\") pod \"redhat-marketplace-gl68q\" (UID: \"c023fe8e-e5d7-4be8-b967-b95715528cb8\") " pod="openshift-marketplace/redhat-marketplace-gl68q" Feb 03 08:23:25 crc kubenswrapper[4998]: I0203 08:23:25.958593 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gl68q" Feb 03 08:23:26 crc kubenswrapper[4998]: W0203 08:23:26.435669 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc023fe8e_e5d7_4be8_b967_b95715528cb8.slice/crio-f2c3f168ef50a0b519ae142905a4733dd5e38441d703ee501686d114dcc0dff0 WatchSource:0}: Error finding container f2c3f168ef50a0b519ae142905a4733dd5e38441d703ee501686d114dcc0dff0: Status 404 returned error can't find the container with id f2c3f168ef50a0b519ae142905a4733dd5e38441d703ee501686d114dcc0dff0 Feb 03 08:23:26 crc kubenswrapper[4998]: I0203 08:23:26.445897 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gl68q"] Feb 03 08:23:27 crc kubenswrapper[4998]: I0203 08:23:27.105532 4998 generic.go:334] "Generic (PLEG): container finished" podID="c023fe8e-e5d7-4be8-b967-b95715528cb8" containerID="92664dbd150d0f210297004156c68ecc5121541269ac8b2b097faf73d7d121e6" exitCode=0 Feb 03 08:23:27 crc kubenswrapper[4998]: I0203 08:23:27.105606 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gl68q" event={"ID":"c023fe8e-e5d7-4be8-b967-b95715528cb8","Type":"ContainerDied","Data":"92664dbd150d0f210297004156c68ecc5121541269ac8b2b097faf73d7d121e6"} Feb 03 08:23:27 crc kubenswrapper[4998]: I0203 08:23:27.105847 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gl68q" event={"ID":"c023fe8e-e5d7-4be8-b967-b95715528cb8","Type":"ContainerStarted","Data":"f2c3f168ef50a0b519ae142905a4733dd5e38441d703ee501686d114dcc0dff0"} Feb 03 08:23:28 crc kubenswrapper[4998]: I0203 08:23:28.114146 4998 generic.go:334] "Generic (PLEG): container finished" podID="c023fe8e-e5d7-4be8-b967-b95715528cb8" containerID="177fe29fccca4356b71afd6d156f1144d2b2b41af72a6750dd20a2ea76ea259b" exitCode=0 Feb 03 08:23:28 crc kubenswrapper[4998]: I0203 08:23:28.114202 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gl68q" event={"ID":"c023fe8e-e5d7-4be8-b967-b95715528cb8","Type":"ContainerDied","Data":"177fe29fccca4356b71afd6d156f1144d2b2b41af72a6750dd20a2ea76ea259b"} Feb 03 08:23:29 crc kubenswrapper[4998]: I0203 08:23:29.124348 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gl68q" event={"ID":"c023fe8e-e5d7-4be8-b967-b95715528cb8","Type":"ContainerStarted","Data":"3af042006ded897f74a75b04ed639dc0bc2131f0871e63d5423435ecd4c04ef4"} Feb 03 08:23:29 crc kubenswrapper[4998]: I0203 08:23:29.145662 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-gl68q" podStartSLOduration=2.7086999130000002 podStartE2EDuration="4.145642835s" podCreationTimestamp="2026-02-03 08:23:25 +0000 UTC" firstStartedPulling="2026-02-03 08:23:27.108480337 +0000 UTC m=+5845.395174163" lastFinishedPulling="2026-02-03 08:23:28.545423279 +0000 UTC m=+5846.832117085" observedRunningTime="2026-02-03 08:23:29.141569809 +0000 UTC m=+5847.428263615" 
watchObservedRunningTime="2026-02-03 08:23:29.145642835 +0000 UTC m=+5847.432336641" Feb 03 08:23:35 crc kubenswrapper[4998]: I0203 08:23:35.959105 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-gl68q" Feb 03 08:23:35 crc kubenswrapper[4998]: I0203 08:23:35.960031 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-gl68q" Feb 03 08:23:36 crc kubenswrapper[4998]: I0203 08:23:36.003667 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-gl68q" Feb 03 08:23:36 crc kubenswrapper[4998]: I0203 08:23:36.219352 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-gl68q" Feb 03 08:23:36 crc kubenswrapper[4998]: I0203 08:23:36.272208 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gl68q"] Feb 03 08:23:38 crc kubenswrapper[4998]: I0203 08:23:38.187079 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-gl68q" podUID="c023fe8e-e5d7-4be8-b967-b95715528cb8" containerName="registry-server" containerID="cri-o://3af042006ded897f74a75b04ed639dc0bc2131f0871e63d5423435ecd4c04ef4" gracePeriod=2 Feb 03 08:23:38 crc kubenswrapper[4998]: I0203 08:23:38.581598 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gl68q" Feb 03 08:23:38 crc kubenswrapper[4998]: I0203 08:23:38.640566 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c023fe8e-e5d7-4be8-b967-b95715528cb8-utilities\") pod \"c023fe8e-e5d7-4be8-b967-b95715528cb8\" (UID: \"c023fe8e-e5d7-4be8-b967-b95715528cb8\") " Feb 03 08:23:38 crc kubenswrapper[4998]: I0203 08:23:38.640694 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhlnk\" (UniqueName: \"kubernetes.io/projected/c023fe8e-e5d7-4be8-b967-b95715528cb8-kube-api-access-jhlnk\") pod \"c023fe8e-e5d7-4be8-b967-b95715528cb8\" (UID: \"c023fe8e-e5d7-4be8-b967-b95715528cb8\") " Feb 03 08:23:38 crc kubenswrapper[4998]: I0203 08:23:38.640830 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c023fe8e-e5d7-4be8-b967-b95715528cb8-catalog-content\") pod \"c023fe8e-e5d7-4be8-b967-b95715528cb8\" (UID: \"c023fe8e-e5d7-4be8-b967-b95715528cb8\") " Feb 03 08:23:38 crc kubenswrapper[4998]: I0203 08:23:38.641653 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c023fe8e-e5d7-4be8-b967-b95715528cb8-utilities" (OuterVolumeSpecName: "utilities") pod "c023fe8e-e5d7-4be8-b967-b95715528cb8" (UID: "c023fe8e-e5d7-4be8-b967-b95715528cb8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:23:38 crc kubenswrapper[4998]: I0203 08:23:38.648911 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c023fe8e-e5d7-4be8-b967-b95715528cb8-kube-api-access-jhlnk" (OuterVolumeSpecName: "kube-api-access-jhlnk") pod "c023fe8e-e5d7-4be8-b967-b95715528cb8" (UID: "c023fe8e-e5d7-4be8-b967-b95715528cb8"). InnerVolumeSpecName "kube-api-access-jhlnk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:23:38 crc kubenswrapper[4998]: I0203 08:23:38.673035 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c023fe8e-e5d7-4be8-b967-b95715528cb8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c023fe8e-e5d7-4be8-b967-b95715528cb8" (UID: "c023fe8e-e5d7-4be8-b967-b95715528cb8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:23:38 crc kubenswrapper[4998]: I0203 08:23:38.742823 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c023fe8e-e5d7-4be8-b967-b95715528cb8-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 08:23:38 crc kubenswrapper[4998]: I0203 08:23:38.742865 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c023fe8e-e5d7-4be8-b967-b95715528cb8-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 08:23:38 crc kubenswrapper[4998]: I0203 08:23:38.742881 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhlnk\" (UniqueName: \"kubernetes.io/projected/c023fe8e-e5d7-4be8-b967-b95715528cb8-kube-api-access-jhlnk\") on node \"crc\" DevicePath \"\"" Feb 03 08:23:39 crc kubenswrapper[4998]: I0203 08:23:39.197528 4998 generic.go:334] "Generic (PLEG): container finished" podID="c023fe8e-e5d7-4be8-b967-b95715528cb8" containerID="3af042006ded897f74a75b04ed639dc0bc2131f0871e63d5423435ecd4c04ef4" exitCode=0 Feb 03 08:23:39 crc kubenswrapper[4998]: I0203 08:23:39.197584 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gl68q" event={"ID":"c023fe8e-e5d7-4be8-b967-b95715528cb8","Type":"ContainerDied","Data":"3af042006ded897f74a75b04ed639dc0bc2131f0871e63d5423435ecd4c04ef4"} Feb 03 08:23:39 crc kubenswrapper[4998]: I0203 08:23:39.197632 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gl68q" Feb 03 08:23:39 crc kubenswrapper[4998]: I0203 08:23:39.197645 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gl68q" event={"ID":"c023fe8e-e5d7-4be8-b967-b95715528cb8","Type":"ContainerDied","Data":"f2c3f168ef50a0b519ae142905a4733dd5e38441d703ee501686d114dcc0dff0"} Feb 03 08:23:39 crc kubenswrapper[4998]: I0203 08:23:39.197666 4998 scope.go:117] "RemoveContainer" containerID="3af042006ded897f74a75b04ed639dc0bc2131f0871e63d5423435ecd4c04ef4" Feb 03 08:23:39 crc kubenswrapper[4998]: I0203 08:23:39.218390 4998 scope.go:117] "RemoveContainer" containerID="177fe29fccca4356b71afd6d156f1144d2b2b41af72a6750dd20a2ea76ea259b" Feb 03 08:23:39 crc kubenswrapper[4998]: I0203 08:23:39.238648 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gl68q"] Feb 03 08:23:39 crc kubenswrapper[4998]: I0203 08:23:39.249095 4998 scope.go:117] "RemoveContainer" containerID="92664dbd150d0f210297004156c68ecc5121541269ac8b2b097faf73d7d121e6" Feb 03 08:23:39 crc kubenswrapper[4998]: I0203 08:23:39.256176 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-gl68q"] Feb 03 08:23:39 crc kubenswrapper[4998]: I0203 08:23:39.279341 4998 scope.go:117] "RemoveContainer" containerID="3af042006ded897f74a75b04ed639dc0bc2131f0871e63d5423435ecd4c04ef4" Feb 03 08:23:39 crc kubenswrapper[4998]: E0203 08:23:39.279898 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3af042006ded897f74a75b04ed639dc0bc2131f0871e63d5423435ecd4c04ef4\": container with ID starting with 3af042006ded897f74a75b04ed639dc0bc2131f0871e63d5423435ecd4c04ef4 not found: ID does not exist" containerID="3af042006ded897f74a75b04ed639dc0bc2131f0871e63d5423435ecd4c04ef4" Feb 03 08:23:39 crc kubenswrapper[4998]: I0203 08:23:39.279934 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3af042006ded897f74a75b04ed639dc0bc2131f0871e63d5423435ecd4c04ef4"} err="failed to get container status \"3af042006ded897f74a75b04ed639dc0bc2131f0871e63d5423435ecd4c04ef4\": rpc error: code = NotFound desc = could not find container \"3af042006ded897f74a75b04ed639dc0bc2131f0871e63d5423435ecd4c04ef4\": container with ID starting with 3af042006ded897f74a75b04ed639dc0bc2131f0871e63d5423435ecd4c04ef4 not found: ID does not exist" Feb 03 08:23:39 crc kubenswrapper[4998]: I0203 08:23:39.279986 4998 scope.go:117] "RemoveContainer" containerID="177fe29fccca4356b71afd6d156f1144d2b2b41af72a6750dd20a2ea76ea259b" Feb 03 08:23:39 crc kubenswrapper[4998]: E0203 08:23:39.280494 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"177fe29fccca4356b71afd6d156f1144d2b2b41af72a6750dd20a2ea76ea259b\": container with ID starting with 177fe29fccca4356b71afd6d156f1144d2b2b41af72a6750dd20a2ea76ea259b not found: ID does not exist" containerID="177fe29fccca4356b71afd6d156f1144d2b2b41af72a6750dd20a2ea76ea259b" Feb 03 08:23:39 crc kubenswrapper[4998]: I0203 08:23:39.280545 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"177fe29fccca4356b71afd6d156f1144d2b2b41af72a6750dd20a2ea76ea259b"} err="failed to get container status \"177fe29fccca4356b71afd6d156f1144d2b2b41af72a6750dd20a2ea76ea259b\": rpc error: code = NotFound desc = could not find 
container \"177fe29fccca4356b71afd6d156f1144d2b2b41af72a6750dd20a2ea76ea259b\": container with ID starting with 177fe29fccca4356b71afd6d156f1144d2b2b41af72a6750dd20a2ea76ea259b not found: ID does not exist" Feb 03 08:23:39 crc kubenswrapper[4998]: I0203 08:23:39.280579 4998 scope.go:117] "RemoveContainer" containerID="92664dbd150d0f210297004156c68ecc5121541269ac8b2b097faf73d7d121e6" Feb 03 08:23:39 crc kubenswrapper[4998]: E0203 08:23:39.280950 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"92664dbd150d0f210297004156c68ecc5121541269ac8b2b097faf73d7d121e6\": container with ID starting with 92664dbd150d0f210297004156c68ecc5121541269ac8b2b097faf73d7d121e6 not found: ID does not exist" containerID="92664dbd150d0f210297004156c68ecc5121541269ac8b2b097faf73d7d121e6" Feb 03 08:23:39 crc kubenswrapper[4998]: I0203 08:23:39.280978 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92664dbd150d0f210297004156c68ecc5121541269ac8b2b097faf73d7d121e6"} err="failed to get container status \"92664dbd150d0f210297004156c68ecc5121541269ac8b2b097faf73d7d121e6\": rpc error: code = NotFound desc = could not find container \"92664dbd150d0f210297004156c68ecc5121541269ac8b2b097faf73d7d121e6\": container with ID starting with 92664dbd150d0f210297004156c68ecc5121541269ac8b2b097faf73d7d121e6 not found: ID does not exist" Feb 03 08:23:40 crc kubenswrapper[4998]: I0203 08:23:40.437964 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c023fe8e-e5d7-4be8-b967-b95715528cb8" path="/var/lib/kubelet/pods/c023fe8e-e5d7-4be8-b967-b95715528cb8/volumes" Feb 03 08:24:12 crc kubenswrapper[4998]: I0203 08:24:12.754316 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 08:24:12 crc kubenswrapper[4998]: I0203 08:24:12.754939 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 08:24:42 crc kubenswrapper[4998]: I0203 08:24:42.754138 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 08:24:42 crc kubenswrapper[4998]: I0203 08:24:42.754755 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 08:25:10 crc kubenswrapper[4998]: I0203 08:25:10.611694 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-4chts"] Feb 03 08:25:10 crc kubenswrapper[4998]: E0203 08:25:10.612636 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c023fe8e-e5d7-4be8-b967-b95715528cb8" 
containerName="extract-utilities" Feb 03 08:25:10 crc kubenswrapper[4998]: I0203 08:25:10.612655 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="c023fe8e-e5d7-4be8-b967-b95715528cb8" containerName="extract-utilities" Feb 03 08:25:10 crc kubenswrapper[4998]: E0203 08:25:10.612692 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c023fe8e-e5d7-4be8-b967-b95715528cb8" containerName="extract-content" Feb 03 08:25:10 crc kubenswrapper[4998]: I0203 08:25:10.612700 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="c023fe8e-e5d7-4be8-b967-b95715528cb8" containerName="extract-content" Feb 03 08:25:10 crc kubenswrapper[4998]: E0203 08:25:10.612714 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c023fe8e-e5d7-4be8-b967-b95715528cb8" containerName="registry-server" Feb 03 08:25:10 crc kubenswrapper[4998]: I0203 08:25:10.612721 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="c023fe8e-e5d7-4be8-b967-b95715528cb8" containerName="registry-server" Feb 03 08:25:10 crc kubenswrapper[4998]: I0203 08:25:10.612925 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="c023fe8e-e5d7-4be8-b967-b95715528cb8" containerName="registry-server" Feb 03 08:25:10 crc kubenswrapper[4998]: I0203 08:25:10.614220 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4chts" Feb 03 08:25:10 crc kubenswrapper[4998]: I0203 08:25:10.625511 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4chts"] Feb 03 08:25:10 crc kubenswrapper[4998]: I0203 08:25:10.687255 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f-utilities\") pod \"redhat-operators-4chts\" (UID: \"cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f\") " pod="openshift-marketplace/redhat-operators-4chts" Feb 03 08:25:10 crc kubenswrapper[4998]: I0203 08:25:10.687319 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f-catalog-content\") pod \"redhat-operators-4chts\" (UID: \"cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f\") " pod="openshift-marketplace/redhat-operators-4chts" Feb 03 08:25:10 crc kubenswrapper[4998]: I0203 08:25:10.687341 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjds9\" (UniqueName: \"kubernetes.io/projected/cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f-kube-api-access-hjds9\") pod \"redhat-operators-4chts\" (UID: \"cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f\") " pod="openshift-marketplace/redhat-operators-4chts" Feb 03 08:25:10 crc kubenswrapper[4998]: I0203 08:25:10.788848 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f-utilities\") pod \"redhat-operators-4chts\" (UID: \"cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f\") " pod="openshift-marketplace/redhat-operators-4chts" Feb 03 08:25:10 crc kubenswrapper[4998]: I0203 08:25:10.788936 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f-catalog-content\") pod \"redhat-operators-4chts\" (UID: \"cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f\") " 
pod="openshift-marketplace/redhat-operators-4chts" Feb 03 08:25:10 crc kubenswrapper[4998]: I0203 08:25:10.788973 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjds9\" (UniqueName: \"kubernetes.io/projected/cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f-kube-api-access-hjds9\") pod \"redhat-operators-4chts\" (UID: \"cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f\") " pod="openshift-marketplace/redhat-operators-4chts" Feb 03 08:25:10 crc kubenswrapper[4998]: I0203 08:25:10.789525 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f-utilities\") pod \"redhat-operators-4chts\" (UID: \"cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f\") " pod="openshift-marketplace/redhat-operators-4chts" Feb 03 08:25:10 crc kubenswrapper[4998]: I0203 08:25:10.789525 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f-catalog-content\") pod \"redhat-operators-4chts\" (UID: \"cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f\") " pod="openshift-marketplace/redhat-operators-4chts" Feb 03 08:25:10 crc kubenswrapper[4998]: I0203 08:25:10.811771 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjds9\" (UniqueName: \"kubernetes.io/projected/cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f-kube-api-access-hjds9\") pod \"redhat-operators-4chts\" (UID: \"cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f\") " pod="openshift-marketplace/redhat-operators-4chts" Feb 03 08:25:10 crc kubenswrapper[4998]: I0203 08:25:10.937830 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4chts" Feb 03 08:25:11 crc kubenswrapper[4998]: I0203 08:25:11.423798 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4chts"] Feb 03 08:25:11 crc kubenswrapper[4998]: I0203 08:25:11.854981 4998 generic.go:334] "Generic (PLEG): container finished" podID="cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f" containerID="6bb78cf7a7993a4a6f3a084e069093fdc5f91c6bce4a1a1ffa22107faa2184b7" exitCode=0 Feb 03 08:25:11 crc kubenswrapper[4998]: I0203 08:25:11.855333 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4chts" event={"ID":"cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f","Type":"ContainerDied","Data":"6bb78cf7a7993a4a6f3a084e069093fdc5f91c6bce4a1a1ffa22107faa2184b7"} Feb 03 08:25:11 crc kubenswrapper[4998]: I0203 08:25:11.855418 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4chts" event={"ID":"cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f","Type":"ContainerStarted","Data":"ba9f940bd7c8ee49d7030038f920319c8aace81ee7b62da597bccdca950a7201"} Feb 03 08:25:12 crc kubenswrapper[4998]: I0203 08:25:12.754177 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 08:25:12 crc kubenswrapper[4998]: I0203 08:25:12.754234 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": 
Feb 03 08:25:12 crc kubenswrapper[4998]: I0203 08:25:12.754272 4998 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x"
Feb 03 08:25:12 crc kubenswrapper[4998]: I0203 08:25:12.754742 4998 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"898cd48af269365b509b96f6cec85784df08eaf849650d4d9d793db3638ff97d"} pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Feb 03 08:25:12 crc kubenswrapper[4998]: I0203 08:25:12.754819 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" containerID="cri-o://898cd48af269365b509b96f6cec85784df08eaf849650d4d9d793db3638ff97d" gracePeriod=600
Feb 03 08:25:12 crc kubenswrapper[4998]: I0203 08:25:12.868878 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4chts" event={"ID":"cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f","Type":"ContainerStarted","Data":"aebfbc119762a80772bd022b78fb640d3dd36c3af50ad60792315f949b80b263"}
Feb 03 08:25:12 crc kubenswrapper[4998]: E0203 08:25:12.878232 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:25:13 crc kubenswrapper[4998]: I0203 08:25:13.878343 4998 generic.go:334] "Generic (PLEG): container finished" podID="cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f" containerID="aebfbc119762a80772bd022b78fb640d3dd36c3af50ad60792315f949b80b263" exitCode=0
Feb 03 08:25:13 crc kubenswrapper[4998]: I0203 08:25:13.878740 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4chts" event={"ID":"cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f","Type":"ContainerDied","Data":"aebfbc119762a80772bd022b78fb640d3dd36c3af50ad60792315f949b80b263"}
Feb 03 08:25:13 crc kubenswrapper[4998]: I0203 08:25:13.880814 4998 generic.go:334] "Generic (PLEG): container finished" podID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerID="898cd48af269365b509b96f6cec85784df08eaf849650d4d9d793db3638ff97d" exitCode=0
Feb 03 08:25:13 crc kubenswrapper[4998]: I0203 08:25:13.880860 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerDied","Data":"898cd48af269365b509b96f6cec85784df08eaf849650d4d9d793db3638ff97d"}
Feb 03 08:25:13 crc kubenswrapper[4998]: I0203 08:25:13.880900 4998 scope.go:117] "RemoveContainer" containerID="41ad7bac26c2a9d11cf1a118bd1e922df62ba805774b610a67cdfe89ccdb9592"
Feb 03 08:25:13 crc kubenswrapper[4998]: I0203 08:25:13.881566 4998 scope.go:117] "RemoveContainer" containerID="898cd48af269365b509b96f6cec85784df08eaf849650d4d9d793db3638ff97d"
Feb 03 08:25:13 crc kubenswrapper[4998]: E0203 08:25:13.881820 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:25:14 crc kubenswrapper[4998]: I0203 08:25:14.894817 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4chts" event={"ID":"cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f","Type":"ContainerStarted","Data":"78c4a2fec224bef0f8058e48e0d2115b9e33f0601605da5ca79380949670c665"}
Feb 03 08:25:14 crc kubenswrapper[4998]: I0203 08:25:14.914866 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-4chts" podStartSLOduration=2.370961784 podStartE2EDuration="4.914844402s" podCreationTimestamp="2026-02-03 08:25:10 +0000 UTC" firstStartedPulling="2026-02-03 08:25:11.856853322 +0000 UTC m=+5950.143547128" lastFinishedPulling="2026-02-03 08:25:14.40073593 +0000 UTC m=+5952.687429746" observedRunningTime="2026-02-03 08:25:14.912203567 +0000 UTC m=+5953.198897393" watchObservedRunningTime="2026-02-03 08:25:14.914844402 +0000 UTC m=+5953.201538208"
Feb 03 08:25:20 crc kubenswrapper[4998]: I0203 08:25:20.938802 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-4chts"
Feb 03 08:25:20 crc kubenswrapper[4998]: I0203 08:25:20.940011 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-4chts"
Feb 03 08:25:20 crc kubenswrapper[4998]: I0203 08:25:20.991252 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-4chts"
Feb 03 08:25:21 crc kubenswrapper[4998]: I0203 08:25:21.985344 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-4chts"
Feb 03 08:25:22 crc kubenswrapper[4998]: I0203 08:25:22.031180 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4chts"]
Feb 03 08:25:23 crc kubenswrapper[4998]: I0203 08:25:23.957082 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-4chts" podUID="cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f" containerName="registry-server" containerID="cri-o://78c4a2fec224bef0f8058e48e0d2115b9e33f0601605da5ca79380949670c665" gracePeriod=2
Feb 03 08:25:24 crc kubenswrapper[4998]: I0203 08:25:24.965595 4998 generic.go:334] "Generic (PLEG): container finished" podID="cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f" containerID="78c4a2fec224bef0f8058e48e0d2115b9e33f0601605da5ca79380949670c665" exitCode=0
Feb 03 08:25:24 crc kubenswrapper[4998]: I0203 08:25:24.965668 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4chts" event={"ID":"cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f","Type":"ContainerDied","Data":"78c4a2fec224bef0f8058e48e0d2115b9e33f0601605da5ca79380949670c665"}
Feb 03 08:25:25 crc kubenswrapper[4998]: I0203 08:25:25.427423 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4chts"
Feb 03 08:25:25 crc kubenswrapper[4998]: I0203 08:25:25.506771 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f-catalog-content\") pod \"cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f\" (UID: \"cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f\") "
Feb 03 08:25:25 crc kubenswrapper[4998]: I0203 08:25:25.507004 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hjds9\" (UniqueName: \"kubernetes.io/projected/cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f-kube-api-access-hjds9\") pod \"cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f\" (UID: \"cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f\") "
Feb 03 08:25:25 crc kubenswrapper[4998]: I0203 08:25:25.507246 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f-utilities\") pod \"cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f\" (UID: \"cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f\") "
Feb 03 08:25:25 crc kubenswrapper[4998]: I0203 08:25:25.508327 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f-utilities" (OuterVolumeSpecName: "utilities") pod "cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f" (UID: "cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 08:25:25 crc kubenswrapper[4998]: I0203 08:25:25.508870 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f-utilities\") on node \"crc\" DevicePath \"\""
Feb 03 08:25:25 crc kubenswrapper[4998]: I0203 08:25:25.520935 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f-kube-api-access-hjds9" (OuterVolumeSpecName: "kube-api-access-hjds9") pod "cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f" (UID: "cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f"). InnerVolumeSpecName "kube-api-access-hjds9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 08:25:25 crc kubenswrapper[4998]: I0203 08:25:25.610062 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hjds9\" (UniqueName: \"kubernetes.io/projected/cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f-kube-api-access-hjds9\") on node \"crc\" DevicePath \"\""
Feb 03 08:25:25 crc kubenswrapper[4998]: I0203 08:25:25.628026 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f" (UID: "cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 08:25:25 crc kubenswrapper[4998]: I0203 08:25:25.711732 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 03 08:25:25 crc kubenswrapper[4998]: I0203 08:25:25.973952 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4chts" event={"ID":"cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f","Type":"ContainerDied","Data":"ba9f940bd7c8ee49d7030038f920319c8aace81ee7b62da597bccdca950a7201"}
Feb 03 08:25:25 crc kubenswrapper[4998]: I0203 08:25:25.974003 4998 scope.go:117] "RemoveContainer" containerID="78c4a2fec224bef0f8058e48e0d2115b9e33f0601605da5ca79380949670c665"
Feb 03 08:25:25 crc kubenswrapper[4998]: I0203 08:25:25.974003 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4chts"
Feb 03 08:25:25 crc kubenswrapper[4998]: I0203 08:25:25.990656 4998 scope.go:117] "RemoveContainer" containerID="aebfbc119762a80772bd022b78fb640d3dd36c3af50ad60792315f949b80b263"
Feb 03 08:25:26 crc kubenswrapper[4998]: I0203 08:25:26.003679 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4chts"]
Feb 03 08:25:26 crc kubenswrapper[4998]: I0203 08:25:26.008401 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-4chts"]
Feb 03 08:25:26 crc kubenswrapper[4998]: I0203 08:25:26.024011 4998 scope.go:117] "RemoveContainer" containerID="6bb78cf7a7993a4a6f3a084e069093fdc5f91c6bce4a1a1ffa22107faa2184b7"
Feb 03 08:25:26 crc kubenswrapper[4998]: I0203 08:25:26.428125 4998 scope.go:117] "RemoveContainer" containerID="898cd48af269365b509b96f6cec85784df08eaf849650d4d9d793db3638ff97d"
Feb 03 08:25:26 crc kubenswrapper[4998]: E0203 08:25:26.428523 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:25:26 crc kubenswrapper[4998]: I0203 08:25:26.435845 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f" path="/var/lib/kubelet/pods/cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f/volumes"
Feb 03 08:25:41 crc kubenswrapper[4998]: I0203 08:25:41.427295 4998 scope.go:117] "RemoveContainer" containerID="898cd48af269365b509b96f6cec85784df08eaf849650d4d9d793db3638ff97d"
Feb 03 08:25:41 crc kubenswrapper[4998]: E0203 08:25:41.428074 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:25:52 crc kubenswrapper[4998]: I0203 08:25:52.432480 4998 scope.go:117] "RemoveContainer" containerID="898cd48af269365b509b96f6cec85784df08eaf849650d4d9d793db3638ff97d"
Feb 03 08:25:52 crc kubenswrapper[4998]: E0203 08:25:52.433353 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:26:05 crc kubenswrapper[4998]: I0203 08:26:05.428066 4998 scope.go:117] "RemoveContainer" containerID="898cd48af269365b509b96f6cec85784df08eaf849650d4d9d793db3638ff97d"
Feb 03 08:26:05 crc kubenswrapper[4998]: E0203 08:26:05.428993 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:26:20 crc kubenswrapper[4998]: I0203 08:26:20.428073 4998 scope.go:117] "RemoveContainer" containerID="898cd48af269365b509b96f6cec85784df08eaf849650d4d9d793db3638ff97d"
Feb 03 08:26:20 crc kubenswrapper[4998]: E0203 08:26:20.429022 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:26:33 crc kubenswrapper[4998]: I0203 08:26:33.427373 4998 scope.go:117] "RemoveContainer" containerID="898cd48af269365b509b96f6cec85784df08eaf849650d4d9d793db3638ff97d"
Feb 03 08:26:33 crc kubenswrapper[4998]: E0203 08:26:33.428141 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:26:47 crc kubenswrapper[4998]: I0203 08:26:47.427145 4998 scope.go:117] "RemoveContainer" containerID="898cd48af269365b509b96f6cec85784df08eaf849650d4d9d793db3638ff97d"
Feb 03 08:26:47 crc kubenswrapper[4998]: E0203 08:26:47.427822 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:26:59 crc kubenswrapper[4998]: I0203 08:26:59.427537 4998 scope.go:117] "RemoveContainer" containerID="898cd48af269365b509b96f6cec85784df08eaf849650d4d9d793db3638ff97d"
Feb 03 08:26:59 crc kubenswrapper[4998]: E0203 08:26:59.428576 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:27:14 crc kubenswrapper[4998]: I0203 08:27:14.427871 4998 scope.go:117] "RemoveContainer" containerID="898cd48af269365b509b96f6cec85784df08eaf849650d4d9d793db3638ff97d"
Feb 03 08:27:14 crc kubenswrapper[4998]: E0203 08:27:14.428607 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:27:29 crc kubenswrapper[4998]: I0203 08:27:29.429852 4998 scope.go:117] "RemoveContainer" containerID="898cd48af269365b509b96f6cec85784df08eaf849650d4d9d793db3638ff97d"
Feb 03 08:27:29 crc kubenswrapper[4998]: E0203 08:27:29.430523 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:27:43 crc kubenswrapper[4998]: I0203 08:27:43.428832 4998 scope.go:117] "RemoveContainer" containerID="898cd48af269365b509b96f6cec85784df08eaf849650d4d9d793db3638ff97d"
Feb 03 08:27:43 crc kubenswrapper[4998]: E0203 08:27:43.430085 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:27:54 crc kubenswrapper[4998]: I0203 08:27:54.427953 4998 scope.go:117] "RemoveContainer" containerID="898cd48af269365b509b96f6cec85784df08eaf849650d4d9d793db3638ff97d"
Feb 03 08:27:54 crc kubenswrapper[4998]: E0203 08:27:54.429656 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:28:05 crc kubenswrapper[4998]: I0203 08:28:05.427611 4998 scope.go:117] "RemoveContainer" containerID="898cd48af269365b509b96f6cec85784df08eaf849650d4d9d793db3638ff97d"
Feb 03 08:28:05 crc kubenswrapper[4998]: E0203 08:28:05.428417 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:28:16 crc kubenswrapper[4998]: I0203 08:28:16.429604 4998 scope.go:117] "RemoveContainer" containerID="898cd48af269365b509b96f6cec85784df08eaf849650d4d9d793db3638ff97d"
Feb 03 08:28:16 crc kubenswrapper[4998]: E0203 08:28:16.430291 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:28:28 crc kubenswrapper[4998]: I0203 08:28:28.428070 4998 scope.go:117] "RemoveContainer" containerID="898cd48af269365b509b96f6cec85784df08eaf849650d4d9d793db3638ff97d"
Feb 03 08:28:28 crc kubenswrapper[4998]: E0203 08:28:28.428600 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:28:42 crc kubenswrapper[4998]: I0203 08:28:42.432403 4998 scope.go:117] "RemoveContainer" containerID="898cd48af269365b509b96f6cec85784df08eaf849650d4d9d793db3638ff97d"
Feb 03 08:28:42 crc kubenswrapper[4998]: E0203 08:28:42.433160 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:28:56 crc kubenswrapper[4998]: I0203 08:28:56.427625 4998 scope.go:117] "RemoveContainer" containerID="898cd48af269365b509b96f6cec85784df08eaf849650d4d9d793db3638ff97d"
Feb 03 08:28:56 crc kubenswrapper[4998]: E0203 08:28:56.428421 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:29:07 crc kubenswrapper[4998]: I0203 08:29:07.428207 4998 scope.go:117] "RemoveContainer" containerID="898cd48af269365b509b96f6cec85784df08eaf849650d4d9d793db3638ff97d"
Feb 03 08:29:07 crc kubenswrapper[4998]: E0203 08:29:07.428982 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:29:19 crc kubenswrapper[4998]: I0203 08:29:19.427129 4998 scope.go:117] "RemoveContainer" containerID="898cd48af269365b509b96f6cec85784df08eaf849650d4d9d793db3638ff97d"
Feb 03 08:29:19 crc kubenswrapper[4998]: E0203 08:29:19.427901 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:29:30 crc kubenswrapper[4998]: I0203 08:29:30.427450 4998 scope.go:117] "RemoveContainer" containerID="898cd48af269365b509b96f6cec85784df08eaf849650d4d9d793db3638ff97d"
Feb 03 08:29:30 crc kubenswrapper[4998]: E0203 08:29:30.429365 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:29:43 crc kubenswrapper[4998]: I0203 08:29:43.427284 4998 scope.go:117] "RemoveContainer" containerID="898cd48af269365b509b96f6cec85784df08eaf849650d4d9d793db3638ff97d"
Feb 03 08:29:43 crc kubenswrapper[4998]: E0203 08:29:43.428085 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:29:55 crc kubenswrapper[4998]: I0203 08:29:55.427330 4998 scope.go:117] "RemoveContainer" containerID="898cd48af269365b509b96f6cec85784df08eaf849650d4d9d793db3638ff97d"
Feb 03 08:29:55 crc kubenswrapper[4998]: E0203 08:29:55.428035 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:30:00 crc kubenswrapper[4998]: I0203 08:30:00.151229 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501790-q8nwj"]
Feb 03 08:30:00 crc kubenswrapper[4998]: E0203 08:30:00.152079 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f" containerName="registry-server"
Feb 03 08:30:00 crc kubenswrapper[4998]: I0203 08:30:00.152135 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f" containerName="registry-server"
Feb 03 08:30:00 crc kubenswrapper[4998]: E0203 08:30:00.152164 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f" containerName="extract-content"
Feb 03 08:30:00 crc kubenswrapper[4998]: I0203 08:30:00.152174 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f" containerName="extract-content"
Feb 03 08:30:00 crc kubenswrapper[4998]: E0203 08:30:00.152186 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f" containerName="extract-utilities"
Feb 03 08:30:00 crc kubenswrapper[4998]: I0203 08:30:00.152196 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f" containerName="extract-utilities"
Feb 03 08:30:00 crc kubenswrapper[4998]: I0203 08:30:00.152380 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb33cbd4-a73f-4f6e-aebe-cd1f1160a55f" containerName="registry-server"
Feb 03 08:30:00 crc kubenswrapper[4998]: I0203 08:30:00.153102 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501790-q8nwj"
Feb 03 08:30:00 crc kubenswrapper[4998]: I0203 08:30:00.185460 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Feb 03 08:30:00 crc kubenswrapper[4998]: I0203 08:30:00.185564 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Feb 03 08:30:00 crc kubenswrapper[4998]: I0203 08:30:00.191178 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8vsjq\" (UniqueName: \"kubernetes.io/projected/6b547766-0acd-47b5-b97c-29bc1386cf48-kube-api-access-8vsjq\") pod \"collect-profiles-29501790-q8nwj\" (UID: \"6b547766-0acd-47b5-b97c-29bc1386cf48\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501790-q8nwj"
Feb 03 08:30:00 crc kubenswrapper[4998]: I0203 08:30:00.191321 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6b547766-0acd-47b5-b97c-29bc1386cf48-secret-volume\") pod \"collect-profiles-29501790-q8nwj\" (UID: \"6b547766-0acd-47b5-b97c-29bc1386cf48\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501790-q8nwj"
Feb 03 08:30:00 crc kubenswrapper[4998]: I0203 08:30:00.191358 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6b547766-0acd-47b5-b97c-29bc1386cf48-config-volume\") pod \"collect-profiles-29501790-q8nwj\" (UID: \"6b547766-0acd-47b5-b97c-29bc1386cf48\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501790-q8nwj"
Feb 03 08:30:00 crc kubenswrapper[4998]: I0203 08:30:00.210058 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501790-q8nwj"]
Feb 03 08:30:00 crc kubenswrapper[4998]: I0203 08:30:00.294105 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6b547766-0acd-47b5-b97c-29bc1386cf48-secret-volume\") pod \"collect-profiles-29501790-q8nwj\" (UID: \"6b547766-0acd-47b5-b97c-29bc1386cf48\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501790-q8nwj"
Feb 03 08:30:00 crc kubenswrapper[4998]: I0203 08:30:00.294148 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6b547766-0acd-47b5-b97c-29bc1386cf48-config-volume\") pod \"collect-profiles-29501790-q8nwj\" (UID: \"6b547766-0acd-47b5-b97c-29bc1386cf48\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501790-q8nwj"
\"6b547766-0acd-47b5-b97c-29bc1386cf48\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501790-q8nwj" Feb 03 08:30:00 crc kubenswrapper[4998]: I0203 08:30:00.294236 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8vsjq\" (UniqueName: \"kubernetes.io/projected/6b547766-0acd-47b5-b97c-29bc1386cf48-kube-api-access-8vsjq\") pod \"collect-profiles-29501790-q8nwj\" (UID: \"6b547766-0acd-47b5-b97c-29bc1386cf48\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501790-q8nwj" Feb 03 08:30:00 crc kubenswrapper[4998]: I0203 08:30:00.295466 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6b547766-0acd-47b5-b97c-29bc1386cf48-config-volume\") pod \"collect-profiles-29501790-q8nwj\" (UID: \"6b547766-0acd-47b5-b97c-29bc1386cf48\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501790-q8nwj" Feb 03 08:30:00 crc kubenswrapper[4998]: I0203 08:30:00.309389 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6b547766-0acd-47b5-b97c-29bc1386cf48-secret-volume\") pod \"collect-profiles-29501790-q8nwj\" (UID: \"6b547766-0acd-47b5-b97c-29bc1386cf48\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501790-q8nwj" Feb 03 08:30:00 crc kubenswrapper[4998]: I0203 08:30:00.312306 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8vsjq\" (UniqueName: \"kubernetes.io/projected/6b547766-0acd-47b5-b97c-29bc1386cf48-kube-api-access-8vsjq\") pod \"collect-profiles-29501790-q8nwj\" (UID: \"6b547766-0acd-47b5-b97c-29bc1386cf48\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501790-q8nwj" Feb 03 08:30:00 crc kubenswrapper[4998]: I0203 08:30:00.519338 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501790-q8nwj" Feb 03 08:30:00 crc kubenswrapper[4998]: I0203 08:30:00.963048 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501790-q8nwj"] Feb 03 08:30:01 crc kubenswrapper[4998]: I0203 08:30:01.604738 4998 generic.go:334] "Generic (PLEG): container finished" podID="6b547766-0acd-47b5-b97c-29bc1386cf48" containerID="e52b47a773e311dcc1ef48bb0733087704e028407ac7a925de8f990b9c8d4273" exitCode=0 Feb 03 08:30:01 crc kubenswrapper[4998]: I0203 08:30:01.604799 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501790-q8nwj" event={"ID":"6b547766-0acd-47b5-b97c-29bc1386cf48","Type":"ContainerDied","Data":"e52b47a773e311dcc1ef48bb0733087704e028407ac7a925de8f990b9c8d4273"} Feb 03 08:30:01 crc kubenswrapper[4998]: I0203 08:30:01.605071 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501790-q8nwj" event={"ID":"6b547766-0acd-47b5-b97c-29bc1386cf48","Type":"ContainerStarted","Data":"15a5ec9e915d6985c436f52fed78c7387aacaf1756b78a12467062840b84d63c"} Feb 03 08:30:02 crc kubenswrapper[4998]: I0203 08:30:02.906544 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501790-q8nwj" Feb 03 08:30:03 crc kubenswrapper[4998]: I0203 08:30:03.031191 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6b547766-0acd-47b5-b97c-29bc1386cf48-config-volume\") pod \"6b547766-0acd-47b5-b97c-29bc1386cf48\" (UID: \"6b547766-0acd-47b5-b97c-29bc1386cf48\") " Feb 03 08:30:03 crc kubenswrapper[4998]: I0203 08:30:03.031248 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8vsjq\" (UniqueName: \"kubernetes.io/projected/6b547766-0acd-47b5-b97c-29bc1386cf48-kube-api-access-8vsjq\") pod \"6b547766-0acd-47b5-b97c-29bc1386cf48\" (UID: \"6b547766-0acd-47b5-b97c-29bc1386cf48\") " Feb 03 08:30:03 crc kubenswrapper[4998]: I0203 08:30:03.031294 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6b547766-0acd-47b5-b97c-29bc1386cf48-secret-volume\") pod \"6b547766-0acd-47b5-b97c-29bc1386cf48\" (UID: \"6b547766-0acd-47b5-b97c-29bc1386cf48\") " Feb 03 08:30:03 crc kubenswrapper[4998]: I0203 08:30:03.031632 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6b547766-0acd-47b5-b97c-29bc1386cf48-config-volume" (OuterVolumeSpecName: "config-volume") pod "6b547766-0acd-47b5-b97c-29bc1386cf48" (UID: "6b547766-0acd-47b5-b97c-29bc1386cf48"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:30:03 crc kubenswrapper[4998]: I0203 08:30:03.036305 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b547766-0acd-47b5-b97c-29bc1386cf48-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "6b547766-0acd-47b5-b97c-29bc1386cf48" (UID: "6b547766-0acd-47b5-b97c-29bc1386cf48"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:30:03 crc kubenswrapper[4998]: I0203 08:30:03.037139 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b547766-0acd-47b5-b97c-29bc1386cf48-kube-api-access-8vsjq" (OuterVolumeSpecName: "kube-api-access-8vsjq") pod "6b547766-0acd-47b5-b97c-29bc1386cf48" (UID: "6b547766-0acd-47b5-b97c-29bc1386cf48"). InnerVolumeSpecName "kube-api-access-8vsjq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:30:03 crc kubenswrapper[4998]: I0203 08:30:03.132406 4998 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6b547766-0acd-47b5-b97c-29bc1386cf48-config-volume\") on node \"crc\" DevicePath \"\"" Feb 03 08:30:03 crc kubenswrapper[4998]: I0203 08:30:03.132458 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8vsjq\" (UniqueName: \"kubernetes.io/projected/6b547766-0acd-47b5-b97c-29bc1386cf48-kube-api-access-8vsjq\") on node \"crc\" DevicePath \"\"" Feb 03 08:30:03 crc kubenswrapper[4998]: I0203 08:30:03.132476 4998 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6b547766-0acd-47b5-b97c-29bc1386cf48-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 03 08:30:03 crc kubenswrapper[4998]: I0203 08:30:03.619196 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501790-q8nwj" event={"ID":"6b547766-0acd-47b5-b97c-29bc1386cf48","Type":"ContainerDied","Data":"15a5ec9e915d6985c436f52fed78c7387aacaf1756b78a12467062840b84d63c"} Feb 03 08:30:03 crc kubenswrapper[4998]: I0203 08:30:03.619241 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="15a5ec9e915d6985c436f52fed78c7387aacaf1756b78a12467062840b84d63c" Feb 03 08:30:03 crc kubenswrapper[4998]: I0203 08:30:03.619276 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501790-q8nwj" Feb 03 08:30:03 crc kubenswrapper[4998]: I0203 08:30:03.988304 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501745-klt7g"] Feb 03 08:30:03 crc kubenswrapper[4998]: I0203 08:30:03.994425 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501745-klt7g"] Feb 03 08:30:04 crc kubenswrapper[4998]: I0203 08:30:04.437438 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55f98dda-91a3-458f-9678-a547da6710b6" path="/var/lib/kubelet/pods/55f98dda-91a3-458f-9678-a547da6710b6/volumes" Feb 03 08:30:06 crc kubenswrapper[4998]: I0203 08:30:06.427668 4998 scope.go:117] "RemoveContainer" containerID="898cd48af269365b509b96f6cec85784df08eaf849650d4d9d793db3638ff97d" Feb 03 08:30:06 crc kubenswrapper[4998]: E0203 08:30:06.428112 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:30:07 crc kubenswrapper[4998]: I0203 08:30:07.753876 4998 scope.go:117] "RemoveContainer" containerID="8ab0516c7335841f7c6eff7492237ec5aa64c04a2c50c48514d16db82f9601fe" Feb 03 08:30:21 crc kubenswrapper[4998]: I0203 08:30:21.428489 4998 scope.go:117] "RemoveContainer" containerID="898cd48af269365b509b96f6cec85784df08eaf849650d4d9d793db3638ff97d" Feb 03 08:30:21 crc kubenswrapper[4998]: I0203 08:30:21.777194 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" 
event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerStarted","Data":"b31c709b422a270b2cf6ad9da60b503550a133b2bc889255c1a5bd00ca0f9e9c"} Feb 03 08:32:20 crc kubenswrapper[4998]: I0203 08:32:20.795761 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-jbkb7"] Feb 03 08:32:20 crc kubenswrapper[4998]: E0203 08:32:20.796442 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b547766-0acd-47b5-b97c-29bc1386cf48" containerName="collect-profiles" Feb 03 08:32:20 crc kubenswrapper[4998]: I0203 08:32:20.796454 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b547766-0acd-47b5-b97c-29bc1386cf48" containerName="collect-profiles" Feb 03 08:32:20 crc kubenswrapper[4998]: I0203 08:32:20.796579 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b547766-0acd-47b5-b97c-29bc1386cf48" containerName="collect-profiles" Feb 03 08:32:20 crc kubenswrapper[4998]: I0203 08:32:20.801549 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jbkb7" Feb 03 08:32:20 crc kubenswrapper[4998]: I0203 08:32:20.819969 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jbkb7"] Feb 03 08:32:20 crc kubenswrapper[4998]: I0203 08:32:20.948514 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5f5r\" (UniqueName: \"kubernetes.io/projected/3fbc06a3-1332-4698-ba6f-4ac16830f999-kube-api-access-d5f5r\") pod \"certified-operators-jbkb7\" (UID: \"3fbc06a3-1332-4698-ba6f-4ac16830f999\") " pod="openshift-marketplace/certified-operators-jbkb7" Feb 03 08:32:20 crc kubenswrapper[4998]: I0203 08:32:20.948628 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3fbc06a3-1332-4698-ba6f-4ac16830f999-utilities\") pod \"certified-operators-jbkb7\" (UID: \"3fbc06a3-1332-4698-ba6f-4ac16830f999\") " pod="openshift-marketplace/certified-operators-jbkb7" Feb 03 08:32:20 crc kubenswrapper[4998]: I0203 08:32:20.948663 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3fbc06a3-1332-4698-ba6f-4ac16830f999-catalog-content\") pod \"certified-operators-jbkb7\" (UID: \"3fbc06a3-1332-4698-ba6f-4ac16830f999\") " pod="openshift-marketplace/certified-operators-jbkb7" Feb 03 08:32:21 crc kubenswrapper[4998]: I0203 08:32:21.050156 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5f5r\" (UniqueName: \"kubernetes.io/projected/3fbc06a3-1332-4698-ba6f-4ac16830f999-kube-api-access-d5f5r\") pod \"certified-operators-jbkb7\" (UID: \"3fbc06a3-1332-4698-ba6f-4ac16830f999\") " pod="openshift-marketplace/certified-operators-jbkb7" Feb 03 08:32:21 crc kubenswrapper[4998]: I0203 08:32:21.050226 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3fbc06a3-1332-4698-ba6f-4ac16830f999-utilities\") pod \"certified-operators-jbkb7\" (UID: \"3fbc06a3-1332-4698-ba6f-4ac16830f999\") " pod="openshift-marketplace/certified-operators-jbkb7" Feb 03 08:32:21 crc kubenswrapper[4998]: I0203 08:32:21.050244 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/3fbc06a3-1332-4698-ba6f-4ac16830f999-catalog-content\") pod \"certified-operators-jbkb7\" (UID: \"3fbc06a3-1332-4698-ba6f-4ac16830f999\") " pod="openshift-marketplace/certified-operators-jbkb7" Feb 03 08:32:21 crc kubenswrapper[4998]: I0203 08:32:21.050709 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3fbc06a3-1332-4698-ba6f-4ac16830f999-catalog-content\") pod \"certified-operators-jbkb7\" (UID: \"3fbc06a3-1332-4698-ba6f-4ac16830f999\") " pod="openshift-marketplace/certified-operators-jbkb7" Feb 03 08:32:21 crc kubenswrapper[4998]: I0203 08:32:21.051013 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3fbc06a3-1332-4698-ba6f-4ac16830f999-utilities\") pod \"certified-operators-jbkb7\" (UID: \"3fbc06a3-1332-4698-ba6f-4ac16830f999\") " pod="openshift-marketplace/certified-operators-jbkb7" Feb 03 08:32:21 crc kubenswrapper[4998]: I0203 08:32:21.072698 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5f5r\" (UniqueName: \"kubernetes.io/projected/3fbc06a3-1332-4698-ba6f-4ac16830f999-kube-api-access-d5f5r\") pod \"certified-operators-jbkb7\" (UID: \"3fbc06a3-1332-4698-ba6f-4ac16830f999\") " pod="openshift-marketplace/certified-operators-jbkb7" Feb 03 08:32:21 crc kubenswrapper[4998]: I0203 08:32:21.142552 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jbkb7" Feb 03 08:32:21 crc kubenswrapper[4998]: I0203 08:32:21.606051 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jbkb7"] Feb 03 08:32:21 crc kubenswrapper[4998]: I0203 08:32:21.843112 4998 generic.go:334] "Generic (PLEG): container finished" podID="3fbc06a3-1332-4698-ba6f-4ac16830f999" containerID="8d721f0644b0712aaa53585d165f57a8164881b43d440781f306bd401d6be6a9" exitCode=0 Feb 03 08:32:21 crc kubenswrapper[4998]: I0203 08:32:21.843175 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jbkb7" event={"ID":"3fbc06a3-1332-4698-ba6f-4ac16830f999","Type":"ContainerDied","Data":"8d721f0644b0712aaa53585d165f57a8164881b43d440781f306bd401d6be6a9"} Feb 03 08:32:21 crc kubenswrapper[4998]: I0203 08:32:21.843223 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jbkb7" event={"ID":"3fbc06a3-1332-4698-ba6f-4ac16830f999","Type":"ContainerStarted","Data":"29d2d422f9b8fbd4f071b7cc786e171dfa5bb3b24401435eade3a6c2a880e6b5"} Feb 03 08:32:21 crc kubenswrapper[4998]: I0203 08:32:21.845275 4998 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 03 08:32:22 crc kubenswrapper[4998]: I0203 08:32:22.854194 4998 generic.go:334] "Generic (PLEG): container finished" podID="3fbc06a3-1332-4698-ba6f-4ac16830f999" containerID="b27bdf6128d2f6900b695b157bd6012cae271f22bef46a84292a6dcfd0db42ba" exitCode=0 Feb 03 08:32:22 crc kubenswrapper[4998]: I0203 08:32:22.854240 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jbkb7" event={"ID":"3fbc06a3-1332-4698-ba6f-4ac16830f999","Type":"ContainerDied","Data":"b27bdf6128d2f6900b695b157bd6012cae271f22bef46a84292a6dcfd0db42ba"} Feb 03 08:32:23 crc kubenswrapper[4998]: I0203 08:32:23.863183 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-jbkb7" event={"ID":"3fbc06a3-1332-4698-ba6f-4ac16830f999","Type":"ContainerStarted","Data":"a9463d1189bfee18565c2478a758f6489201c7df004ce00bd7491158bf0ccfc5"} Feb 03 08:32:23 crc kubenswrapper[4998]: I0203 08:32:23.894465 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-jbkb7" podStartSLOduration=2.459917942 podStartE2EDuration="3.894433439s" podCreationTimestamp="2026-02-03 08:32:20 +0000 UTC" firstStartedPulling="2026-02-03 08:32:21.845032501 +0000 UTC m=+6380.131726297" lastFinishedPulling="2026-02-03 08:32:23.279547998 +0000 UTC m=+6381.566241794" observedRunningTime="2026-02-03 08:32:23.882111997 +0000 UTC m=+6382.168805843" watchObservedRunningTime="2026-02-03 08:32:23.894433439 +0000 UTC m=+6382.181127275" Feb 03 08:32:31 crc kubenswrapper[4998]: I0203 08:32:31.143415 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-jbkb7" Feb 03 08:32:31 crc kubenswrapper[4998]: I0203 08:32:31.144032 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-jbkb7" Feb 03 08:32:31 crc kubenswrapper[4998]: I0203 08:32:31.191402 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-jbkb7" Feb 03 08:32:31 crc kubenswrapper[4998]: I0203 08:32:31.975555 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-jbkb7" Feb 03 08:32:32 crc kubenswrapper[4998]: I0203 08:32:32.017770 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jbkb7"] Feb 03 08:32:33 crc kubenswrapper[4998]: I0203 08:32:33.934268 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-jbkb7" podUID="3fbc06a3-1332-4698-ba6f-4ac16830f999" containerName="registry-server" containerID="cri-o://a9463d1189bfee18565c2478a758f6489201c7df004ce00bd7491158bf0ccfc5" gracePeriod=2 Feb 03 08:32:34 crc kubenswrapper[4998]: I0203 08:32:34.314986 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jbkb7" Feb 03 08:32:34 crc kubenswrapper[4998]: I0203 08:32:34.448485 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3fbc06a3-1332-4698-ba6f-4ac16830f999-catalog-content\") pod \"3fbc06a3-1332-4698-ba6f-4ac16830f999\" (UID: \"3fbc06a3-1332-4698-ba6f-4ac16830f999\") " Feb 03 08:32:34 crc kubenswrapper[4998]: I0203 08:32:34.448597 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d5f5r\" (UniqueName: \"kubernetes.io/projected/3fbc06a3-1332-4698-ba6f-4ac16830f999-kube-api-access-d5f5r\") pod \"3fbc06a3-1332-4698-ba6f-4ac16830f999\" (UID: \"3fbc06a3-1332-4698-ba6f-4ac16830f999\") " Feb 03 08:32:34 crc kubenswrapper[4998]: I0203 08:32:34.448633 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3fbc06a3-1332-4698-ba6f-4ac16830f999-utilities\") pod \"3fbc06a3-1332-4698-ba6f-4ac16830f999\" (UID: \"3fbc06a3-1332-4698-ba6f-4ac16830f999\") " Feb 03 08:32:34 crc kubenswrapper[4998]: I0203 08:32:34.449760 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3fbc06a3-1332-4698-ba6f-4ac16830f999-utilities" (OuterVolumeSpecName: "utilities") pod "3fbc06a3-1332-4698-ba6f-4ac16830f999" (UID: "3fbc06a3-1332-4698-ba6f-4ac16830f999"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:32:34 crc kubenswrapper[4998]: I0203 08:32:34.463019 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3fbc06a3-1332-4698-ba6f-4ac16830f999-kube-api-access-d5f5r" (OuterVolumeSpecName: "kube-api-access-d5f5r") pod "3fbc06a3-1332-4698-ba6f-4ac16830f999" (UID: "3fbc06a3-1332-4698-ba6f-4ac16830f999"). InnerVolumeSpecName "kube-api-access-d5f5r". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:32:34 crc kubenswrapper[4998]: I0203 08:32:34.508367 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3fbc06a3-1332-4698-ba6f-4ac16830f999-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3fbc06a3-1332-4698-ba6f-4ac16830f999" (UID: "3fbc06a3-1332-4698-ba6f-4ac16830f999"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:32:34 crc kubenswrapper[4998]: I0203 08:32:34.553019 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3fbc06a3-1332-4698-ba6f-4ac16830f999-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 08:32:34 crc kubenswrapper[4998]: I0203 08:32:34.553053 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d5f5r\" (UniqueName: \"kubernetes.io/projected/3fbc06a3-1332-4698-ba6f-4ac16830f999-kube-api-access-d5f5r\") on node \"crc\" DevicePath \"\"" Feb 03 08:32:34 crc kubenswrapper[4998]: I0203 08:32:34.553066 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3fbc06a3-1332-4698-ba6f-4ac16830f999-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 08:32:34 crc kubenswrapper[4998]: I0203 08:32:34.947575 4998 generic.go:334] "Generic (PLEG): container finished" podID="3fbc06a3-1332-4698-ba6f-4ac16830f999" containerID="a9463d1189bfee18565c2478a758f6489201c7df004ce00bd7491158bf0ccfc5" exitCode=0 Feb 03 08:32:34 crc kubenswrapper[4998]: I0203 08:32:34.947630 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jbkb7" event={"ID":"3fbc06a3-1332-4698-ba6f-4ac16830f999","Type":"ContainerDied","Data":"a9463d1189bfee18565c2478a758f6489201c7df004ce00bd7491158bf0ccfc5"} Feb 03 08:32:34 crc kubenswrapper[4998]: I0203 08:32:34.947669 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jbkb7" event={"ID":"3fbc06a3-1332-4698-ba6f-4ac16830f999","Type":"ContainerDied","Data":"29d2d422f9b8fbd4f071b7cc786e171dfa5bb3b24401435eade3a6c2a880e6b5"} Feb 03 08:32:34 crc kubenswrapper[4998]: I0203 08:32:34.947671 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jbkb7" Feb 03 08:32:34 crc kubenswrapper[4998]: I0203 08:32:34.947691 4998 scope.go:117] "RemoveContainer" containerID="a9463d1189bfee18565c2478a758f6489201c7df004ce00bd7491158bf0ccfc5" Feb 03 08:32:34 crc kubenswrapper[4998]: I0203 08:32:34.982636 4998 scope.go:117] "RemoveContainer" containerID="b27bdf6128d2f6900b695b157bd6012cae271f22bef46a84292a6dcfd0db42ba" Feb 03 08:32:35 crc kubenswrapper[4998]: I0203 08:32:35.011747 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jbkb7"] Feb 03 08:32:35 crc kubenswrapper[4998]: I0203 08:32:35.014483 4998 scope.go:117] "RemoveContainer" containerID="8d721f0644b0712aaa53585d165f57a8164881b43d440781f306bd401d6be6a9" Feb 03 08:32:35 crc kubenswrapper[4998]: I0203 08:32:35.024705 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-jbkb7"] Feb 03 08:32:35 crc kubenswrapper[4998]: I0203 08:32:35.040115 4998 scope.go:117] "RemoveContainer" containerID="a9463d1189bfee18565c2478a758f6489201c7df004ce00bd7491158bf0ccfc5" Feb 03 08:32:35 crc kubenswrapper[4998]: E0203 08:32:35.040502 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a9463d1189bfee18565c2478a758f6489201c7df004ce00bd7491158bf0ccfc5\": container with ID starting with a9463d1189bfee18565c2478a758f6489201c7df004ce00bd7491158bf0ccfc5 not found: ID does not exist" containerID="a9463d1189bfee18565c2478a758f6489201c7df004ce00bd7491158bf0ccfc5" Feb 03 08:32:35 crc kubenswrapper[4998]: I0203 08:32:35.040551 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9463d1189bfee18565c2478a758f6489201c7df004ce00bd7491158bf0ccfc5"} err="failed to get container status \"a9463d1189bfee18565c2478a758f6489201c7df004ce00bd7491158bf0ccfc5\": rpc error: code = NotFound desc = could not find container \"a9463d1189bfee18565c2478a758f6489201c7df004ce00bd7491158bf0ccfc5\": container with ID starting with a9463d1189bfee18565c2478a758f6489201c7df004ce00bd7491158bf0ccfc5 not found: ID does not exist" Feb 03 08:32:35 crc kubenswrapper[4998]: I0203 08:32:35.040582 4998 scope.go:117] "RemoveContainer" containerID="b27bdf6128d2f6900b695b157bd6012cae271f22bef46a84292a6dcfd0db42ba" Feb 03 08:32:35 crc kubenswrapper[4998]: E0203 08:32:35.041000 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b27bdf6128d2f6900b695b157bd6012cae271f22bef46a84292a6dcfd0db42ba\": container with ID starting with b27bdf6128d2f6900b695b157bd6012cae271f22bef46a84292a6dcfd0db42ba not found: ID does not exist" containerID="b27bdf6128d2f6900b695b157bd6012cae271f22bef46a84292a6dcfd0db42ba" Feb 03 08:32:35 crc kubenswrapper[4998]: I0203 08:32:35.041061 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b27bdf6128d2f6900b695b157bd6012cae271f22bef46a84292a6dcfd0db42ba"} err="failed to get container status \"b27bdf6128d2f6900b695b157bd6012cae271f22bef46a84292a6dcfd0db42ba\": rpc error: code = NotFound desc = could not find container \"b27bdf6128d2f6900b695b157bd6012cae271f22bef46a84292a6dcfd0db42ba\": container with ID starting with b27bdf6128d2f6900b695b157bd6012cae271f22bef46a84292a6dcfd0db42ba not found: ID does not exist" Feb 03 08:32:35 crc kubenswrapper[4998]: I0203 08:32:35.041096 4998 scope.go:117] "RemoveContainer" 
containerID="8d721f0644b0712aaa53585d165f57a8164881b43d440781f306bd401d6be6a9" Feb 03 08:32:35 crc kubenswrapper[4998]: E0203 08:32:35.041525 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d721f0644b0712aaa53585d165f57a8164881b43d440781f306bd401d6be6a9\": container with ID starting with 8d721f0644b0712aaa53585d165f57a8164881b43d440781f306bd401d6be6a9 not found: ID does not exist" containerID="8d721f0644b0712aaa53585d165f57a8164881b43d440781f306bd401d6be6a9" Feb 03 08:32:35 crc kubenswrapper[4998]: I0203 08:32:35.041549 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d721f0644b0712aaa53585d165f57a8164881b43d440781f306bd401d6be6a9"} err="failed to get container status \"8d721f0644b0712aaa53585d165f57a8164881b43d440781f306bd401d6be6a9\": rpc error: code = NotFound desc = could not find container \"8d721f0644b0712aaa53585d165f57a8164881b43d440781f306bd401d6be6a9\": container with ID starting with 8d721f0644b0712aaa53585d165f57a8164881b43d440781f306bd401d6be6a9 not found: ID does not exist" Feb 03 08:32:36 crc kubenswrapper[4998]: I0203 08:32:36.438364 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3fbc06a3-1332-4698-ba6f-4ac16830f999" path="/var/lib/kubelet/pods/3fbc06a3-1332-4698-ba6f-4ac16830f999/volumes" Feb 03 08:32:42 crc kubenswrapper[4998]: I0203 08:32:42.754004 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 08:32:42 crc kubenswrapper[4998]: I0203 08:32:42.754720 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 08:33:12 crc kubenswrapper[4998]: I0203 08:33:12.753822 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 08:33:12 crc kubenswrapper[4998]: I0203 08:33:12.754376 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 08:33:42 crc kubenswrapper[4998]: I0203 08:33:42.754224 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 08:33:42 crc kubenswrapper[4998]: I0203 08:33:42.754835 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 08:33:42 crc kubenswrapper[4998]: I0203 08:33:42.754884 4998 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" Feb 03 08:33:42 crc kubenswrapper[4998]: I0203 08:33:42.755575 4998 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b31c709b422a270b2cf6ad9da60b503550a133b2bc889255c1a5bd00ca0f9e9c"} pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 03 08:33:42 crc kubenswrapper[4998]: I0203 08:33:42.755629 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" containerID="cri-o://b31c709b422a270b2cf6ad9da60b503550a133b2bc889255c1a5bd00ca0f9e9c" gracePeriod=600 Feb 03 08:33:43 crc kubenswrapper[4998]: I0203 08:33:43.466637 4998 generic.go:334] "Generic (PLEG): container finished" podID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerID="b31c709b422a270b2cf6ad9da60b503550a133b2bc889255c1a5bd00ca0f9e9c" exitCode=0 Feb 03 08:33:43 crc kubenswrapper[4998]: I0203 08:33:43.466773 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerDied","Data":"b31c709b422a270b2cf6ad9da60b503550a133b2bc889255c1a5bd00ca0f9e9c"} Feb 03 08:33:43 crc kubenswrapper[4998]: I0203 08:33:43.467137 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerStarted","Data":"68a2e0b59f4a15c3fb480672ad458f573853ef14695099ba937f9507072081ed"} Feb 03 08:33:43 crc kubenswrapper[4998]: I0203 08:33:43.467171 4998 scope.go:117] "RemoveContainer" containerID="898cd48af269365b509b96f6cec85784df08eaf849650d4d9d793db3638ff97d" Feb 03 08:34:21 crc kubenswrapper[4998]: I0203 08:34:21.866884 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-qzzzg"] Feb 03 08:34:21 crc kubenswrapper[4998]: E0203 08:34:21.868045 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fbc06a3-1332-4698-ba6f-4ac16830f999" containerName="extract-content" Feb 03 08:34:21 crc kubenswrapper[4998]: I0203 08:34:21.868063 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fbc06a3-1332-4698-ba6f-4ac16830f999" containerName="extract-content" Feb 03 08:34:21 crc kubenswrapper[4998]: E0203 08:34:21.868080 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fbc06a3-1332-4698-ba6f-4ac16830f999" containerName="extract-utilities" Feb 03 08:34:21 crc kubenswrapper[4998]: I0203 08:34:21.868090 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fbc06a3-1332-4698-ba6f-4ac16830f999" containerName="extract-utilities" Feb 03 08:34:21 crc kubenswrapper[4998]: E0203 08:34:21.868098 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fbc06a3-1332-4698-ba6f-4ac16830f999" containerName="registry-server" Feb 03 08:34:21 crc kubenswrapper[4998]: I0203 08:34:21.868107 4998 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="3fbc06a3-1332-4698-ba6f-4ac16830f999" containerName="registry-server" Feb 03 08:34:21 crc kubenswrapper[4998]: I0203 08:34:21.868276 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fbc06a3-1332-4698-ba6f-4ac16830f999" containerName="registry-server" Feb 03 08:34:21 crc kubenswrapper[4998]: I0203 08:34:21.869399 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qzzzg" Feb 03 08:34:21 crc kubenswrapper[4998]: I0203 08:34:21.882959 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qzzzg"] Feb 03 08:34:22 crc kubenswrapper[4998]: I0203 08:34:22.027270 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/44e42bca-9c1e-44e4-8029-52c0a72cdb17-utilities\") pod \"community-operators-qzzzg\" (UID: \"44e42bca-9c1e-44e4-8029-52c0a72cdb17\") " pod="openshift-marketplace/community-operators-qzzzg" Feb 03 08:34:22 crc kubenswrapper[4998]: I0203 08:34:22.027342 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44e42bca-9c1e-44e4-8029-52c0a72cdb17-catalog-content\") pod \"community-operators-qzzzg\" (UID: \"44e42bca-9c1e-44e4-8029-52c0a72cdb17\") " pod="openshift-marketplace/community-operators-qzzzg" Feb 03 08:34:22 crc kubenswrapper[4998]: I0203 08:34:22.027471 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgvht\" (UniqueName: \"kubernetes.io/projected/44e42bca-9c1e-44e4-8029-52c0a72cdb17-kube-api-access-tgvht\") pod \"community-operators-qzzzg\" (UID: \"44e42bca-9c1e-44e4-8029-52c0a72cdb17\") " pod="openshift-marketplace/community-operators-qzzzg" Feb 03 08:34:22 crc kubenswrapper[4998]: I0203 08:34:22.128823 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgvht\" (UniqueName: \"kubernetes.io/projected/44e42bca-9c1e-44e4-8029-52c0a72cdb17-kube-api-access-tgvht\") pod \"community-operators-qzzzg\" (UID: \"44e42bca-9c1e-44e4-8029-52c0a72cdb17\") " pod="openshift-marketplace/community-operators-qzzzg" Feb 03 08:34:22 crc kubenswrapper[4998]: I0203 08:34:22.128929 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/44e42bca-9c1e-44e4-8029-52c0a72cdb17-utilities\") pod \"community-operators-qzzzg\" (UID: \"44e42bca-9c1e-44e4-8029-52c0a72cdb17\") " pod="openshift-marketplace/community-operators-qzzzg" Feb 03 08:34:22 crc kubenswrapper[4998]: I0203 08:34:22.128959 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44e42bca-9c1e-44e4-8029-52c0a72cdb17-catalog-content\") pod \"community-operators-qzzzg\" (UID: \"44e42bca-9c1e-44e4-8029-52c0a72cdb17\") " pod="openshift-marketplace/community-operators-qzzzg" Feb 03 08:34:22 crc kubenswrapper[4998]: I0203 08:34:22.129468 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44e42bca-9c1e-44e4-8029-52c0a72cdb17-catalog-content\") pod \"community-operators-qzzzg\" (UID: \"44e42bca-9c1e-44e4-8029-52c0a72cdb17\") " pod="openshift-marketplace/community-operators-qzzzg" Feb 03 08:34:22 crc kubenswrapper[4998]: I0203 08:34:22.129520 4998 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/44e42bca-9c1e-44e4-8029-52c0a72cdb17-utilities\") pod \"community-operators-qzzzg\" (UID: \"44e42bca-9c1e-44e4-8029-52c0a72cdb17\") " pod="openshift-marketplace/community-operators-qzzzg" Feb 03 08:34:22 crc kubenswrapper[4998]: I0203 08:34:22.151030 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgvht\" (UniqueName: \"kubernetes.io/projected/44e42bca-9c1e-44e4-8029-52c0a72cdb17-kube-api-access-tgvht\") pod \"community-operators-qzzzg\" (UID: \"44e42bca-9c1e-44e4-8029-52c0a72cdb17\") " pod="openshift-marketplace/community-operators-qzzzg" Feb 03 08:34:22 crc kubenswrapper[4998]: I0203 08:34:22.225813 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qzzzg" Feb 03 08:34:22 crc kubenswrapper[4998]: I0203 08:34:22.700586 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qzzzg"] Feb 03 08:34:22 crc kubenswrapper[4998]: I0203 08:34:22.741426 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qzzzg" event={"ID":"44e42bca-9c1e-44e4-8029-52c0a72cdb17","Type":"ContainerStarted","Data":"56b0c3385406c0c2b760e74e08ebbb94dba86e333e6a982b9afd50ad6741901f"} Feb 03 08:34:23 crc kubenswrapper[4998]: I0203 08:34:23.772218 4998 generic.go:334] "Generic (PLEG): container finished" podID="44e42bca-9c1e-44e4-8029-52c0a72cdb17" containerID="43dd7648a72c1e048afd8011fdd91f19ea487162de257fbc95d2fbe7d13ebf39" exitCode=0 Feb 03 08:34:23 crc kubenswrapper[4998]: I0203 08:34:23.772363 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qzzzg" event={"ID":"44e42bca-9c1e-44e4-8029-52c0a72cdb17","Type":"ContainerDied","Data":"43dd7648a72c1e048afd8011fdd91f19ea487162de257fbc95d2fbe7d13ebf39"} Feb 03 08:34:24 crc kubenswrapper[4998]: I0203 08:34:24.780549 4998 generic.go:334] "Generic (PLEG): container finished" podID="44e42bca-9c1e-44e4-8029-52c0a72cdb17" containerID="775329e165d2bbda63e6669b066f3bbd9d25b2e0948eb244f7dda33664bf2b7a" exitCode=0 Feb 03 08:34:24 crc kubenswrapper[4998]: I0203 08:34:24.780949 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qzzzg" event={"ID":"44e42bca-9c1e-44e4-8029-52c0a72cdb17","Type":"ContainerDied","Data":"775329e165d2bbda63e6669b066f3bbd9d25b2e0948eb244f7dda33664bf2b7a"} Feb 03 08:34:25 crc kubenswrapper[4998]: I0203 08:34:25.798194 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qzzzg" event={"ID":"44e42bca-9c1e-44e4-8029-52c0a72cdb17","Type":"ContainerStarted","Data":"011f7fb04ee59199acb2a4528e8e7e9476be33031c30f3e47712325a78023cb1"} Feb 03 08:34:25 crc kubenswrapper[4998]: I0203 08:34:25.818086 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-qzzzg" podStartSLOduration=3.344743609 podStartE2EDuration="4.818068941s" podCreationTimestamp="2026-02-03 08:34:21 +0000 UTC" firstStartedPulling="2026-02-03 08:34:23.773863902 +0000 UTC m=+6502.060557698" lastFinishedPulling="2026-02-03 08:34:25.247189224 +0000 UTC m=+6503.533883030" observedRunningTime="2026-02-03 08:34:25.813189182 +0000 UTC m=+6504.099883008" watchObservedRunningTime="2026-02-03 08:34:25.818068941 +0000 UTC m=+6504.104762747" Feb 03 08:34:32 crc kubenswrapper[4998]: 
I0203 08:34:32.225987 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-qzzzg" Feb 03 08:34:32 crc kubenswrapper[4998]: I0203 08:34:32.226757 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-qzzzg" Feb 03 08:34:32 crc kubenswrapper[4998]: I0203 08:34:32.272519 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-qzzzg" Feb 03 08:34:32 crc kubenswrapper[4998]: I0203 08:34:32.887301 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-qzzzg" Feb 03 08:34:32 crc kubenswrapper[4998]: I0203 08:34:32.936917 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qzzzg"] Feb 03 08:34:34 crc kubenswrapper[4998]: I0203 08:34:34.860337 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-qzzzg" podUID="44e42bca-9c1e-44e4-8029-52c0a72cdb17" containerName="registry-server" containerID="cri-o://011f7fb04ee59199acb2a4528e8e7e9476be33031c30f3e47712325a78023cb1" gracePeriod=2 Feb 03 08:34:35 crc kubenswrapper[4998]: I0203 08:34:35.869695 4998 generic.go:334] "Generic (PLEG): container finished" podID="44e42bca-9c1e-44e4-8029-52c0a72cdb17" containerID="011f7fb04ee59199acb2a4528e8e7e9476be33031c30f3e47712325a78023cb1" exitCode=0 Feb 03 08:34:35 crc kubenswrapper[4998]: I0203 08:34:35.869764 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qzzzg" event={"ID":"44e42bca-9c1e-44e4-8029-52c0a72cdb17","Type":"ContainerDied","Data":"011f7fb04ee59199acb2a4528e8e7e9476be33031c30f3e47712325a78023cb1"} Feb 03 08:34:36 crc kubenswrapper[4998]: I0203 08:34:36.472303 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qzzzg" Feb 03 08:34:36 crc kubenswrapper[4998]: I0203 08:34:36.541112 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/44e42bca-9c1e-44e4-8029-52c0a72cdb17-utilities\") pod \"44e42bca-9c1e-44e4-8029-52c0a72cdb17\" (UID: \"44e42bca-9c1e-44e4-8029-52c0a72cdb17\") " Feb 03 08:34:36 crc kubenswrapper[4998]: I0203 08:34:36.541324 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44e42bca-9c1e-44e4-8029-52c0a72cdb17-catalog-content\") pod \"44e42bca-9c1e-44e4-8029-52c0a72cdb17\" (UID: \"44e42bca-9c1e-44e4-8029-52c0a72cdb17\") " Feb 03 08:34:36 crc kubenswrapper[4998]: I0203 08:34:36.541378 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tgvht\" (UniqueName: \"kubernetes.io/projected/44e42bca-9c1e-44e4-8029-52c0a72cdb17-kube-api-access-tgvht\") pod \"44e42bca-9c1e-44e4-8029-52c0a72cdb17\" (UID: \"44e42bca-9c1e-44e4-8029-52c0a72cdb17\") " Feb 03 08:34:36 crc kubenswrapper[4998]: I0203 08:34:36.542392 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/44e42bca-9c1e-44e4-8029-52c0a72cdb17-utilities" (OuterVolumeSpecName: "utilities") pod "44e42bca-9c1e-44e4-8029-52c0a72cdb17" (UID: "44e42bca-9c1e-44e4-8029-52c0a72cdb17"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:34:36 crc kubenswrapper[4998]: I0203 08:34:36.551102 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44e42bca-9c1e-44e4-8029-52c0a72cdb17-kube-api-access-tgvht" (OuterVolumeSpecName: "kube-api-access-tgvht") pod "44e42bca-9c1e-44e4-8029-52c0a72cdb17" (UID: "44e42bca-9c1e-44e4-8029-52c0a72cdb17"). InnerVolumeSpecName "kube-api-access-tgvht". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:34:36 crc kubenswrapper[4998]: I0203 08:34:36.599272 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/44e42bca-9c1e-44e4-8029-52c0a72cdb17-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "44e42bca-9c1e-44e4-8029-52c0a72cdb17" (UID: "44e42bca-9c1e-44e4-8029-52c0a72cdb17"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:34:36 crc kubenswrapper[4998]: I0203 08:34:36.647340 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/44e42bca-9c1e-44e4-8029-52c0a72cdb17-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 08:34:36 crc kubenswrapper[4998]: I0203 08:34:36.647394 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44e42bca-9c1e-44e4-8029-52c0a72cdb17-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 08:34:36 crc kubenswrapper[4998]: I0203 08:34:36.647410 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tgvht\" (UniqueName: \"kubernetes.io/projected/44e42bca-9c1e-44e4-8029-52c0a72cdb17-kube-api-access-tgvht\") on node \"crc\" DevicePath \"\"" Feb 03 08:34:36 crc kubenswrapper[4998]: I0203 08:34:36.880383 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qzzzg" event={"ID":"44e42bca-9c1e-44e4-8029-52c0a72cdb17","Type":"ContainerDied","Data":"56b0c3385406c0c2b760e74e08ebbb94dba86e333e6a982b9afd50ad6741901f"} Feb 03 08:34:36 crc kubenswrapper[4998]: I0203 08:34:36.880452 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qzzzg" Feb 03 08:34:36 crc kubenswrapper[4998]: I0203 08:34:36.881004 4998 scope.go:117] "RemoveContainer" containerID="011f7fb04ee59199acb2a4528e8e7e9476be33031c30f3e47712325a78023cb1" Feb 03 08:34:36 crc kubenswrapper[4998]: I0203 08:34:36.921936 4998 scope.go:117] "RemoveContainer" containerID="775329e165d2bbda63e6669b066f3bbd9d25b2e0948eb244f7dda33664bf2b7a" Feb 03 08:34:36 crc kubenswrapper[4998]: I0203 08:34:36.939236 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qzzzg"] Feb 03 08:34:36 crc kubenswrapper[4998]: I0203 08:34:36.943752 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-qzzzg"] Feb 03 08:34:36 crc kubenswrapper[4998]: I0203 08:34:36.946879 4998 scope.go:117] "RemoveContainer" containerID="43dd7648a72c1e048afd8011fdd91f19ea487162de257fbc95d2fbe7d13ebf39" Feb 03 08:34:38 crc kubenswrapper[4998]: I0203 08:34:38.436027 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44e42bca-9c1e-44e4-8029-52c0a72cdb17" path="/var/lib/kubelet/pods/44e42bca-9c1e-44e4-8029-52c0a72cdb17/volumes" Feb 03 08:34:40 crc kubenswrapper[4998]: I0203 08:34:40.515505 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-tk2mw"] Feb 03 08:34:40 crc kubenswrapper[4998]: E0203 08:34:40.516581 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44e42bca-9c1e-44e4-8029-52c0a72cdb17" containerName="extract-content" Feb 03 08:34:40 crc kubenswrapper[4998]: I0203 08:34:40.516600 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="44e42bca-9c1e-44e4-8029-52c0a72cdb17" containerName="extract-content" Feb 03 08:34:40 crc kubenswrapper[4998]: E0203 08:34:40.516615 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44e42bca-9c1e-44e4-8029-52c0a72cdb17" containerName="extract-utilities" Feb 03 08:34:40 crc kubenswrapper[4998]: I0203 08:34:40.516621 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="44e42bca-9c1e-44e4-8029-52c0a72cdb17" containerName="extract-utilities" Feb 03 08:34:40 crc kubenswrapper[4998]: E0203 08:34:40.516648 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44e42bca-9c1e-44e4-8029-52c0a72cdb17" containerName="registry-server" Feb 03 08:34:40 crc kubenswrapper[4998]: I0203 08:34:40.516655 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="44e42bca-9c1e-44e4-8029-52c0a72cdb17" containerName="registry-server" Feb 03 08:34:40 crc kubenswrapper[4998]: I0203 08:34:40.516890 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="44e42bca-9c1e-44e4-8029-52c0a72cdb17" containerName="registry-server" Feb 03 08:34:40 crc kubenswrapper[4998]: I0203 08:34:40.518227 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tk2mw" Feb 03 08:34:40 crc kubenswrapper[4998]: I0203 08:34:40.537410 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tk2mw"] Feb 03 08:34:40 crc kubenswrapper[4998]: I0203 08:34:40.605531 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28zcs\" (UniqueName: \"kubernetes.io/projected/30ec2935-e48b-4a8c-bbd5-7406149b4689-kube-api-access-28zcs\") pod \"redhat-marketplace-tk2mw\" (UID: \"30ec2935-e48b-4a8c-bbd5-7406149b4689\") " pod="openshift-marketplace/redhat-marketplace-tk2mw" Feb 03 08:34:40 crc kubenswrapper[4998]: I0203 08:34:40.605650 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30ec2935-e48b-4a8c-bbd5-7406149b4689-utilities\") pod \"redhat-marketplace-tk2mw\" (UID: \"30ec2935-e48b-4a8c-bbd5-7406149b4689\") " pod="openshift-marketplace/redhat-marketplace-tk2mw" Feb 03 08:34:40 crc kubenswrapper[4998]: I0203 08:34:40.605726 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30ec2935-e48b-4a8c-bbd5-7406149b4689-catalog-content\") pod \"redhat-marketplace-tk2mw\" (UID: \"30ec2935-e48b-4a8c-bbd5-7406149b4689\") " pod="openshift-marketplace/redhat-marketplace-tk2mw" Feb 03 08:34:40 crc kubenswrapper[4998]: I0203 08:34:40.707295 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30ec2935-e48b-4a8c-bbd5-7406149b4689-catalog-content\") pod \"redhat-marketplace-tk2mw\" (UID: \"30ec2935-e48b-4a8c-bbd5-7406149b4689\") " pod="openshift-marketplace/redhat-marketplace-tk2mw" Feb 03 08:34:40 crc kubenswrapper[4998]: I0203 08:34:40.707366 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28zcs\" (UniqueName: \"kubernetes.io/projected/30ec2935-e48b-4a8c-bbd5-7406149b4689-kube-api-access-28zcs\") pod \"redhat-marketplace-tk2mw\" (UID: \"30ec2935-e48b-4a8c-bbd5-7406149b4689\") " pod="openshift-marketplace/redhat-marketplace-tk2mw" Feb 03 08:34:40 crc kubenswrapper[4998]: I0203 08:34:40.707444 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30ec2935-e48b-4a8c-bbd5-7406149b4689-utilities\") pod \"redhat-marketplace-tk2mw\" (UID: \"30ec2935-e48b-4a8c-bbd5-7406149b4689\") " pod="openshift-marketplace/redhat-marketplace-tk2mw" Feb 03 08:34:40 crc kubenswrapper[4998]: I0203 08:34:40.707892 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30ec2935-e48b-4a8c-bbd5-7406149b4689-catalog-content\") pod \"redhat-marketplace-tk2mw\" (UID: \"30ec2935-e48b-4a8c-bbd5-7406149b4689\") " pod="openshift-marketplace/redhat-marketplace-tk2mw" Feb 03 08:34:40 crc kubenswrapper[4998]: I0203 08:34:40.707931 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30ec2935-e48b-4a8c-bbd5-7406149b4689-utilities\") pod \"redhat-marketplace-tk2mw\" (UID: \"30ec2935-e48b-4a8c-bbd5-7406149b4689\") " pod="openshift-marketplace/redhat-marketplace-tk2mw" Feb 03 08:34:40 crc kubenswrapper[4998]: I0203 08:34:40.729405 4998 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-28zcs\" (UniqueName: \"kubernetes.io/projected/30ec2935-e48b-4a8c-bbd5-7406149b4689-kube-api-access-28zcs\") pod \"redhat-marketplace-tk2mw\" (UID: \"30ec2935-e48b-4a8c-bbd5-7406149b4689\") " pod="openshift-marketplace/redhat-marketplace-tk2mw" Feb 03 08:34:40 crc kubenswrapper[4998]: I0203 08:34:40.839557 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tk2mw" Feb 03 08:34:41 crc kubenswrapper[4998]: I0203 08:34:41.082549 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tk2mw"] Feb 03 08:34:41 crc kubenswrapper[4998]: I0203 08:34:41.921750 4998 generic.go:334] "Generic (PLEG): container finished" podID="30ec2935-e48b-4a8c-bbd5-7406149b4689" containerID="30aa4d5a0c88e9789b0a0d244be22340436963937c2f995253b9c76f7a970671" exitCode=0 Feb 03 08:34:41 crc kubenswrapper[4998]: I0203 08:34:41.922044 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tk2mw" event={"ID":"30ec2935-e48b-4a8c-bbd5-7406149b4689","Type":"ContainerDied","Data":"30aa4d5a0c88e9789b0a0d244be22340436963937c2f995253b9c76f7a970671"} Feb 03 08:34:41 crc kubenswrapper[4998]: I0203 08:34:41.923756 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tk2mw" event={"ID":"30ec2935-e48b-4a8c-bbd5-7406149b4689","Type":"ContainerStarted","Data":"3013498317b45f0247a2f91825a7fef60b7087268f6d2a4529bea9fd606ec286"} Feb 03 08:34:42 crc kubenswrapper[4998]: I0203 08:34:42.934691 4998 generic.go:334] "Generic (PLEG): container finished" podID="30ec2935-e48b-4a8c-bbd5-7406149b4689" containerID="b393bde1f5f9a4b8990a501a0ef41ccc8bb1b2d358239cb2d13065a4312eeb58" exitCode=0 Feb 03 08:34:42 crc kubenswrapper[4998]: I0203 08:34:42.934762 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tk2mw" event={"ID":"30ec2935-e48b-4a8c-bbd5-7406149b4689","Type":"ContainerDied","Data":"b393bde1f5f9a4b8990a501a0ef41ccc8bb1b2d358239cb2d13065a4312eeb58"} Feb 03 08:34:43 crc kubenswrapper[4998]: I0203 08:34:43.945757 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tk2mw" event={"ID":"30ec2935-e48b-4a8c-bbd5-7406149b4689","Type":"ContainerStarted","Data":"ffe4e62b3ff25fe3b05669117fb035cefd620ef3b80ab306a4a570f32f9994cf"} Feb 03 08:34:43 crc kubenswrapper[4998]: I0203 08:34:43.973973 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-tk2mw" podStartSLOduration=2.5514821789999997 podStartE2EDuration="3.97395377s" podCreationTimestamp="2026-02-03 08:34:40 +0000 UTC" firstStartedPulling="2026-02-03 08:34:41.9244652 +0000 UTC m=+6520.211158996" lastFinishedPulling="2026-02-03 08:34:43.346936781 +0000 UTC m=+6521.633630587" observedRunningTime="2026-02-03 08:34:43.967773014 +0000 UTC m=+6522.254466870" watchObservedRunningTime="2026-02-03 08:34:43.97395377 +0000 UTC m=+6522.260647576" Feb 03 08:34:50 crc kubenswrapper[4998]: I0203 08:34:50.840310 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-tk2mw" Feb 03 08:34:50 crc kubenswrapper[4998]: I0203 08:34:50.841168 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-tk2mw" Feb 03 08:34:50 crc kubenswrapper[4998]: I0203 08:34:50.884736 4998 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-tk2mw" Feb 03 08:34:51 crc kubenswrapper[4998]: I0203 08:34:51.046550 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-tk2mw" Feb 03 08:34:51 crc kubenswrapper[4998]: I0203 08:34:51.127090 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tk2mw"] Feb 03 08:34:53 crc kubenswrapper[4998]: I0203 08:34:53.016581 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-tk2mw" podUID="30ec2935-e48b-4a8c-bbd5-7406149b4689" containerName="registry-server" containerID="cri-o://ffe4e62b3ff25fe3b05669117fb035cefd620ef3b80ab306a4a570f32f9994cf" gracePeriod=2 Feb 03 08:34:53 crc kubenswrapper[4998]: I0203 08:34:53.399487 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tk2mw" Feb 03 08:34:53 crc kubenswrapper[4998]: I0203 08:34:53.490352 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30ec2935-e48b-4a8c-bbd5-7406149b4689-catalog-content\") pod \"30ec2935-e48b-4a8c-bbd5-7406149b4689\" (UID: \"30ec2935-e48b-4a8c-bbd5-7406149b4689\") " Feb 03 08:34:53 crc kubenswrapper[4998]: I0203 08:34:53.490412 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30ec2935-e48b-4a8c-bbd5-7406149b4689-utilities\") pod \"30ec2935-e48b-4a8c-bbd5-7406149b4689\" (UID: \"30ec2935-e48b-4a8c-bbd5-7406149b4689\") " Feb 03 08:34:53 crc kubenswrapper[4998]: I0203 08:34:53.490515 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-28zcs\" (UniqueName: \"kubernetes.io/projected/30ec2935-e48b-4a8c-bbd5-7406149b4689-kube-api-access-28zcs\") pod \"30ec2935-e48b-4a8c-bbd5-7406149b4689\" (UID: \"30ec2935-e48b-4a8c-bbd5-7406149b4689\") " Feb 03 08:34:53 crc kubenswrapper[4998]: I0203 08:34:53.491842 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/30ec2935-e48b-4a8c-bbd5-7406149b4689-utilities" (OuterVolumeSpecName: "utilities") pod "30ec2935-e48b-4a8c-bbd5-7406149b4689" (UID: "30ec2935-e48b-4a8c-bbd5-7406149b4689"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:34:53 crc kubenswrapper[4998]: I0203 08:34:53.501502 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30ec2935-e48b-4a8c-bbd5-7406149b4689-kube-api-access-28zcs" (OuterVolumeSpecName: "kube-api-access-28zcs") pod "30ec2935-e48b-4a8c-bbd5-7406149b4689" (UID: "30ec2935-e48b-4a8c-bbd5-7406149b4689"). InnerVolumeSpecName "kube-api-access-28zcs". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:34:53 crc kubenswrapper[4998]: I0203 08:34:53.518142 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/30ec2935-e48b-4a8c-bbd5-7406149b4689-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "30ec2935-e48b-4a8c-bbd5-7406149b4689" (UID: "30ec2935-e48b-4a8c-bbd5-7406149b4689"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:34:53 crc kubenswrapper[4998]: I0203 08:34:53.592658 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-28zcs\" (UniqueName: \"kubernetes.io/projected/30ec2935-e48b-4a8c-bbd5-7406149b4689-kube-api-access-28zcs\") on node \"crc\" DevicePath \"\"" Feb 03 08:34:53 crc kubenswrapper[4998]: I0203 08:34:53.592695 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30ec2935-e48b-4a8c-bbd5-7406149b4689-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 08:34:53 crc kubenswrapper[4998]: I0203 08:34:53.592704 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30ec2935-e48b-4a8c-bbd5-7406149b4689-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 08:34:54 crc kubenswrapper[4998]: I0203 08:34:54.028935 4998 generic.go:334] "Generic (PLEG): container finished" podID="30ec2935-e48b-4a8c-bbd5-7406149b4689" containerID="ffe4e62b3ff25fe3b05669117fb035cefd620ef3b80ab306a4a570f32f9994cf" exitCode=0 Feb 03 08:34:54 crc kubenswrapper[4998]: I0203 08:34:54.029031 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tk2mw" event={"ID":"30ec2935-e48b-4a8c-bbd5-7406149b4689","Type":"ContainerDied","Data":"ffe4e62b3ff25fe3b05669117fb035cefd620ef3b80ab306a4a570f32f9994cf"} Feb 03 08:34:54 crc kubenswrapper[4998]: I0203 08:34:54.029087 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tk2mw" Feb 03 08:34:54 crc kubenswrapper[4998]: I0203 08:34:54.029146 4998 scope.go:117] "RemoveContainer" containerID="ffe4e62b3ff25fe3b05669117fb035cefd620ef3b80ab306a4a570f32f9994cf" Feb 03 08:34:54 crc kubenswrapper[4998]: I0203 08:34:54.029103 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tk2mw" event={"ID":"30ec2935-e48b-4a8c-bbd5-7406149b4689","Type":"ContainerDied","Data":"3013498317b45f0247a2f91825a7fef60b7087268f6d2a4529bea9fd606ec286"} Feb 03 08:34:54 crc kubenswrapper[4998]: I0203 08:34:54.058697 4998 scope.go:117] "RemoveContainer" containerID="b393bde1f5f9a4b8990a501a0ef41ccc8bb1b2d358239cb2d13065a4312eeb58" Feb 03 08:34:54 crc kubenswrapper[4998]: I0203 08:34:54.078977 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tk2mw"] Feb 03 08:34:54 crc kubenswrapper[4998]: I0203 08:34:54.081830 4998 scope.go:117] "RemoveContainer" containerID="30aa4d5a0c88e9789b0a0d244be22340436963937c2f995253b9c76f7a970671" Feb 03 08:34:54 crc kubenswrapper[4998]: I0203 08:34:54.084379 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-tk2mw"] Feb 03 08:34:54 crc kubenswrapper[4998]: I0203 08:34:54.113028 4998 scope.go:117] "RemoveContainer" containerID="ffe4e62b3ff25fe3b05669117fb035cefd620ef3b80ab306a4a570f32f9994cf" Feb 03 08:34:54 crc kubenswrapper[4998]: E0203 08:34:54.113549 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ffe4e62b3ff25fe3b05669117fb035cefd620ef3b80ab306a4a570f32f9994cf\": container with ID starting with ffe4e62b3ff25fe3b05669117fb035cefd620ef3b80ab306a4a570f32f9994cf not found: ID does not exist" containerID="ffe4e62b3ff25fe3b05669117fb035cefd620ef3b80ab306a4a570f32f9994cf" Feb 03 08:34:54 crc kubenswrapper[4998]: I0203 08:34:54.113636 4998 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ffe4e62b3ff25fe3b05669117fb035cefd620ef3b80ab306a4a570f32f9994cf"} err="failed to get container status \"ffe4e62b3ff25fe3b05669117fb035cefd620ef3b80ab306a4a570f32f9994cf\": rpc error: code = NotFound desc = could not find container \"ffe4e62b3ff25fe3b05669117fb035cefd620ef3b80ab306a4a570f32f9994cf\": container with ID starting with ffe4e62b3ff25fe3b05669117fb035cefd620ef3b80ab306a4a570f32f9994cf not found: ID does not exist" Feb 03 08:34:54 crc kubenswrapper[4998]: I0203 08:34:54.113676 4998 scope.go:117] "RemoveContainer" containerID="b393bde1f5f9a4b8990a501a0ef41ccc8bb1b2d358239cb2d13065a4312eeb58" Feb 03 08:34:54 crc kubenswrapper[4998]: E0203 08:34:54.114367 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b393bde1f5f9a4b8990a501a0ef41ccc8bb1b2d358239cb2d13065a4312eeb58\": container with ID starting with b393bde1f5f9a4b8990a501a0ef41ccc8bb1b2d358239cb2d13065a4312eeb58 not found: ID does not exist" containerID="b393bde1f5f9a4b8990a501a0ef41ccc8bb1b2d358239cb2d13065a4312eeb58" Feb 03 08:34:54 crc kubenswrapper[4998]: I0203 08:34:54.114416 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b393bde1f5f9a4b8990a501a0ef41ccc8bb1b2d358239cb2d13065a4312eeb58"} err="failed to get container status \"b393bde1f5f9a4b8990a501a0ef41ccc8bb1b2d358239cb2d13065a4312eeb58\": rpc error: code = NotFound desc = could not find container \"b393bde1f5f9a4b8990a501a0ef41ccc8bb1b2d358239cb2d13065a4312eeb58\": container with ID starting with b393bde1f5f9a4b8990a501a0ef41ccc8bb1b2d358239cb2d13065a4312eeb58 not found: ID does not exist" Feb 03 08:34:54 crc kubenswrapper[4998]: I0203 08:34:54.114453 4998 scope.go:117] "RemoveContainer" containerID="30aa4d5a0c88e9789b0a0d244be22340436963937c2f995253b9c76f7a970671" Feb 03 08:34:54 crc kubenswrapper[4998]: E0203 08:34:54.114849 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"30aa4d5a0c88e9789b0a0d244be22340436963937c2f995253b9c76f7a970671\": container with ID starting with 30aa4d5a0c88e9789b0a0d244be22340436963937c2f995253b9c76f7a970671 not found: ID does not exist" containerID="30aa4d5a0c88e9789b0a0d244be22340436963937c2f995253b9c76f7a970671" Feb 03 08:34:54 crc kubenswrapper[4998]: I0203 08:34:54.114877 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"30aa4d5a0c88e9789b0a0d244be22340436963937c2f995253b9c76f7a970671"} err="failed to get container status \"30aa4d5a0c88e9789b0a0d244be22340436963937c2f995253b9c76f7a970671\": rpc error: code = NotFound desc = could not find container \"30aa4d5a0c88e9789b0a0d244be22340436963937c2f995253b9c76f7a970671\": container with ID starting with 30aa4d5a0c88e9789b0a0d244be22340436963937c2f995253b9c76f7a970671 not found: ID does not exist" Feb 03 08:34:54 crc kubenswrapper[4998]: I0203 08:34:54.446354 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30ec2935-e48b-4a8c-bbd5-7406149b4689" path="/var/lib/kubelet/pods/30ec2935-e48b-4a8c-bbd5-7406149b4689/volumes" Feb 03 08:36:12 crc kubenswrapper[4998]: I0203 08:36:12.753831 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 08:36:12 crc kubenswrapper[4998]: I0203 08:36:12.754551 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 08:36:42 crc kubenswrapper[4998]: I0203 08:36:42.754691 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 08:36:42 crc kubenswrapper[4998]: I0203 08:36:42.755432 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 08:37:12 crc kubenswrapper[4998]: I0203 08:37:12.754444 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 08:37:12 crc kubenswrapper[4998]: I0203 08:37:12.755405 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 08:37:12 crc kubenswrapper[4998]: I0203 08:37:12.755469 4998 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" Feb 03 08:37:12 crc kubenswrapper[4998]: I0203 08:37:12.756349 4998 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"68a2e0b59f4a15c3fb480672ad458f573853ef14695099ba937f9507072081ed"} pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 03 08:37:12 crc kubenswrapper[4998]: I0203 08:37:12.756461 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" containerID="cri-o://68a2e0b59f4a15c3fb480672ad458f573853ef14695099ba937f9507072081ed" gracePeriod=600 Feb 03 08:37:12 crc kubenswrapper[4998]: E0203 08:37:12.879531 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:37:13 crc kubenswrapper[4998]: I0203 08:37:13.170003 4998 
generic.go:334] "Generic (PLEG): container finished" podID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerID="68a2e0b59f4a15c3fb480672ad458f573853ef14695099ba937f9507072081ed" exitCode=0 Feb 03 08:37:13 crc kubenswrapper[4998]: I0203 08:37:13.170082 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerDied","Data":"68a2e0b59f4a15c3fb480672ad458f573853ef14695099ba937f9507072081ed"} Feb 03 08:37:13 crc kubenswrapper[4998]: I0203 08:37:13.170136 4998 scope.go:117] "RemoveContainer" containerID="b31c709b422a270b2cf6ad9da60b503550a133b2bc889255c1a5bd00ca0f9e9c" Feb 03 08:37:13 crc kubenswrapper[4998]: I0203 08:37:13.171189 4998 scope.go:117] "RemoveContainer" containerID="68a2e0b59f4a15c3fb480672ad458f573853ef14695099ba937f9507072081ed" Feb 03 08:37:13 crc kubenswrapper[4998]: E0203 08:37:13.171648 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:37:25 crc kubenswrapper[4998]: I0203 08:37:25.428718 4998 scope.go:117] "RemoveContainer" containerID="68a2e0b59f4a15c3fb480672ad458f573853ef14695099ba937f9507072081ed" Feb 03 08:37:25 crc kubenswrapper[4998]: E0203 08:37:25.430731 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:37:37 crc kubenswrapper[4998]: I0203 08:37:37.427074 4998 scope.go:117] "RemoveContainer" containerID="68a2e0b59f4a15c3fb480672ad458f573853ef14695099ba937f9507072081ed" Feb 03 08:37:37 crc kubenswrapper[4998]: E0203 08:37:37.428100 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:37:52 crc kubenswrapper[4998]: I0203 08:37:52.433704 4998 scope.go:117] "RemoveContainer" containerID="68a2e0b59f4a15c3fb480672ad458f573853ef14695099ba937f9507072081ed" Feb 03 08:37:52 crc kubenswrapper[4998]: E0203 08:37:52.434581 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:38:04 crc kubenswrapper[4998]: I0203 08:38:04.427917 4998 scope.go:117] "RemoveContainer" 
containerID="68a2e0b59f4a15c3fb480672ad458f573853ef14695099ba937f9507072081ed" Feb 03 08:38:04 crc kubenswrapper[4998]: E0203 08:38:04.428743 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:38:19 crc kubenswrapper[4998]: I0203 08:38:19.428170 4998 scope.go:117] "RemoveContainer" containerID="68a2e0b59f4a15c3fb480672ad458f573853ef14695099ba937f9507072081ed" Feb 03 08:38:19 crc kubenswrapper[4998]: E0203 08:38:19.429067 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:38:32 crc kubenswrapper[4998]: I0203 08:38:32.433698 4998 scope.go:117] "RemoveContainer" containerID="68a2e0b59f4a15c3fb480672ad458f573853ef14695099ba937f9507072081ed" Feb 03 08:38:32 crc kubenswrapper[4998]: E0203 08:38:32.434993 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:38:45 crc kubenswrapper[4998]: I0203 08:38:45.427340 4998 scope.go:117] "RemoveContainer" containerID="68a2e0b59f4a15c3fb480672ad458f573853ef14695099ba937f9507072081ed" Feb 03 08:38:45 crc kubenswrapper[4998]: E0203 08:38:45.428191 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:39:00 crc kubenswrapper[4998]: I0203 08:39:00.427977 4998 scope.go:117] "RemoveContainer" containerID="68a2e0b59f4a15c3fb480672ad458f573853ef14695099ba937f9507072081ed" Feb 03 08:39:00 crc kubenswrapper[4998]: E0203 08:39:00.428700 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:39:11 crc kubenswrapper[4998]: I0203 08:39:11.067247 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vzq2h"] Feb 03 08:39:11 crc kubenswrapper[4998]: E0203 08:39:11.068107 4998 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="30ec2935-e48b-4a8c-bbd5-7406149b4689" containerName="extract-content" Feb 03 08:39:11 crc kubenswrapper[4998]: I0203 08:39:11.068122 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="30ec2935-e48b-4a8c-bbd5-7406149b4689" containerName="extract-content" Feb 03 08:39:11 crc kubenswrapper[4998]: E0203 08:39:11.068140 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30ec2935-e48b-4a8c-bbd5-7406149b4689" containerName="extract-utilities" Feb 03 08:39:11 crc kubenswrapper[4998]: I0203 08:39:11.068150 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="30ec2935-e48b-4a8c-bbd5-7406149b4689" containerName="extract-utilities" Feb 03 08:39:11 crc kubenswrapper[4998]: E0203 08:39:11.068172 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30ec2935-e48b-4a8c-bbd5-7406149b4689" containerName="registry-server" Feb 03 08:39:11 crc kubenswrapper[4998]: I0203 08:39:11.068180 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="30ec2935-e48b-4a8c-bbd5-7406149b4689" containerName="registry-server" Feb 03 08:39:11 crc kubenswrapper[4998]: I0203 08:39:11.068354 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="30ec2935-e48b-4a8c-bbd5-7406149b4689" containerName="registry-server" Feb 03 08:39:11 crc kubenswrapper[4998]: I0203 08:39:11.070485 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vzq2h" Feb 03 08:39:11 crc kubenswrapper[4998]: I0203 08:39:11.080933 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vzq2h"] Feb 03 08:39:11 crc kubenswrapper[4998]: I0203 08:39:11.188491 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7cd64394-de66-4c54-b07c-4d358fae0ea3-utilities\") pod \"redhat-operators-vzq2h\" (UID: \"7cd64394-de66-4c54-b07c-4d358fae0ea3\") " pod="openshift-marketplace/redhat-operators-vzq2h" Feb 03 08:39:11 crc kubenswrapper[4998]: I0203 08:39:11.188845 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7cd64394-de66-4c54-b07c-4d358fae0ea3-catalog-content\") pod \"redhat-operators-vzq2h\" (UID: \"7cd64394-de66-4c54-b07c-4d358fae0ea3\") " pod="openshift-marketplace/redhat-operators-vzq2h" Feb 03 08:39:11 crc kubenswrapper[4998]: I0203 08:39:11.188872 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-br726\" (UniqueName: \"kubernetes.io/projected/7cd64394-de66-4c54-b07c-4d358fae0ea3-kube-api-access-br726\") pod \"redhat-operators-vzq2h\" (UID: \"7cd64394-de66-4c54-b07c-4d358fae0ea3\") " pod="openshift-marketplace/redhat-operators-vzq2h" Feb 03 08:39:11 crc kubenswrapper[4998]: I0203 08:39:11.290403 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7cd64394-de66-4c54-b07c-4d358fae0ea3-utilities\") pod \"redhat-operators-vzq2h\" (UID: \"7cd64394-de66-4c54-b07c-4d358fae0ea3\") " pod="openshift-marketplace/redhat-operators-vzq2h" Feb 03 08:39:11 crc kubenswrapper[4998]: I0203 08:39:11.290496 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7cd64394-de66-4c54-b07c-4d358fae0ea3-catalog-content\") pod \"redhat-operators-vzq2h\" (UID: 
\"7cd64394-de66-4c54-b07c-4d358fae0ea3\") " pod="openshift-marketplace/redhat-operators-vzq2h" Feb 03 08:39:11 crc kubenswrapper[4998]: I0203 08:39:11.290526 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-br726\" (UniqueName: \"kubernetes.io/projected/7cd64394-de66-4c54-b07c-4d358fae0ea3-kube-api-access-br726\") pod \"redhat-operators-vzq2h\" (UID: \"7cd64394-de66-4c54-b07c-4d358fae0ea3\") " pod="openshift-marketplace/redhat-operators-vzq2h" Feb 03 08:39:11 crc kubenswrapper[4998]: I0203 08:39:11.290971 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7cd64394-de66-4c54-b07c-4d358fae0ea3-utilities\") pod \"redhat-operators-vzq2h\" (UID: \"7cd64394-de66-4c54-b07c-4d358fae0ea3\") " pod="openshift-marketplace/redhat-operators-vzq2h" Feb 03 08:39:11 crc kubenswrapper[4998]: I0203 08:39:11.291369 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7cd64394-de66-4c54-b07c-4d358fae0ea3-catalog-content\") pod \"redhat-operators-vzq2h\" (UID: \"7cd64394-de66-4c54-b07c-4d358fae0ea3\") " pod="openshift-marketplace/redhat-operators-vzq2h" Feb 03 08:39:11 crc kubenswrapper[4998]: I0203 08:39:11.317164 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-br726\" (UniqueName: \"kubernetes.io/projected/7cd64394-de66-4c54-b07c-4d358fae0ea3-kube-api-access-br726\") pod \"redhat-operators-vzq2h\" (UID: \"7cd64394-de66-4c54-b07c-4d358fae0ea3\") " pod="openshift-marketplace/redhat-operators-vzq2h" Feb 03 08:39:11 crc kubenswrapper[4998]: I0203 08:39:11.393988 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vzq2h" Feb 03 08:39:11 crc kubenswrapper[4998]: I0203 08:39:11.428687 4998 scope.go:117] "RemoveContainer" containerID="68a2e0b59f4a15c3fb480672ad458f573853ef14695099ba937f9507072081ed" Feb 03 08:39:11 crc kubenswrapper[4998]: E0203 08:39:11.428937 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:39:11 crc kubenswrapper[4998]: I0203 08:39:11.808982 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vzq2h"] Feb 03 08:39:12 crc kubenswrapper[4998]: I0203 08:39:12.090771 4998 generic.go:334] "Generic (PLEG): container finished" podID="7cd64394-de66-4c54-b07c-4d358fae0ea3" containerID="23574146b1798bd83469510e7dbe49199b798861bd9065fba8290f05cc0422e8" exitCode=0 Feb 03 08:39:12 crc kubenswrapper[4998]: I0203 08:39:12.090875 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vzq2h" event={"ID":"7cd64394-de66-4c54-b07c-4d358fae0ea3","Type":"ContainerDied","Data":"23574146b1798bd83469510e7dbe49199b798861bd9065fba8290f05cc0422e8"} Feb 03 08:39:12 crc kubenswrapper[4998]: I0203 08:39:12.090906 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vzq2h" 
event={"ID":"7cd64394-de66-4c54-b07c-4d358fae0ea3","Type":"ContainerStarted","Data":"0aa2e0ee6c49367cfdc7b478f5da0f402e49cf886b3dd136409fc39224238def"} Feb 03 08:39:12 crc kubenswrapper[4998]: I0203 08:39:12.093263 4998 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 03 08:39:14 crc kubenswrapper[4998]: I0203 08:39:14.105801 4998 generic.go:334] "Generic (PLEG): container finished" podID="7cd64394-de66-4c54-b07c-4d358fae0ea3" containerID="d44e17ed91eb491285e5aa18ea689486a886bf38df72eabb382cf4847b8d390c" exitCode=0 Feb 03 08:39:14 crc kubenswrapper[4998]: I0203 08:39:14.105899 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vzq2h" event={"ID":"7cd64394-de66-4c54-b07c-4d358fae0ea3","Type":"ContainerDied","Data":"d44e17ed91eb491285e5aa18ea689486a886bf38df72eabb382cf4847b8d390c"} Feb 03 08:39:15 crc kubenswrapper[4998]: I0203 08:39:15.117485 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vzq2h" event={"ID":"7cd64394-de66-4c54-b07c-4d358fae0ea3","Type":"ContainerStarted","Data":"ae2798f927b62e3ee60d62b51430bd79d36183a73a895d5c3d4f6a8acb43cdeb"} Feb 03 08:39:15 crc kubenswrapper[4998]: I0203 08:39:15.138346 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vzq2h" podStartSLOduration=1.666587925 podStartE2EDuration="4.138318821s" podCreationTimestamp="2026-02-03 08:39:11 +0000 UTC" firstStartedPulling="2026-02-03 08:39:12.09304372 +0000 UTC m=+6790.379737526" lastFinishedPulling="2026-02-03 08:39:14.564774606 +0000 UTC m=+6792.851468422" observedRunningTime="2026-02-03 08:39:15.136630953 +0000 UTC m=+6793.423324759" watchObservedRunningTime="2026-02-03 08:39:15.138318821 +0000 UTC m=+6793.425012617" Feb 03 08:39:21 crc kubenswrapper[4998]: I0203 08:39:21.394714 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vzq2h" Feb 03 08:39:21 crc kubenswrapper[4998]: I0203 08:39:21.395093 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vzq2h" Feb 03 08:39:21 crc kubenswrapper[4998]: I0203 08:39:21.456006 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vzq2h" Feb 03 08:39:22 crc kubenswrapper[4998]: I0203 08:39:22.235810 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vzq2h" Feb 03 08:39:22 crc kubenswrapper[4998]: I0203 08:39:22.300755 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vzq2h"] Feb 03 08:39:23 crc kubenswrapper[4998]: I0203 08:39:23.427882 4998 scope.go:117] "RemoveContainer" containerID="68a2e0b59f4a15c3fb480672ad458f573853ef14695099ba937f9507072081ed" Feb 03 08:39:23 crc kubenswrapper[4998]: E0203 08:39:23.428082 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:39:24 crc kubenswrapper[4998]: I0203 08:39:24.191712 4998 kuberuntime_container.go:808] "Killing container with 
a grace period" pod="openshift-marketplace/redhat-operators-vzq2h" podUID="7cd64394-de66-4c54-b07c-4d358fae0ea3" containerName="registry-server" containerID="cri-o://ae2798f927b62e3ee60d62b51430bd79d36183a73a895d5c3d4f6a8acb43cdeb" gracePeriod=2 Feb 03 08:39:26 crc kubenswrapper[4998]: I0203 08:39:26.210264 4998 generic.go:334] "Generic (PLEG): container finished" podID="7cd64394-de66-4c54-b07c-4d358fae0ea3" containerID="ae2798f927b62e3ee60d62b51430bd79d36183a73a895d5c3d4f6a8acb43cdeb" exitCode=0 Feb 03 08:39:26 crc kubenswrapper[4998]: I0203 08:39:26.210336 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vzq2h" event={"ID":"7cd64394-de66-4c54-b07c-4d358fae0ea3","Type":"ContainerDied","Data":"ae2798f927b62e3ee60d62b51430bd79d36183a73a895d5c3d4f6a8acb43cdeb"} Feb 03 08:39:26 crc kubenswrapper[4998]: I0203 08:39:26.431358 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vzq2h" Feb 03 08:39:26 crc kubenswrapper[4998]: I0203 08:39:26.605079 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-br726\" (UniqueName: \"kubernetes.io/projected/7cd64394-de66-4c54-b07c-4d358fae0ea3-kube-api-access-br726\") pod \"7cd64394-de66-4c54-b07c-4d358fae0ea3\" (UID: \"7cd64394-de66-4c54-b07c-4d358fae0ea3\") " Feb 03 08:39:26 crc kubenswrapper[4998]: I0203 08:39:26.605127 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7cd64394-de66-4c54-b07c-4d358fae0ea3-catalog-content\") pod \"7cd64394-de66-4c54-b07c-4d358fae0ea3\" (UID: \"7cd64394-de66-4c54-b07c-4d358fae0ea3\") " Feb 03 08:39:26 crc kubenswrapper[4998]: I0203 08:39:26.605215 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7cd64394-de66-4c54-b07c-4d358fae0ea3-utilities\") pod \"7cd64394-de66-4c54-b07c-4d358fae0ea3\" (UID: \"7cd64394-de66-4c54-b07c-4d358fae0ea3\") " Feb 03 08:39:26 crc kubenswrapper[4998]: I0203 08:39:26.606340 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7cd64394-de66-4c54-b07c-4d358fae0ea3-utilities" (OuterVolumeSpecName: "utilities") pod "7cd64394-de66-4c54-b07c-4d358fae0ea3" (UID: "7cd64394-de66-4c54-b07c-4d358fae0ea3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:39:26 crc kubenswrapper[4998]: I0203 08:39:26.610863 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7cd64394-de66-4c54-b07c-4d358fae0ea3-kube-api-access-br726" (OuterVolumeSpecName: "kube-api-access-br726") pod "7cd64394-de66-4c54-b07c-4d358fae0ea3" (UID: "7cd64394-de66-4c54-b07c-4d358fae0ea3"). InnerVolumeSpecName "kube-api-access-br726". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:39:26 crc kubenswrapper[4998]: I0203 08:39:26.706544 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7cd64394-de66-4c54-b07c-4d358fae0ea3-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 08:39:26 crc kubenswrapper[4998]: I0203 08:39:26.706586 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-br726\" (UniqueName: \"kubernetes.io/projected/7cd64394-de66-4c54-b07c-4d358fae0ea3-kube-api-access-br726\") on node \"crc\" DevicePath \"\"" Feb 03 08:39:26 crc kubenswrapper[4998]: I0203 08:39:26.728669 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7cd64394-de66-4c54-b07c-4d358fae0ea3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7cd64394-de66-4c54-b07c-4d358fae0ea3" (UID: "7cd64394-de66-4c54-b07c-4d358fae0ea3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:39:26 crc kubenswrapper[4998]: I0203 08:39:26.807810 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7cd64394-de66-4c54-b07c-4d358fae0ea3-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 08:39:27 crc kubenswrapper[4998]: I0203 08:39:27.227602 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vzq2h" event={"ID":"7cd64394-de66-4c54-b07c-4d358fae0ea3","Type":"ContainerDied","Data":"0aa2e0ee6c49367cfdc7b478f5da0f402e49cf886b3dd136409fc39224238def"} Feb 03 08:39:27 crc kubenswrapper[4998]: I0203 08:39:27.227688 4998 scope.go:117] "RemoveContainer" containerID="ae2798f927b62e3ee60d62b51430bd79d36183a73a895d5c3d4f6a8acb43cdeb" Feb 03 08:39:27 crc kubenswrapper[4998]: I0203 08:39:27.227745 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vzq2h" Feb 03 08:39:27 crc kubenswrapper[4998]: I0203 08:39:27.261453 4998 scope.go:117] "RemoveContainer" containerID="d44e17ed91eb491285e5aa18ea689486a886bf38df72eabb382cf4847b8d390c" Feb 03 08:39:27 crc kubenswrapper[4998]: I0203 08:39:27.284016 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vzq2h"] Feb 03 08:39:27 crc kubenswrapper[4998]: I0203 08:39:27.309503 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-vzq2h"] Feb 03 08:39:27 crc kubenswrapper[4998]: I0203 08:39:27.312328 4998 scope.go:117] "RemoveContainer" containerID="23574146b1798bd83469510e7dbe49199b798861bd9065fba8290f05cc0422e8" Feb 03 08:39:28 crc kubenswrapper[4998]: I0203 08:39:28.437480 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7cd64394-de66-4c54-b07c-4d358fae0ea3" path="/var/lib/kubelet/pods/7cd64394-de66-4c54-b07c-4d358fae0ea3/volumes" Feb 03 08:39:35 crc kubenswrapper[4998]: I0203 08:39:35.428339 4998 scope.go:117] "RemoveContainer" containerID="68a2e0b59f4a15c3fb480672ad458f573853ef14695099ba937f9507072081ed" Feb 03 08:39:35 crc kubenswrapper[4998]: E0203 08:39:35.429253 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:39:50 crc kubenswrapper[4998]: I0203 08:39:50.427269 4998 scope.go:117] "RemoveContainer" containerID="68a2e0b59f4a15c3fb480672ad458f573853ef14695099ba937f9507072081ed" Feb 03 08:39:50 crc kubenswrapper[4998]: E0203 08:39:50.429367 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:40:01 crc kubenswrapper[4998]: I0203 08:40:01.428058 4998 scope.go:117] "RemoveContainer" containerID="68a2e0b59f4a15c3fb480672ad458f573853ef14695099ba937f9507072081ed" Feb 03 08:40:01 crc kubenswrapper[4998]: E0203 08:40:01.429437 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:40:16 crc kubenswrapper[4998]: I0203 08:40:16.427852 4998 scope.go:117] "RemoveContainer" containerID="68a2e0b59f4a15c3fb480672ad458f573853ef14695099ba937f9507072081ed" Feb 03 08:40:16 crc kubenswrapper[4998]: E0203 08:40:16.428631 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:40:27 crc kubenswrapper[4998]: I0203 08:40:27.428040 4998 scope.go:117] "RemoveContainer" containerID="68a2e0b59f4a15c3fb480672ad458f573853ef14695099ba937f9507072081ed" Feb 03 08:40:27 crc kubenswrapper[4998]: E0203 08:40:27.430866 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:40:38 crc kubenswrapper[4998]: I0203 08:40:38.427959 4998 scope.go:117] "RemoveContainer" containerID="68a2e0b59f4a15c3fb480672ad458f573853ef14695099ba937f9507072081ed" Feb 03 08:40:38 crc kubenswrapper[4998]: E0203 08:40:38.428847 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:40:53 crc kubenswrapper[4998]: I0203 08:40:53.427321 4998 scope.go:117] "RemoveContainer" containerID="68a2e0b59f4a15c3fb480672ad458f573853ef14695099ba937f9507072081ed" Feb 03 08:40:53 crc kubenswrapper[4998]: E0203 08:40:53.428036 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:41:05 crc kubenswrapper[4998]: I0203 08:41:05.427399 4998 scope.go:117] "RemoveContainer" containerID="68a2e0b59f4a15c3fb480672ad458f573853ef14695099ba937f9507072081ed" Feb 03 08:41:05 crc kubenswrapper[4998]: E0203 08:41:05.428250 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:41:20 crc kubenswrapper[4998]: I0203 08:41:20.427838 4998 scope.go:117] "RemoveContainer" containerID="68a2e0b59f4a15c3fb480672ad458f573853ef14695099ba937f9507072081ed" Feb 03 08:41:20 crc kubenswrapper[4998]: E0203 08:41:20.428735 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" 
podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:41:24 crc kubenswrapper[4998]: I0203 08:41:24.563603 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-p7mlq"] Feb 03 08:41:24 crc kubenswrapper[4998]: I0203 08:41:24.574059 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-p7mlq"] Feb 03 08:41:24 crc kubenswrapper[4998]: I0203 08:41:24.671703 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-zlmxj"] Feb 03 08:41:24 crc kubenswrapper[4998]: E0203 08:41:24.672157 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cd64394-de66-4c54-b07c-4d358fae0ea3" containerName="extract-content" Feb 03 08:41:24 crc kubenswrapper[4998]: I0203 08:41:24.672184 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cd64394-de66-4c54-b07c-4d358fae0ea3" containerName="extract-content" Feb 03 08:41:24 crc kubenswrapper[4998]: E0203 08:41:24.672223 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cd64394-de66-4c54-b07c-4d358fae0ea3" containerName="extract-utilities" Feb 03 08:41:24 crc kubenswrapper[4998]: I0203 08:41:24.672238 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cd64394-de66-4c54-b07c-4d358fae0ea3" containerName="extract-utilities" Feb 03 08:41:24 crc kubenswrapper[4998]: E0203 08:41:24.672261 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cd64394-de66-4c54-b07c-4d358fae0ea3" containerName="registry-server" Feb 03 08:41:24 crc kubenswrapper[4998]: I0203 08:41:24.672272 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cd64394-de66-4c54-b07c-4d358fae0ea3" containerName="registry-server" Feb 03 08:41:24 crc kubenswrapper[4998]: I0203 08:41:24.672510 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="7cd64394-de66-4c54-b07c-4d358fae0ea3" containerName="registry-server" Feb 03 08:41:24 crc kubenswrapper[4998]: I0203 08:41:24.673446 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-zlmxj" Feb 03 08:41:24 crc kubenswrapper[4998]: I0203 08:41:24.676397 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Feb 03 08:41:24 crc kubenswrapper[4998]: I0203 08:41:24.676711 4998 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-tk9nc" Feb 03 08:41:24 crc kubenswrapper[4998]: I0203 08:41:24.676556 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Feb 03 08:41:24 crc kubenswrapper[4998]: I0203 08:41:24.681202 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-zlmxj"] Feb 03 08:41:24 crc kubenswrapper[4998]: I0203 08:41:24.684875 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Feb 03 08:41:24 crc kubenswrapper[4998]: I0203 08:41:24.829721 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/ac9d1695-ef7f-4c3e-a913-873800faa884-crc-storage\") pod \"crc-storage-crc-zlmxj\" (UID: \"ac9d1695-ef7f-4c3e-a913-873800faa884\") " pod="crc-storage/crc-storage-crc-zlmxj" Feb 03 08:41:24 crc kubenswrapper[4998]: I0203 08:41:24.829833 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/ac9d1695-ef7f-4c3e-a913-873800faa884-node-mnt\") pod \"crc-storage-crc-zlmxj\" (UID: \"ac9d1695-ef7f-4c3e-a913-873800faa884\") " pod="crc-storage/crc-storage-crc-zlmxj" Feb 03 08:41:24 crc kubenswrapper[4998]: I0203 08:41:24.829887 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-plftp\" (UniqueName: \"kubernetes.io/projected/ac9d1695-ef7f-4c3e-a913-873800faa884-kube-api-access-plftp\") pod \"crc-storage-crc-zlmxj\" (UID: \"ac9d1695-ef7f-4c3e-a913-873800faa884\") " pod="crc-storage/crc-storage-crc-zlmxj" Feb 03 08:41:24 crc kubenswrapper[4998]: I0203 08:41:24.931416 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-plftp\" (UniqueName: \"kubernetes.io/projected/ac9d1695-ef7f-4c3e-a913-873800faa884-kube-api-access-plftp\") pod \"crc-storage-crc-zlmxj\" (UID: \"ac9d1695-ef7f-4c3e-a913-873800faa884\") " pod="crc-storage/crc-storage-crc-zlmxj" Feb 03 08:41:24 crc kubenswrapper[4998]: I0203 08:41:24.931490 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/ac9d1695-ef7f-4c3e-a913-873800faa884-crc-storage\") pod \"crc-storage-crc-zlmxj\" (UID: \"ac9d1695-ef7f-4c3e-a913-873800faa884\") " pod="crc-storage/crc-storage-crc-zlmxj" Feb 03 08:41:24 crc kubenswrapper[4998]: I0203 08:41:24.931531 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/ac9d1695-ef7f-4c3e-a913-873800faa884-node-mnt\") pod \"crc-storage-crc-zlmxj\" (UID: \"ac9d1695-ef7f-4c3e-a913-873800faa884\") " pod="crc-storage/crc-storage-crc-zlmxj" Feb 03 08:41:24 crc kubenswrapper[4998]: I0203 08:41:24.931803 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/ac9d1695-ef7f-4c3e-a913-873800faa884-node-mnt\") pod \"crc-storage-crc-zlmxj\" (UID: \"ac9d1695-ef7f-4c3e-a913-873800faa884\") " 
pod="crc-storage/crc-storage-crc-zlmxj" Feb 03 08:41:24 crc kubenswrapper[4998]: I0203 08:41:24.932333 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/ac9d1695-ef7f-4c3e-a913-873800faa884-crc-storage\") pod \"crc-storage-crc-zlmxj\" (UID: \"ac9d1695-ef7f-4c3e-a913-873800faa884\") " pod="crc-storage/crc-storage-crc-zlmxj" Feb 03 08:41:24 crc kubenswrapper[4998]: I0203 08:41:24.949214 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-plftp\" (UniqueName: \"kubernetes.io/projected/ac9d1695-ef7f-4c3e-a913-873800faa884-kube-api-access-plftp\") pod \"crc-storage-crc-zlmxj\" (UID: \"ac9d1695-ef7f-4c3e-a913-873800faa884\") " pod="crc-storage/crc-storage-crc-zlmxj" Feb 03 08:41:24 crc kubenswrapper[4998]: I0203 08:41:24.994446 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-zlmxj" Feb 03 08:41:25 crc kubenswrapper[4998]: I0203 08:41:25.454116 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-zlmxj"] Feb 03 08:41:26 crc kubenswrapper[4998]: I0203 08:41:26.076732 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-zlmxj" event={"ID":"ac9d1695-ef7f-4c3e-a913-873800faa884","Type":"ContainerStarted","Data":"4e02c48621421ecf391993be7b252f6a96ea0c3817c38ce9a47263ff7a4d573d"} Feb 03 08:41:26 crc kubenswrapper[4998]: I0203 08:41:26.436000 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53366df6-d5e6-42e3-883e-8050cb593284" path="/var/lib/kubelet/pods/53366df6-d5e6-42e3-883e-8050cb593284/volumes" Feb 03 08:41:27 crc kubenswrapper[4998]: I0203 08:41:27.086656 4998 generic.go:334] "Generic (PLEG): container finished" podID="ac9d1695-ef7f-4c3e-a913-873800faa884" containerID="a4845f5f068c50e40c8d58b551b4fbb27bca9cc759dfa4396550c4071d02189d" exitCode=0 Feb 03 08:41:27 crc kubenswrapper[4998]: I0203 08:41:27.086716 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-zlmxj" event={"ID":"ac9d1695-ef7f-4c3e-a913-873800faa884","Type":"ContainerDied","Data":"a4845f5f068c50e40c8d58b551b4fbb27bca9cc759dfa4396550c4071d02189d"} Feb 03 08:41:28 crc kubenswrapper[4998]: I0203 08:41:28.399513 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-zlmxj" Feb 03 08:41:28 crc kubenswrapper[4998]: I0203 08:41:28.583272 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/ac9d1695-ef7f-4c3e-a913-873800faa884-node-mnt\") pod \"ac9d1695-ef7f-4c3e-a913-873800faa884\" (UID: \"ac9d1695-ef7f-4c3e-a913-873800faa884\") " Feb 03 08:41:28 crc kubenswrapper[4998]: I0203 08:41:28.583362 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-plftp\" (UniqueName: \"kubernetes.io/projected/ac9d1695-ef7f-4c3e-a913-873800faa884-kube-api-access-plftp\") pod \"ac9d1695-ef7f-4c3e-a913-873800faa884\" (UID: \"ac9d1695-ef7f-4c3e-a913-873800faa884\") " Feb 03 08:41:28 crc kubenswrapper[4998]: I0203 08:41:28.583469 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/ac9d1695-ef7f-4c3e-a913-873800faa884-crc-storage\") pod \"ac9d1695-ef7f-4c3e-a913-873800faa884\" (UID: \"ac9d1695-ef7f-4c3e-a913-873800faa884\") " Feb 03 08:41:28 crc kubenswrapper[4998]: I0203 08:41:28.584844 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ac9d1695-ef7f-4c3e-a913-873800faa884-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "ac9d1695-ef7f-4c3e-a913-873800faa884" (UID: "ac9d1695-ef7f-4c3e-a913-873800faa884"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 08:41:28 crc kubenswrapper[4998]: I0203 08:41:28.589713 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac9d1695-ef7f-4c3e-a913-873800faa884-kube-api-access-plftp" (OuterVolumeSpecName: "kube-api-access-plftp") pod "ac9d1695-ef7f-4c3e-a913-873800faa884" (UID: "ac9d1695-ef7f-4c3e-a913-873800faa884"). InnerVolumeSpecName "kube-api-access-plftp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:41:28 crc kubenswrapper[4998]: I0203 08:41:28.605027 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac9d1695-ef7f-4c3e-a913-873800faa884-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "ac9d1695-ef7f-4c3e-a913-873800faa884" (UID: "ac9d1695-ef7f-4c3e-a913-873800faa884"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:41:28 crc kubenswrapper[4998]: I0203 08:41:28.685363 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-plftp\" (UniqueName: \"kubernetes.io/projected/ac9d1695-ef7f-4c3e-a913-873800faa884-kube-api-access-plftp\") on node \"crc\" DevicePath \"\"" Feb 03 08:41:28 crc kubenswrapper[4998]: I0203 08:41:28.685400 4998 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/ac9d1695-ef7f-4c3e-a913-873800faa884-crc-storage\") on node \"crc\" DevicePath \"\"" Feb 03 08:41:28 crc kubenswrapper[4998]: I0203 08:41:28.685425 4998 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/ac9d1695-ef7f-4c3e-a913-873800faa884-node-mnt\") on node \"crc\" DevicePath \"\"" Feb 03 08:41:29 crc kubenswrapper[4998]: I0203 08:41:29.100479 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-zlmxj" event={"ID":"ac9d1695-ef7f-4c3e-a913-873800faa884","Type":"ContainerDied","Data":"4e02c48621421ecf391993be7b252f6a96ea0c3817c38ce9a47263ff7a4d573d"} Feb 03 08:41:29 crc kubenswrapper[4998]: I0203 08:41:29.100520 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4e02c48621421ecf391993be7b252f6a96ea0c3817c38ce9a47263ff7a4d573d" Feb 03 08:41:29 crc kubenswrapper[4998]: I0203 08:41:29.100568 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-zlmxj" Feb 03 08:41:30 crc kubenswrapper[4998]: I0203 08:41:30.501999 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-zlmxj"] Feb 03 08:41:30 crc kubenswrapper[4998]: I0203 08:41:30.508128 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-zlmxj"] Feb 03 08:41:30 crc kubenswrapper[4998]: I0203 08:41:30.653351 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-x28xk"] Feb 03 08:41:30 crc kubenswrapper[4998]: E0203 08:41:30.653692 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac9d1695-ef7f-4c3e-a913-873800faa884" containerName="storage" Feb 03 08:41:30 crc kubenswrapper[4998]: I0203 08:41:30.653711 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac9d1695-ef7f-4c3e-a913-873800faa884" containerName="storage" Feb 03 08:41:30 crc kubenswrapper[4998]: I0203 08:41:30.653894 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac9d1695-ef7f-4c3e-a913-873800faa884" containerName="storage" Feb 03 08:41:30 crc kubenswrapper[4998]: I0203 08:41:30.654302 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-x28xk" Feb 03 08:41:30 crc kubenswrapper[4998]: I0203 08:41:30.659419 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Feb 03 08:41:30 crc kubenswrapper[4998]: I0203 08:41:30.659672 4998 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-tk9nc" Feb 03 08:41:30 crc kubenswrapper[4998]: I0203 08:41:30.659849 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Feb 03 08:41:30 crc kubenswrapper[4998]: I0203 08:41:30.659990 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Feb 03 08:41:30 crc kubenswrapper[4998]: I0203 08:41:30.664837 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-x28xk"] Feb 03 08:41:30 crc kubenswrapper[4998]: I0203 08:41:30.813371 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/49cd705c-3f48-490c-b78e-27293413bbf1-crc-storage\") pod \"crc-storage-crc-x28xk\" (UID: \"49cd705c-3f48-490c-b78e-27293413bbf1\") " pod="crc-storage/crc-storage-crc-x28xk" Feb 03 08:41:30 crc kubenswrapper[4998]: I0203 08:41:30.813439 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/49cd705c-3f48-490c-b78e-27293413bbf1-node-mnt\") pod \"crc-storage-crc-x28xk\" (UID: \"49cd705c-3f48-490c-b78e-27293413bbf1\") " pod="crc-storage/crc-storage-crc-x28xk" Feb 03 08:41:30 crc kubenswrapper[4998]: I0203 08:41:30.813564 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bw6sj\" (UniqueName: \"kubernetes.io/projected/49cd705c-3f48-490c-b78e-27293413bbf1-kube-api-access-bw6sj\") pod \"crc-storage-crc-x28xk\" (UID: \"49cd705c-3f48-490c-b78e-27293413bbf1\") " pod="crc-storage/crc-storage-crc-x28xk" Feb 03 08:41:30 crc kubenswrapper[4998]: I0203 08:41:30.914947 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bw6sj\" (UniqueName: \"kubernetes.io/projected/49cd705c-3f48-490c-b78e-27293413bbf1-kube-api-access-bw6sj\") pod \"crc-storage-crc-x28xk\" (UID: \"49cd705c-3f48-490c-b78e-27293413bbf1\") " pod="crc-storage/crc-storage-crc-x28xk" Feb 03 08:41:30 crc kubenswrapper[4998]: I0203 08:41:30.915033 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/49cd705c-3f48-490c-b78e-27293413bbf1-crc-storage\") pod \"crc-storage-crc-x28xk\" (UID: \"49cd705c-3f48-490c-b78e-27293413bbf1\") " pod="crc-storage/crc-storage-crc-x28xk" Feb 03 08:41:30 crc kubenswrapper[4998]: I0203 08:41:30.915076 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/49cd705c-3f48-490c-b78e-27293413bbf1-node-mnt\") pod \"crc-storage-crc-x28xk\" (UID: \"49cd705c-3f48-490c-b78e-27293413bbf1\") " pod="crc-storage/crc-storage-crc-x28xk" Feb 03 08:41:30 crc kubenswrapper[4998]: I0203 08:41:30.915273 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/49cd705c-3f48-490c-b78e-27293413bbf1-node-mnt\") pod \"crc-storage-crc-x28xk\" (UID: \"49cd705c-3f48-490c-b78e-27293413bbf1\") " 
pod="crc-storage/crc-storage-crc-x28xk" Feb 03 08:41:30 crc kubenswrapper[4998]: I0203 08:41:30.915983 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/49cd705c-3f48-490c-b78e-27293413bbf1-crc-storage\") pod \"crc-storage-crc-x28xk\" (UID: \"49cd705c-3f48-490c-b78e-27293413bbf1\") " pod="crc-storage/crc-storage-crc-x28xk" Feb 03 08:41:30 crc kubenswrapper[4998]: I0203 08:41:30.931650 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bw6sj\" (UniqueName: \"kubernetes.io/projected/49cd705c-3f48-490c-b78e-27293413bbf1-kube-api-access-bw6sj\") pod \"crc-storage-crc-x28xk\" (UID: \"49cd705c-3f48-490c-b78e-27293413bbf1\") " pod="crc-storage/crc-storage-crc-x28xk" Feb 03 08:41:30 crc kubenswrapper[4998]: I0203 08:41:30.972807 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-x28xk" Feb 03 08:41:31 crc kubenswrapper[4998]: I0203 08:41:31.401660 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-x28xk"] Feb 03 08:41:32 crc kubenswrapper[4998]: I0203 08:41:32.135094 4998 generic.go:334] "Generic (PLEG): container finished" podID="49cd705c-3f48-490c-b78e-27293413bbf1" containerID="0a269ad3d6cdf9a23c646599c94854ec38eb30dee2476409722e14377d4ef3aa" exitCode=0 Feb 03 08:41:32 crc kubenswrapper[4998]: I0203 08:41:32.135203 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-x28xk" event={"ID":"49cd705c-3f48-490c-b78e-27293413bbf1","Type":"ContainerDied","Data":"0a269ad3d6cdf9a23c646599c94854ec38eb30dee2476409722e14377d4ef3aa"} Feb 03 08:41:32 crc kubenswrapper[4998]: I0203 08:41:32.135391 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-x28xk" event={"ID":"49cd705c-3f48-490c-b78e-27293413bbf1","Type":"ContainerStarted","Data":"95e0fec7082179435aec3fe91c69fa76f654725a6c00c9c7a54c895bf9b5e031"} Feb 03 08:41:32 crc kubenswrapper[4998]: I0203 08:41:32.434066 4998 scope.go:117] "RemoveContainer" containerID="68a2e0b59f4a15c3fb480672ad458f573853ef14695099ba937f9507072081ed" Feb 03 08:41:32 crc kubenswrapper[4998]: E0203 08:41:32.434560 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:41:32 crc kubenswrapper[4998]: I0203 08:41:32.437629 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac9d1695-ef7f-4c3e-a913-873800faa884" path="/var/lib/kubelet/pods/ac9d1695-ef7f-4c3e-a913-873800faa884/volumes" Feb 03 08:41:33 crc kubenswrapper[4998]: I0203 08:41:33.458325 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-x28xk" Feb 03 08:41:33 crc kubenswrapper[4998]: I0203 08:41:33.553117 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/49cd705c-3f48-490c-b78e-27293413bbf1-crc-storage\") pod \"49cd705c-3f48-490c-b78e-27293413bbf1\" (UID: \"49cd705c-3f48-490c-b78e-27293413bbf1\") " Feb 03 08:41:33 crc kubenswrapper[4998]: I0203 08:41:33.553236 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/49cd705c-3f48-490c-b78e-27293413bbf1-node-mnt\") pod \"49cd705c-3f48-490c-b78e-27293413bbf1\" (UID: \"49cd705c-3f48-490c-b78e-27293413bbf1\") " Feb 03 08:41:33 crc kubenswrapper[4998]: I0203 08:41:33.553393 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/49cd705c-3f48-490c-b78e-27293413bbf1-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "49cd705c-3f48-490c-b78e-27293413bbf1" (UID: "49cd705c-3f48-490c-b78e-27293413bbf1"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 08:41:33 crc kubenswrapper[4998]: I0203 08:41:33.554226 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bw6sj\" (UniqueName: \"kubernetes.io/projected/49cd705c-3f48-490c-b78e-27293413bbf1-kube-api-access-bw6sj\") pod \"49cd705c-3f48-490c-b78e-27293413bbf1\" (UID: \"49cd705c-3f48-490c-b78e-27293413bbf1\") " Feb 03 08:41:33 crc kubenswrapper[4998]: I0203 08:41:33.555944 4998 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/49cd705c-3f48-490c-b78e-27293413bbf1-node-mnt\") on node \"crc\" DevicePath \"\"" Feb 03 08:41:33 crc kubenswrapper[4998]: I0203 08:41:33.558606 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49cd705c-3f48-490c-b78e-27293413bbf1-kube-api-access-bw6sj" (OuterVolumeSpecName: "kube-api-access-bw6sj") pod "49cd705c-3f48-490c-b78e-27293413bbf1" (UID: "49cd705c-3f48-490c-b78e-27293413bbf1"). InnerVolumeSpecName "kube-api-access-bw6sj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:41:33 crc kubenswrapper[4998]: I0203 08:41:33.569877 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49cd705c-3f48-490c-b78e-27293413bbf1-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "49cd705c-3f48-490c-b78e-27293413bbf1" (UID: "49cd705c-3f48-490c-b78e-27293413bbf1"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:41:33 crc kubenswrapper[4998]: I0203 08:41:33.657121 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bw6sj\" (UniqueName: \"kubernetes.io/projected/49cd705c-3f48-490c-b78e-27293413bbf1-kube-api-access-bw6sj\") on node \"crc\" DevicePath \"\"" Feb 03 08:41:33 crc kubenswrapper[4998]: I0203 08:41:33.657157 4998 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/49cd705c-3f48-490c-b78e-27293413bbf1-crc-storage\") on node \"crc\" DevicePath \"\"" Feb 03 08:41:34 crc kubenswrapper[4998]: I0203 08:41:34.151242 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-x28xk" event={"ID":"49cd705c-3f48-490c-b78e-27293413bbf1","Type":"ContainerDied","Data":"95e0fec7082179435aec3fe91c69fa76f654725a6c00c9c7a54c895bf9b5e031"} Feb 03 08:41:34 crc kubenswrapper[4998]: I0203 08:41:34.151280 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="95e0fec7082179435aec3fe91c69fa76f654725a6c00c9c7a54c895bf9b5e031" Feb 03 08:41:34 crc kubenswrapper[4998]: I0203 08:41:34.151333 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-x28xk" Feb 03 08:41:47 crc kubenswrapper[4998]: I0203 08:41:47.428144 4998 scope.go:117] "RemoveContainer" containerID="68a2e0b59f4a15c3fb480672ad458f573853ef14695099ba937f9507072081ed" Feb 03 08:41:47 crc kubenswrapper[4998]: E0203 08:41:47.428886 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:42:00 crc kubenswrapper[4998]: I0203 08:42:00.428436 4998 scope.go:117] "RemoveContainer" containerID="68a2e0b59f4a15c3fb480672ad458f573853ef14695099ba937f9507072081ed" Feb 03 08:42:00 crc kubenswrapper[4998]: E0203 08:42:00.429290 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:42:08 crc kubenswrapper[4998]: I0203 08:42:08.008507 4998 scope.go:117] "RemoveContainer" containerID="a8e4648698b19bf57f7161c2e808ea88424ddabb968ad3769b7f072296c0b0d0" Feb 03 08:42:11 crc kubenswrapper[4998]: I0203 08:42:11.428654 4998 scope.go:117] "RemoveContainer" containerID="68a2e0b59f4a15c3fb480672ad458f573853ef14695099ba937f9507072081ed" Feb 03 08:42:11 crc kubenswrapper[4998]: E0203 08:42:11.429414 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:42:23 crc kubenswrapper[4998]: I0203 
Feb 03 08:42:23 crc kubenswrapper[4998]: I0203 08:42:23.427858 4998 scope.go:117] "RemoveContainer" containerID="68a2e0b59f4a15c3fb480672ad458f573853ef14695099ba937f9507072081ed"
Feb 03 08:42:24 crc kubenswrapper[4998]: I0203 08:42:24.489476 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerStarted","Data":"49880a38ce9c7069d48217e07b6a35e3499473215322b29e349b99efe7f5f343"}
Feb 03 08:42:37 crc kubenswrapper[4998]: I0203 08:42:37.856894 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mgn5z"]
Feb 03 08:42:37 crc kubenswrapper[4998]: E0203 08:42:37.857728 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49cd705c-3f48-490c-b78e-27293413bbf1" containerName="storage"
Feb 03 08:42:37 crc kubenswrapper[4998]: I0203 08:42:37.857741 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="49cd705c-3f48-490c-b78e-27293413bbf1" containerName="storage"
Feb 03 08:42:37 crc kubenswrapper[4998]: I0203 08:42:37.857902 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="49cd705c-3f48-490c-b78e-27293413bbf1" containerName="storage"
Feb 03 08:42:37 crc kubenswrapper[4998]: I0203 08:42:37.858943 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mgn5z"
Feb 03 08:42:37 crc kubenswrapper[4998]: I0203 08:42:37.875305 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mgn5z"]
Feb 03 08:42:37 crc kubenswrapper[4998]: I0203 08:42:37.889544 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hb745\" (UniqueName: \"kubernetes.io/projected/5bdfaa84-3c8d-4282-a595-555366c78377-kube-api-access-hb745\") pod \"certified-operators-mgn5z\" (UID: \"5bdfaa84-3c8d-4282-a595-555366c78377\") " pod="openshift-marketplace/certified-operators-mgn5z"
Feb 03 08:42:37 crc kubenswrapper[4998]: I0203 08:42:37.889587 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5bdfaa84-3c8d-4282-a595-555366c78377-utilities\") pod \"certified-operators-mgn5z\" (UID: \"5bdfaa84-3c8d-4282-a595-555366c78377\") " pod="openshift-marketplace/certified-operators-mgn5z"
Feb 03 08:42:37 crc kubenswrapper[4998]: I0203 08:42:37.889614 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5bdfaa84-3c8d-4282-a595-555366c78377-catalog-content\") pod \"certified-operators-mgn5z\" (UID: \"5bdfaa84-3c8d-4282-a595-555366c78377\") " pod="openshift-marketplace/certified-operators-mgn5z"
Feb 03 08:42:37 crc kubenswrapper[4998]: I0203 08:42:37.990415 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hb745\" (UniqueName: \"kubernetes.io/projected/5bdfaa84-3c8d-4282-a595-555366c78377-kube-api-access-hb745\") pod \"certified-operators-mgn5z\" (UID: \"5bdfaa84-3c8d-4282-a595-555366c78377\") " pod="openshift-marketplace/certified-operators-mgn5z"
Feb 03 08:42:37 crc kubenswrapper[4998]: I0203 08:42:37.990475 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5bdfaa84-3c8d-4282-a595-555366c78377-utilities\") pod \"certified-operators-mgn5z\" (UID: \"5bdfaa84-3c8d-4282-a595-555366c78377\") " pod="openshift-marketplace/certified-operators-mgn5z"
Feb 03 08:42:37 crc kubenswrapper[4998]: I0203 08:42:37.990504 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5bdfaa84-3c8d-4282-a595-555366c78377-catalog-content\") pod \"certified-operators-mgn5z\" (UID: \"5bdfaa84-3c8d-4282-a595-555366c78377\") " pod="openshift-marketplace/certified-operators-mgn5z"
Feb 03 08:42:37 crc kubenswrapper[4998]: I0203 08:42:37.990971 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5bdfaa84-3c8d-4282-a595-555366c78377-utilities\") pod \"certified-operators-mgn5z\" (UID: \"5bdfaa84-3c8d-4282-a595-555366c78377\") " pod="openshift-marketplace/certified-operators-mgn5z"
Feb 03 08:42:37 crc kubenswrapper[4998]: I0203 08:42:37.991097 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5bdfaa84-3c8d-4282-a595-555366c78377-catalog-content\") pod \"certified-operators-mgn5z\" (UID: \"5bdfaa84-3c8d-4282-a595-555366c78377\") " pod="openshift-marketplace/certified-operators-mgn5z"
Feb 03 08:42:38 crc kubenswrapper[4998]: I0203 08:42:38.020114 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hb745\" (UniqueName: \"kubernetes.io/projected/5bdfaa84-3c8d-4282-a595-555366c78377-kube-api-access-hb745\") pod \"certified-operators-mgn5z\" (UID: \"5bdfaa84-3c8d-4282-a595-555366c78377\") " pod="openshift-marketplace/certified-operators-mgn5z"
Feb 03 08:42:38 crc kubenswrapper[4998]: I0203 08:42:38.181167 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mgn5z"
Feb 03 08:42:38 crc kubenswrapper[4998]: I0203 08:42:38.656361 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mgn5z"]
Feb 03 08:42:39 crc kubenswrapper[4998]: I0203 08:42:39.627940 4998 generic.go:334] "Generic (PLEG): container finished" podID="5bdfaa84-3c8d-4282-a595-555366c78377" containerID="6439bc1b10c3b40c59b0bfe3affc0fef2169c39b0b8fff5ea82bb71f73f7ab65" exitCode=0
Feb 03 08:42:39 crc kubenswrapper[4998]: I0203 08:42:39.628013 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mgn5z" event={"ID":"5bdfaa84-3c8d-4282-a595-555366c78377","Type":"ContainerDied","Data":"6439bc1b10c3b40c59b0bfe3affc0fef2169c39b0b8fff5ea82bb71f73f7ab65"}
Feb 03 08:42:39 crc kubenswrapper[4998]: I0203 08:42:39.628221 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mgn5z" event={"ID":"5bdfaa84-3c8d-4282-a595-555366c78377","Type":"ContainerStarted","Data":"2e18bee6358fd1bdc861c3766fa45401a7ce7f52793caabe0abb57befc91a006"}
Feb 03 08:42:40 crc kubenswrapper[4998]: I0203 08:42:40.640556 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mgn5z" event={"ID":"5bdfaa84-3c8d-4282-a595-555366c78377","Type":"ContainerStarted","Data":"2c42051e41d31bde9bea8a63cbcc0e2520d739209a4937d0c050a029f05fb507"}
Feb 03 08:42:41 crc kubenswrapper[4998]: I0203 08:42:41.650871 4998 generic.go:334] "Generic (PLEG): container finished" podID="5bdfaa84-3c8d-4282-a595-555366c78377" containerID="2c42051e41d31bde9bea8a63cbcc0e2520d739209a4937d0c050a029f05fb507" exitCode=0
Feb 03 08:42:41 crc kubenswrapper[4998]: I0203 08:42:41.650915 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mgn5z" event={"ID":"5bdfaa84-3c8d-4282-a595-555366c78377","Type":"ContainerDied","Data":"2c42051e41d31bde9bea8a63cbcc0e2520d739209a4937d0c050a029f05fb507"}
Feb 03 08:42:42 crc kubenswrapper[4998]: I0203 08:42:42.659969 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mgn5z" event={"ID":"5bdfaa84-3c8d-4282-a595-555366c78377","Type":"ContainerStarted","Data":"fc9181cb455572dc92143bc7c3b5635f2df55b191700551726065d782e1d07a1"}
Feb 03 08:42:42 crc kubenswrapper[4998]: I0203 08:42:42.691611 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mgn5z" podStartSLOduration=2.9969399 podStartE2EDuration="5.691590813s" podCreationTimestamp="2026-02-03 08:42:37 +0000 UTC" firstStartedPulling="2026-02-03 08:42:39.630174002 +0000 UTC m=+6997.916867808" lastFinishedPulling="2026-02-03 08:42:42.324824915 +0000 UTC m=+7000.611518721" observedRunningTime="2026-02-03 08:42:42.684756969 +0000 UTC m=+7000.971450815" watchObservedRunningTime="2026-02-03 08:42:42.691590813 +0000 UTC m=+7000.978284629"
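
The pod_startup_latency_tracker entry above encodes an arithmetic relationship worth making explicit: podStartSLOduration is podStartE2EDuration minus the image-pull window (lastFinishedPulling - firstStartedPulling), and the logged values check out: 5.691590813s - 2.694650913s = 2.9969399s. A self-contained re-derivation from the logged timestamps (the subtraction rule is inferred from the values here, not quoted from kubelet source):

    // Re-deriving the startup-latency figures for certified-operators-mgn5z.
    // Timestamps are copied verbatim from the log entry above.
    package main

    import (
    	"fmt"
    	"time"
    )

    func mustParse(s string) time.Time {
    	t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
    	if err != nil {
    		panic(err)
    	}
    	return t
    }

    func main() {
    	created := mustParse("2026-02-03 08:42:37 +0000 UTC")             // podCreationTimestamp
    	firstPull := mustParse("2026-02-03 08:42:39.630174002 +0000 UTC") // firstStartedPulling
    	lastPull := mustParse("2026-02-03 08:42:42.324824915 +0000 UTC")  // lastFinishedPulling
    	watched := mustParse("2026-02-03 08:42:42.691590813 +0000 UTC")   // watchObservedRunningTime

    	e2e := watched.Sub(created)          // podStartE2EDuration: 5.691590813s
    	slo := e2e - lastPull.Sub(firstPull) // podStartSLOduration: 2.9969399s
    	fmt.Println(e2e, slo)
    }
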
Feb 03 08:42:48 crc kubenswrapper[4998]: I0203 08:42:48.181356 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mgn5z"
Feb 03 08:42:48 crc kubenswrapper[4998]: I0203 08:42:48.181982 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mgn5z"
Feb 03 08:42:48 crc kubenswrapper[4998]: I0203 08:42:48.222324 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mgn5z"
Feb 03 08:42:48 crc kubenswrapper[4998]: I0203 08:42:48.749100 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mgn5z"
Feb 03 08:42:48 crc kubenswrapper[4998]: I0203 08:42:48.798883 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mgn5z"]
Feb 03 08:42:50 crc kubenswrapper[4998]: I0203 08:42:50.719307 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mgn5z" podUID="5bdfaa84-3c8d-4282-a595-555366c78377" containerName="registry-server" containerID="cri-o://fc9181cb455572dc92143bc7c3b5635f2df55b191700551726065d782e1d07a1" gracePeriod=2
Feb 03 08:42:51 crc kubenswrapper[4998]: I0203 08:42:51.095581 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mgn5z"
Feb 03 08:42:51 crc kubenswrapper[4998]: I0203 08:42:51.287265 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5bdfaa84-3c8d-4282-a595-555366c78377-utilities\") pod \"5bdfaa84-3c8d-4282-a595-555366c78377\" (UID: \"5bdfaa84-3c8d-4282-a595-555366c78377\") "
Feb 03 08:42:51 crc kubenswrapper[4998]: I0203 08:42:51.287377 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5bdfaa84-3c8d-4282-a595-555366c78377-catalog-content\") pod \"5bdfaa84-3c8d-4282-a595-555366c78377\" (UID: \"5bdfaa84-3c8d-4282-a595-555366c78377\") "
Feb 03 08:42:51 crc kubenswrapper[4998]: I0203 08:42:51.287406 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hb745\" (UniqueName: \"kubernetes.io/projected/5bdfaa84-3c8d-4282-a595-555366c78377-kube-api-access-hb745\") pod \"5bdfaa84-3c8d-4282-a595-555366c78377\" (UID: \"5bdfaa84-3c8d-4282-a595-555366c78377\") "
Feb 03 08:42:51 crc kubenswrapper[4998]: I0203 08:42:51.288376 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5bdfaa84-3c8d-4282-a595-555366c78377-utilities" (OuterVolumeSpecName: "utilities") pod "5bdfaa84-3c8d-4282-a595-555366c78377" (UID: "5bdfaa84-3c8d-4282-a595-555366c78377"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 08:42:51 crc kubenswrapper[4998]: I0203 08:42:51.293973 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5bdfaa84-3c8d-4282-a595-555366c78377-kube-api-access-hb745" (OuterVolumeSpecName: "kube-api-access-hb745") pod "5bdfaa84-3c8d-4282-a595-555366c78377" (UID: "5bdfaa84-3c8d-4282-a595-555366c78377"). InnerVolumeSpecName "kube-api-access-hb745". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 08:42:51 crc kubenswrapper[4998]: I0203 08:42:51.333438 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5bdfaa84-3c8d-4282-a595-555366c78377-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5bdfaa84-3c8d-4282-a595-555366c78377" (UID: "5bdfaa84-3c8d-4282-a595-555366c78377"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 08:42:51 crc kubenswrapper[4998]: I0203 08:42:51.389199 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5bdfaa84-3c8d-4282-a595-555366c78377-utilities\") on node \"crc\" DevicePath \"\""
Feb 03 08:42:51 crc kubenswrapper[4998]: I0203 08:42:51.389258 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5bdfaa84-3c8d-4282-a595-555366c78377-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 03 08:42:51 crc kubenswrapper[4998]: I0203 08:42:51.389275 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hb745\" (UniqueName: \"kubernetes.io/projected/5bdfaa84-3c8d-4282-a595-555366c78377-kube-api-access-hb745\") on node \"crc\" DevicePath \"\""
Feb 03 08:42:51 crc kubenswrapper[4998]: I0203 08:42:51.731832 4998 generic.go:334] "Generic (PLEG): container finished" podID="5bdfaa84-3c8d-4282-a595-555366c78377" containerID="fc9181cb455572dc92143bc7c3b5635f2df55b191700551726065d782e1d07a1" exitCode=0
Feb 03 08:42:51 crc kubenswrapper[4998]: I0203 08:42:51.731921 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mgn5z" event={"ID":"5bdfaa84-3c8d-4282-a595-555366c78377","Type":"ContainerDied","Data":"fc9181cb455572dc92143bc7c3b5635f2df55b191700551726065d782e1d07a1"}
Feb 03 08:42:51 crc kubenswrapper[4998]: I0203 08:42:51.731964 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mgn5z" event={"ID":"5bdfaa84-3c8d-4282-a595-555366c78377","Type":"ContainerDied","Data":"2e18bee6358fd1bdc861c3766fa45401a7ce7f52793caabe0abb57befc91a006"}
Feb 03 08:42:51 crc kubenswrapper[4998]: I0203 08:42:51.731975 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mgn5z"
Feb 03 08:42:51 crc kubenswrapper[4998]: I0203 08:42:51.731992 4998 scope.go:117] "RemoveContainer" containerID="fc9181cb455572dc92143bc7c3b5635f2df55b191700551726065d782e1d07a1"
Feb 03 08:42:51 crc kubenswrapper[4998]: I0203 08:42:51.753841 4998 scope.go:117] "RemoveContainer" containerID="2c42051e41d31bde9bea8a63cbcc0e2520d739209a4937d0c050a029f05fb507"
Feb 03 08:42:51 crc kubenswrapper[4998]: I0203 08:42:51.772128 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mgn5z"]
Feb 03 08:42:51 crc kubenswrapper[4998]: I0203 08:42:51.782973 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mgn5z"]
Feb 03 08:42:51 crc kubenswrapper[4998]: I0203 08:42:51.790649 4998 scope.go:117] "RemoveContainer" containerID="6439bc1b10c3b40c59b0bfe3affc0fef2169c39b0b8fff5ea82bb71f73f7ab65"
Feb 03 08:42:51 crc kubenswrapper[4998]: I0203 08:42:51.815112 4998 scope.go:117] "RemoveContainer" containerID="fc9181cb455572dc92143bc7c3b5635f2df55b191700551726065d782e1d07a1"
Feb 03 08:42:51 crc kubenswrapper[4998]: E0203 08:42:51.815743 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc9181cb455572dc92143bc7c3b5635f2df55b191700551726065d782e1d07a1\": container with ID starting with fc9181cb455572dc92143bc7c3b5635f2df55b191700551726065d782e1d07a1 not found: ID does not exist" containerID="fc9181cb455572dc92143bc7c3b5635f2df55b191700551726065d782e1d07a1"
Feb 03 08:42:51 crc kubenswrapper[4998]: I0203 08:42:51.815794 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc9181cb455572dc92143bc7c3b5635f2df55b191700551726065d782e1d07a1"} err="failed to get container status \"fc9181cb455572dc92143bc7c3b5635f2df55b191700551726065d782e1d07a1\": rpc error: code = NotFound desc = could not find container \"fc9181cb455572dc92143bc7c3b5635f2df55b191700551726065d782e1d07a1\": container with ID starting with fc9181cb455572dc92143bc7c3b5635f2df55b191700551726065d782e1d07a1 not found: ID does not exist"
Feb 03 08:42:51 crc kubenswrapper[4998]: I0203 08:42:51.815816 4998 scope.go:117] "RemoveContainer" containerID="2c42051e41d31bde9bea8a63cbcc0e2520d739209a4937d0c050a029f05fb507"
Feb 03 08:42:51 crc kubenswrapper[4998]: E0203 08:42:51.816101 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c42051e41d31bde9bea8a63cbcc0e2520d739209a4937d0c050a029f05fb507\": container with ID starting with 2c42051e41d31bde9bea8a63cbcc0e2520d739209a4937d0c050a029f05fb507 not found: ID does not exist" containerID="2c42051e41d31bde9bea8a63cbcc0e2520d739209a4937d0c050a029f05fb507"
Feb 03 08:42:51 crc kubenswrapper[4998]: I0203 08:42:51.816119 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c42051e41d31bde9bea8a63cbcc0e2520d739209a4937d0c050a029f05fb507"} err="failed to get container status \"2c42051e41d31bde9bea8a63cbcc0e2520d739209a4937d0c050a029f05fb507\": rpc error: code = NotFound desc = could not find container \"2c42051e41d31bde9bea8a63cbcc0e2520d739209a4937d0c050a029f05fb507\": container with ID starting with 2c42051e41d31bde9bea8a63cbcc0e2520d739209a4937d0c050a029f05fb507 not found: ID does not exist"
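
The E/I pairs above are a benign race, not a failure: by the time scope.go retries RemoveContainer, CRI-O has already deleted the container, so the follow-up status lookup returns gRPC NotFound and the kubelet only records it. A client-side sketch of the usual idiom of tolerating NotFound during cleanup (removeContainer here is a hypothetical stand-in, not a real CRI call):

    // Sketch: treating gRPC NotFound as success during container cleanup,
    // the pattern implied by the "ID does not exist" errors above.
    package main

    import (
    	"fmt"

    	"google.golang.org/grpc/codes"
    	"google.golang.org/grpc/status"
    )

    func removeContainer(id string) error {
    	// Stand-in: pretend the runtime already deleted the container.
    	return status.Errorf(codes.NotFound, "could not find container %q", id)
    }

    func cleanup(id string) error {
    	err := removeContainer(id)
    	if status.Code(err) == codes.NotFound {
    		return nil // already gone; nothing left to do
    	}
    	return err
    }

    func main() {
    	fmt.Println("cleanup error:", cleanup("fc9181cb4555"))
    }
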
containerID="6439bc1b10c3b40c59b0bfe3affc0fef2169c39b0b8fff5ea82bb71f73f7ab65" Feb 03 08:42:51 crc kubenswrapper[4998]: E0203 08:42:51.816372 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6439bc1b10c3b40c59b0bfe3affc0fef2169c39b0b8fff5ea82bb71f73f7ab65\": container with ID starting with 6439bc1b10c3b40c59b0bfe3affc0fef2169c39b0b8fff5ea82bb71f73f7ab65 not found: ID does not exist" containerID="6439bc1b10c3b40c59b0bfe3affc0fef2169c39b0b8fff5ea82bb71f73f7ab65" Feb 03 08:42:51 crc kubenswrapper[4998]: I0203 08:42:51.816455 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6439bc1b10c3b40c59b0bfe3affc0fef2169c39b0b8fff5ea82bb71f73f7ab65"} err="failed to get container status \"6439bc1b10c3b40c59b0bfe3affc0fef2169c39b0b8fff5ea82bb71f73f7ab65\": rpc error: code = NotFound desc = could not find container \"6439bc1b10c3b40c59b0bfe3affc0fef2169c39b0b8fff5ea82bb71f73f7ab65\": container with ID starting with 6439bc1b10c3b40c59b0bfe3affc0fef2169c39b0b8fff5ea82bb71f73f7ab65 not found: ID does not exist" Feb 03 08:42:52 crc kubenswrapper[4998]: I0203 08:42:52.441053 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5bdfaa84-3c8d-4282-a595-555366c78377" path="/var/lib/kubelet/pods/5bdfaa84-3c8d-4282-a595-555366c78377/volumes" Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.405570 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7fb8fdf8b7-mzj9s"] Feb 03 08:43:30 crc kubenswrapper[4998]: E0203 08:43:30.406453 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bdfaa84-3c8d-4282-a595-555366c78377" containerName="registry-server" Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.406470 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bdfaa84-3c8d-4282-a595-555366c78377" containerName="registry-server" Feb 03 08:43:30 crc kubenswrapper[4998]: E0203 08:43:30.406482 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bdfaa84-3c8d-4282-a595-555366c78377" containerName="extract-content" Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.406490 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bdfaa84-3c8d-4282-a595-555366c78377" containerName="extract-content" Feb 03 08:43:30 crc kubenswrapper[4998]: E0203 08:43:30.406512 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bdfaa84-3c8d-4282-a595-555366c78377" containerName="extract-utilities" Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.406522 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bdfaa84-3c8d-4282-a595-555366c78377" containerName="extract-utilities" Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.406721 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="5bdfaa84-3c8d-4282-a595-555366c78377" containerName="registry-server" Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.427752 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fb8fdf8b7-mzj9s" Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.431542 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.432009 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.432353 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-rfbkk" Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.432465 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.432696 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.439331 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fb8fdf8b7-mzj9s"] Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.512485 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/94056d14-26b2-43ec-a1c9-b93ceac98777-dns-svc\") pod \"dnsmasq-dns-7fb8fdf8b7-mzj9s\" (UID: \"94056d14-26b2-43ec-a1c9-b93ceac98777\") " pod="openstack/dnsmasq-dns-7fb8fdf8b7-mzj9s" Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.512585 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94056d14-26b2-43ec-a1c9-b93ceac98777-config\") pod \"dnsmasq-dns-7fb8fdf8b7-mzj9s\" (UID: \"94056d14-26b2-43ec-a1c9-b93ceac98777\") " pod="openstack/dnsmasq-dns-7fb8fdf8b7-mzj9s" Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.512626 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8jf8\" (UniqueName: \"kubernetes.io/projected/94056d14-26b2-43ec-a1c9-b93ceac98777-kube-api-access-r8jf8\") pod \"dnsmasq-dns-7fb8fdf8b7-mzj9s\" (UID: \"94056d14-26b2-43ec-a1c9-b93ceac98777\") " pod="openstack/dnsmasq-dns-7fb8fdf8b7-mzj9s" Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.590487 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-55cb5c8bf7-ghvq6"] Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.592958 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55cb5c8bf7-ghvq6" Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.600532 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55cb5c8bf7-ghvq6"] Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.613937 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94056d14-26b2-43ec-a1c9-b93ceac98777-config\") pod \"dnsmasq-dns-7fb8fdf8b7-mzj9s\" (UID: \"94056d14-26b2-43ec-a1c9-b93ceac98777\") " pod="openstack/dnsmasq-dns-7fb8fdf8b7-mzj9s" Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.613993 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8jf8\" (UniqueName: \"kubernetes.io/projected/94056d14-26b2-43ec-a1c9-b93ceac98777-kube-api-access-r8jf8\") pod \"dnsmasq-dns-7fb8fdf8b7-mzj9s\" (UID: \"94056d14-26b2-43ec-a1c9-b93ceac98777\") " pod="openstack/dnsmasq-dns-7fb8fdf8b7-mzj9s" Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.614028 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/94056d14-26b2-43ec-a1c9-b93ceac98777-dns-svc\") pod \"dnsmasq-dns-7fb8fdf8b7-mzj9s\" (UID: \"94056d14-26b2-43ec-a1c9-b93ceac98777\") " pod="openstack/dnsmasq-dns-7fb8fdf8b7-mzj9s" Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.614801 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/94056d14-26b2-43ec-a1c9-b93ceac98777-dns-svc\") pod \"dnsmasq-dns-7fb8fdf8b7-mzj9s\" (UID: \"94056d14-26b2-43ec-a1c9-b93ceac98777\") " pod="openstack/dnsmasq-dns-7fb8fdf8b7-mzj9s" Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.615384 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94056d14-26b2-43ec-a1c9-b93ceac98777-config\") pod \"dnsmasq-dns-7fb8fdf8b7-mzj9s\" (UID: \"94056d14-26b2-43ec-a1c9-b93ceac98777\") " pod="openstack/dnsmasq-dns-7fb8fdf8b7-mzj9s" Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.650332 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r8jf8\" (UniqueName: \"kubernetes.io/projected/94056d14-26b2-43ec-a1c9-b93ceac98777-kube-api-access-r8jf8\") pod \"dnsmasq-dns-7fb8fdf8b7-mzj9s\" (UID: \"94056d14-26b2-43ec-a1c9-b93ceac98777\") " pod="openstack/dnsmasq-dns-7fb8fdf8b7-mzj9s" Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.715684 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9a7add6c-6a09-4a9d-8209-a07c50a7c311-dns-svc\") pod \"dnsmasq-dns-55cb5c8bf7-ghvq6\" (UID: \"9a7add6c-6a09-4a9d-8209-a07c50a7c311\") " pod="openstack/dnsmasq-dns-55cb5c8bf7-ghvq6" Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.715768 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwx6v\" (UniqueName: \"kubernetes.io/projected/9a7add6c-6a09-4a9d-8209-a07c50a7c311-kube-api-access-nwx6v\") pod \"dnsmasq-dns-55cb5c8bf7-ghvq6\" (UID: \"9a7add6c-6a09-4a9d-8209-a07c50a7c311\") " pod="openstack/dnsmasq-dns-55cb5c8bf7-ghvq6" Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.715809 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/9a7add6c-6a09-4a9d-8209-a07c50a7c311-config\") pod \"dnsmasq-dns-55cb5c8bf7-ghvq6\" (UID: \"9a7add6c-6a09-4a9d-8209-a07c50a7c311\") " pod="openstack/dnsmasq-dns-55cb5c8bf7-ghvq6" Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.756333 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fb8fdf8b7-mzj9s" Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.819345 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwx6v\" (UniqueName: \"kubernetes.io/projected/9a7add6c-6a09-4a9d-8209-a07c50a7c311-kube-api-access-nwx6v\") pod \"dnsmasq-dns-55cb5c8bf7-ghvq6\" (UID: \"9a7add6c-6a09-4a9d-8209-a07c50a7c311\") " pod="openstack/dnsmasq-dns-55cb5c8bf7-ghvq6" Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.819403 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a7add6c-6a09-4a9d-8209-a07c50a7c311-config\") pod \"dnsmasq-dns-55cb5c8bf7-ghvq6\" (UID: \"9a7add6c-6a09-4a9d-8209-a07c50a7c311\") " pod="openstack/dnsmasq-dns-55cb5c8bf7-ghvq6" Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.819463 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9a7add6c-6a09-4a9d-8209-a07c50a7c311-dns-svc\") pod \"dnsmasq-dns-55cb5c8bf7-ghvq6\" (UID: \"9a7add6c-6a09-4a9d-8209-a07c50a7c311\") " pod="openstack/dnsmasq-dns-55cb5c8bf7-ghvq6" Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.820346 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9a7add6c-6a09-4a9d-8209-a07c50a7c311-dns-svc\") pod \"dnsmasq-dns-55cb5c8bf7-ghvq6\" (UID: \"9a7add6c-6a09-4a9d-8209-a07c50a7c311\") " pod="openstack/dnsmasq-dns-55cb5c8bf7-ghvq6" Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.821015 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a7add6c-6a09-4a9d-8209-a07c50a7c311-config\") pod \"dnsmasq-dns-55cb5c8bf7-ghvq6\" (UID: \"9a7add6c-6a09-4a9d-8209-a07c50a7c311\") " pod="openstack/dnsmasq-dns-55cb5c8bf7-ghvq6" Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.856637 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nwx6v\" (UniqueName: \"kubernetes.io/projected/9a7add6c-6a09-4a9d-8209-a07c50a7c311-kube-api-access-nwx6v\") pod \"dnsmasq-dns-55cb5c8bf7-ghvq6\" (UID: \"9a7add6c-6a09-4a9d-8209-a07c50a7c311\") " pod="openstack/dnsmasq-dns-55cb5c8bf7-ghvq6" Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.894202 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55cb5c8bf7-ghvq6"] Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.894830 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55cb5c8bf7-ghvq6" Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.939979 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5fc7665699-8mt5s"] Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.941187 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5fc7665699-8mt5s" Feb 03 08:43:30 crc kubenswrapper[4998]: I0203 08:43:30.947846 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5fc7665699-8mt5s"] Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.025194 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f0facf43-e34c-4cc9-a7bf-8088f11b93c9-config\") pod \"dnsmasq-dns-5fc7665699-8mt5s\" (UID: \"f0facf43-e34c-4cc9-a7bf-8088f11b93c9\") " pod="openstack/dnsmasq-dns-5fc7665699-8mt5s" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.025247 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qm56w\" (UniqueName: \"kubernetes.io/projected/f0facf43-e34c-4cc9-a7bf-8088f11b93c9-kube-api-access-qm56w\") pod \"dnsmasq-dns-5fc7665699-8mt5s\" (UID: \"f0facf43-e34c-4cc9-a7bf-8088f11b93c9\") " pod="openstack/dnsmasq-dns-5fc7665699-8mt5s" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.025291 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f0facf43-e34c-4cc9-a7bf-8088f11b93c9-dns-svc\") pod \"dnsmasq-dns-5fc7665699-8mt5s\" (UID: \"f0facf43-e34c-4cc9-a7bf-8088f11b93c9\") " pod="openstack/dnsmasq-dns-5fc7665699-8mt5s" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.128504 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f0facf43-e34c-4cc9-a7bf-8088f11b93c9-dns-svc\") pod \"dnsmasq-dns-5fc7665699-8mt5s\" (UID: \"f0facf43-e34c-4cc9-a7bf-8088f11b93c9\") " pod="openstack/dnsmasq-dns-5fc7665699-8mt5s" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.128903 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f0facf43-e34c-4cc9-a7bf-8088f11b93c9-config\") pod \"dnsmasq-dns-5fc7665699-8mt5s\" (UID: \"f0facf43-e34c-4cc9-a7bf-8088f11b93c9\") " pod="openstack/dnsmasq-dns-5fc7665699-8mt5s" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.128933 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qm56w\" (UniqueName: \"kubernetes.io/projected/f0facf43-e34c-4cc9-a7bf-8088f11b93c9-kube-api-access-qm56w\") pod \"dnsmasq-dns-5fc7665699-8mt5s\" (UID: \"f0facf43-e34c-4cc9-a7bf-8088f11b93c9\") " pod="openstack/dnsmasq-dns-5fc7665699-8mt5s" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.130068 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f0facf43-e34c-4cc9-a7bf-8088f11b93c9-dns-svc\") pod \"dnsmasq-dns-5fc7665699-8mt5s\" (UID: \"f0facf43-e34c-4cc9-a7bf-8088f11b93c9\") " pod="openstack/dnsmasq-dns-5fc7665699-8mt5s" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.130669 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f0facf43-e34c-4cc9-a7bf-8088f11b93c9-config\") pod \"dnsmasq-dns-5fc7665699-8mt5s\" (UID: \"f0facf43-e34c-4cc9-a7bf-8088f11b93c9\") " pod="openstack/dnsmasq-dns-5fc7665699-8mt5s" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.179032 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qm56w\" (UniqueName: 
\"kubernetes.io/projected/f0facf43-e34c-4cc9-a7bf-8088f11b93c9-kube-api-access-qm56w\") pod \"dnsmasq-dns-5fc7665699-8mt5s\" (UID: \"f0facf43-e34c-4cc9-a7bf-8088f11b93c9\") " pod="openstack/dnsmasq-dns-5fc7665699-8mt5s" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.195798 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fb8fdf8b7-mzj9s"] Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.287085 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5fc7665699-8mt5s" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.307641 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55cb5c8bf7-ghvq6"] Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.341932 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5fc7665699-8mt5s"] Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.383500 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-c657cd4d9-4bm4m"] Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.385017 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-c657cd4d9-4bm4m" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.388056 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-c657cd4d9-4bm4m"] Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.437802 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db49e31a-f281-4aa3-9fd1-c6609141ed0e-config\") pod \"dnsmasq-dns-c657cd4d9-4bm4m\" (UID: \"db49e31a-f281-4aa3-9fd1-c6609141ed0e\") " pod="openstack/dnsmasq-dns-c657cd4d9-4bm4m" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.437927 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltls9\" (UniqueName: \"kubernetes.io/projected/db49e31a-f281-4aa3-9fd1-c6609141ed0e-kube-api-access-ltls9\") pod \"dnsmasq-dns-c657cd4d9-4bm4m\" (UID: \"db49e31a-f281-4aa3-9fd1-c6609141ed0e\") " pod="openstack/dnsmasq-dns-c657cd4d9-4bm4m" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.437979 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/db49e31a-f281-4aa3-9fd1-c6609141ed0e-dns-svc\") pod \"dnsmasq-dns-c657cd4d9-4bm4m\" (UID: \"db49e31a-f281-4aa3-9fd1-c6609141ed0e\") " pod="openstack/dnsmasq-dns-c657cd4d9-4bm4m" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.510041 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.524022 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.526686 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.526894 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.527054 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.527575 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.527599 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-bltb9" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.536185 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.542638 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db49e31a-f281-4aa3-9fd1-c6609141ed0e-config\") pod \"dnsmasq-dns-c657cd4d9-4bm4m\" (UID: \"db49e31a-f281-4aa3-9fd1-c6609141ed0e\") " pod="openstack/dnsmasq-dns-c657cd4d9-4bm4m" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.542790 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltls9\" (UniqueName: \"kubernetes.io/projected/db49e31a-f281-4aa3-9fd1-c6609141ed0e-kube-api-access-ltls9\") pod \"dnsmasq-dns-c657cd4d9-4bm4m\" (UID: \"db49e31a-f281-4aa3-9fd1-c6609141ed0e\") " pod="openstack/dnsmasq-dns-c657cd4d9-4bm4m" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.542861 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/db49e31a-f281-4aa3-9fd1-c6609141ed0e-dns-svc\") pod \"dnsmasq-dns-c657cd4d9-4bm4m\" (UID: \"db49e31a-f281-4aa3-9fd1-c6609141ed0e\") " pod="openstack/dnsmasq-dns-c657cd4d9-4bm4m" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.544945 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/db49e31a-f281-4aa3-9fd1-c6609141ed0e-dns-svc\") pod \"dnsmasq-dns-c657cd4d9-4bm4m\" (UID: \"db49e31a-f281-4aa3-9fd1-c6609141ed0e\") " pod="openstack/dnsmasq-dns-c657cd4d9-4bm4m" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.545459 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db49e31a-f281-4aa3-9fd1-c6609141ed0e-config\") pod \"dnsmasq-dns-c657cd4d9-4bm4m\" (UID: \"db49e31a-f281-4aa3-9fd1-c6609141ed0e\") " pod="openstack/dnsmasq-dns-c657cd4d9-4bm4m" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.573768 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltls9\" (UniqueName: \"kubernetes.io/projected/db49e31a-f281-4aa3-9fd1-c6609141ed0e-kube-api-access-ltls9\") pod \"dnsmasq-dns-c657cd4d9-4bm4m\" (UID: \"db49e31a-f281-4aa3-9fd1-c6609141ed0e\") " pod="openstack/dnsmasq-dns-c657cd4d9-4bm4m" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.644587 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-67ebd6dd-048b-4d3c-a572-07cac8f8079c\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-67ebd6dd-048b-4d3c-a572-07cac8f8079c\") pod \"rabbitmq-server-0\" (UID: \"c22f1671-0f4d-4269-a228-c3abf1e9218f\") " pod="openstack/rabbitmq-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.644648 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c22f1671-0f4d-4269-a228-c3abf1e9218f-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c22f1671-0f4d-4269-a228-c3abf1e9218f\") " pod="openstack/rabbitmq-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.644674 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c22f1671-0f4d-4269-a228-c3abf1e9218f-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c22f1671-0f4d-4269-a228-c3abf1e9218f\") " pod="openstack/rabbitmq-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.644690 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c22f1671-0f4d-4269-a228-c3abf1e9218f-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c22f1671-0f4d-4269-a228-c3abf1e9218f\") " pod="openstack/rabbitmq-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.644757 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5g59l\" (UniqueName: \"kubernetes.io/projected/c22f1671-0f4d-4269-a228-c3abf1e9218f-kube-api-access-5g59l\") pod \"rabbitmq-server-0\" (UID: \"c22f1671-0f4d-4269-a228-c3abf1e9218f\") " pod="openstack/rabbitmq-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.644824 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c22f1671-0f4d-4269-a228-c3abf1e9218f-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c22f1671-0f4d-4269-a228-c3abf1e9218f\") " pod="openstack/rabbitmq-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.644852 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c22f1671-0f4d-4269-a228-c3abf1e9218f-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c22f1671-0f4d-4269-a228-c3abf1e9218f\") " pod="openstack/rabbitmq-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.644908 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c22f1671-0f4d-4269-a228-c3abf1e9218f-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c22f1671-0f4d-4269-a228-c3abf1e9218f\") " pod="openstack/rabbitmq-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.644927 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c22f1671-0f4d-4269-a228-c3abf1e9218f-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c22f1671-0f4d-4269-a228-c3abf1e9218f\") " pod="openstack/rabbitmq-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.708182 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-c657cd4d9-4bm4m" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.735213 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.736641 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.744709 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-7jcs5" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.744972 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.745170 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.745338 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.745512 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.746162 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-67ebd6dd-048b-4d3c-a572-07cac8f8079c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-67ebd6dd-048b-4d3c-a572-07cac8f8079c\") pod \"rabbitmq-server-0\" (UID: \"c22f1671-0f4d-4269-a228-c3abf1e9218f\") " pod="openstack/rabbitmq-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.746214 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c22f1671-0f4d-4269-a228-c3abf1e9218f-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c22f1671-0f4d-4269-a228-c3abf1e9218f\") " pod="openstack/rabbitmq-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.746252 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c22f1671-0f4d-4269-a228-c3abf1e9218f-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c22f1671-0f4d-4269-a228-c3abf1e9218f\") " pod="openstack/rabbitmq-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.746278 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c22f1671-0f4d-4269-a228-c3abf1e9218f-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c22f1671-0f4d-4269-a228-c3abf1e9218f\") " pod="openstack/rabbitmq-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.746316 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5g59l\" (UniqueName: \"kubernetes.io/projected/c22f1671-0f4d-4269-a228-c3abf1e9218f-kube-api-access-5g59l\") pod \"rabbitmq-server-0\" (UID: \"c22f1671-0f4d-4269-a228-c3abf1e9218f\") " pod="openstack/rabbitmq-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.746359 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c22f1671-0f4d-4269-a228-c3abf1e9218f-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c22f1671-0f4d-4269-a228-c3abf1e9218f\") " pod="openstack/rabbitmq-server-0" Feb 03 08:43:31 crc 
kubenswrapper[4998]: I0203 08:43:31.746385 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c22f1671-0f4d-4269-a228-c3abf1e9218f-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c22f1671-0f4d-4269-a228-c3abf1e9218f\") " pod="openstack/rabbitmq-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.746433 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c22f1671-0f4d-4269-a228-c3abf1e9218f-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c22f1671-0f4d-4269-a228-c3abf1e9218f\") " pod="openstack/rabbitmq-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.746455 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c22f1671-0f4d-4269-a228-c3abf1e9218f-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c22f1671-0f4d-4269-a228-c3abf1e9218f\") " pod="openstack/rabbitmq-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.747207 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c22f1671-0f4d-4269-a228-c3abf1e9218f-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c22f1671-0f4d-4269-a228-c3abf1e9218f\") " pod="openstack/rabbitmq-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.755315 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c22f1671-0f4d-4269-a228-c3abf1e9218f-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c22f1671-0f4d-4269-a228-c3abf1e9218f\") " pod="openstack/rabbitmq-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.755462 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c22f1671-0f4d-4269-a228-c3abf1e9218f-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c22f1671-0f4d-4269-a228-c3abf1e9218f\") " pod="openstack/rabbitmq-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.755560 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c22f1671-0f4d-4269-a228-c3abf1e9218f-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c22f1671-0f4d-4269-a228-c3abf1e9218f\") " pod="openstack/rabbitmq-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.756192 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c22f1671-0f4d-4269-a228-c3abf1e9218f-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c22f1671-0f4d-4269-a228-c3abf1e9218f\") " pod="openstack/rabbitmq-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.759757 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c22f1671-0f4d-4269-a228-c3abf1e9218f-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c22f1671-0f4d-4269-a228-c3abf1e9218f\") " pod="openstack/rabbitmq-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.760950 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c22f1671-0f4d-4269-a228-c3abf1e9218f-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: 
\"c22f1671-0f4d-4269-a228-c3abf1e9218f\") " pod="openstack/rabbitmq-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.764407 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.768636 4998 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.768670 4998 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-67ebd6dd-048b-4d3c-a572-07cac8f8079c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-67ebd6dd-048b-4d3c-a572-07cac8f8079c\") pod \"rabbitmq-server-0\" (UID: \"c22f1671-0f4d-4269-a228-c3abf1e9218f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/31889b4e09366a8fa506461cb9ec6a44eb08e879ca8b66dd392a980651249a27/globalmount\"" pod="openstack/rabbitmq-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.775512 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5g59l\" (UniqueName: \"kubernetes.io/projected/c22f1671-0f4d-4269-a228-c3abf1e9218f-kube-api-access-5g59l\") pod \"rabbitmq-server-0\" (UID: \"c22f1671-0f4d-4269-a228-c3abf1e9218f\") " pod="openstack/rabbitmq-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.860379 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-67ebd6dd-048b-4d3c-a572-07cac8f8079c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-67ebd6dd-048b-4d3c-a572-07cac8f8079c\") pod \"rabbitmq-server-0\" (UID: \"c22f1671-0f4d-4269-a228-c3abf1e9218f\") " pod="openstack/rabbitmq-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.861480 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1aff891a-e736-4fbc-9c51-72a14116351e-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1aff891a-e736-4fbc-9c51-72a14116351e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.861515 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1aff891a-e736-4fbc-9c51-72a14116351e-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"1aff891a-e736-4fbc-9c51-72a14116351e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.861543 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c2466990-2f77-4f99-bf97-2478fc380d44\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2466990-2f77-4f99-bf97-2478fc380d44\") pod \"rabbitmq-cell1-server-0\" (UID: \"1aff891a-e736-4fbc-9c51-72a14116351e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.861564 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1aff891a-e736-4fbc-9c51-72a14116351e-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"1aff891a-e736-4fbc-9c51-72a14116351e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.861583 4998 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1aff891a-e736-4fbc-9c51-72a14116351e-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"1aff891a-e736-4fbc-9c51-72a14116351e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.861624 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1aff891a-e736-4fbc-9c51-72a14116351e-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1aff891a-e736-4fbc-9c51-72a14116351e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.861652 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgkkk\" (UniqueName: \"kubernetes.io/projected/1aff891a-e736-4fbc-9c51-72a14116351e-kube-api-access-sgkkk\") pod \"rabbitmq-cell1-server-0\" (UID: \"1aff891a-e736-4fbc-9c51-72a14116351e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.861669 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1aff891a-e736-4fbc-9c51-72a14116351e-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"1aff891a-e736-4fbc-9c51-72a14116351e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.861688 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1aff891a-e736-4fbc-9c51-72a14116351e-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"1aff891a-e736-4fbc-9c51-72a14116351e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.875335 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5fc7665699-8mt5s"] Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.964341 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1aff891a-e736-4fbc-9c51-72a14116351e-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"1aff891a-e736-4fbc-9c51-72a14116351e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.964402 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c2466990-2f77-4f99-bf97-2478fc380d44\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2466990-2f77-4f99-bf97-2478fc380d44\") pod \"rabbitmq-cell1-server-0\" (UID: \"1aff891a-e736-4fbc-9c51-72a14116351e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.964426 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1aff891a-e736-4fbc-9c51-72a14116351e-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"1aff891a-e736-4fbc-9c51-72a14116351e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.964443 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1aff891a-e736-4fbc-9c51-72a14116351e-rabbitmq-confd\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"1aff891a-e736-4fbc-9c51-72a14116351e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.964495 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1aff891a-e736-4fbc-9c51-72a14116351e-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1aff891a-e736-4fbc-9c51-72a14116351e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.964521 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgkkk\" (UniqueName: \"kubernetes.io/projected/1aff891a-e736-4fbc-9c51-72a14116351e-kube-api-access-sgkkk\") pod \"rabbitmq-cell1-server-0\" (UID: \"1aff891a-e736-4fbc-9c51-72a14116351e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.964541 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1aff891a-e736-4fbc-9c51-72a14116351e-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"1aff891a-e736-4fbc-9c51-72a14116351e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.964558 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1aff891a-e736-4fbc-9c51-72a14116351e-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"1aff891a-e736-4fbc-9c51-72a14116351e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.964607 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1aff891a-e736-4fbc-9c51-72a14116351e-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1aff891a-e736-4fbc-9c51-72a14116351e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.966087 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1aff891a-e736-4fbc-9c51-72a14116351e-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1aff891a-e736-4fbc-9c51-72a14116351e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.966352 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1aff891a-e736-4fbc-9c51-72a14116351e-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"1aff891a-e736-4fbc-9c51-72a14116351e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.968752 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1aff891a-e736-4fbc-9c51-72a14116351e-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1aff891a-e736-4fbc-9c51-72a14116351e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.970726 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1aff891a-e736-4fbc-9c51-72a14116351e-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"1aff891a-e736-4fbc-9c51-72a14116351e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 08:43:31 crc 
kubenswrapper[4998]: I0203 08:43:31.974270 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1aff891a-e736-4fbc-9c51-72a14116351e-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"1aff891a-e736-4fbc-9c51-72a14116351e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.974340 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1aff891a-e736-4fbc-9c51-72a14116351e-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"1aff891a-e736-4fbc-9c51-72a14116351e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.974440 4998 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.974525 4998 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c2466990-2f77-4f99-bf97-2478fc380d44\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2466990-2f77-4f99-bf97-2478fc380d44\") pod \"rabbitmq-cell1-server-0\" (UID: \"1aff891a-e736-4fbc-9c51-72a14116351e\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/d7aead48fbc508e2050a43f49d3b585333dd1d952f921244a9e529be1ad839c5/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.975423 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1aff891a-e736-4fbc-9c51-72a14116351e-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"1aff891a-e736-4fbc-9c51-72a14116351e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 08:43:31 crc kubenswrapper[4998]: I0203 08:43:31.988591 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgkkk\" (UniqueName: \"kubernetes.io/projected/1aff891a-e736-4fbc-9c51-72a14116351e-kube-api-access-sgkkk\") pod \"rabbitmq-cell1-server-0\" (UID: \"1aff891a-e736-4fbc-9c51-72a14116351e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.097567 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell2-server-0"] Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.128715 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell2-server-0" Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.131322 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell2-erlang-cookie" Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.131450 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell2-server-conf" Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.131631 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell2-default-user" Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.131969 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell2-plugins-conf" Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.132095 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell2-server-dockercfg-tp7x9" Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.140774 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55cb5c8bf7-ghvq6" event={"ID":"9a7add6c-6a09-4a9d-8209-a07c50a7c311","Type":"ContainerStarted","Data":"e918f0aeb9266723b86fa22e14361b4800a37acbcfd468ef82df51dffc0f1efb"} Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.142664 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell2-server-0"] Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.145118 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fb8fdf8b7-mzj9s" event={"ID":"94056d14-26b2-43ec-a1c9-b93ceac98777","Type":"ContainerStarted","Data":"101e87bc93245174f1526937968d5a2b53125b3a94c5288b8acca3455319351f"} Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.145995 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c2466990-2f77-4f99-bf97-2478fc380d44\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2466990-2f77-4f99-bf97-2478fc380d44\") pod \"rabbitmq-cell1-server-0\" (UID: \"1aff891a-e736-4fbc-9c51-72a14116351e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.165446 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.175328 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fc7665699-8mt5s" event={"ID":"f0facf43-e34c-4cc9-a7bf-8088f11b93c9","Type":"ContainerStarted","Data":"09123453dacfde44c042a0beeddbab8b16b58a836d7e71b11486037236c567d5"} Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.182287 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.237076 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e735f12e-e7a4-47d6-b87e-c0e80dbd2cee-server-conf\") pod \"rabbitmq-cell2-server-0\" (UID: \"e735f12e-e7a4-47d6-b87e-c0e80dbd2cee\") " pod="openstack/rabbitmq-cell2-server-0" Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.237142 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e735f12e-e7a4-47d6-b87e-c0e80dbd2cee-plugins-conf\") pod \"rabbitmq-cell2-server-0\" (UID: \"e735f12e-e7a4-47d6-b87e-c0e80dbd2cee\") " pod="openstack/rabbitmq-cell2-server-0" Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.237175 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e735f12e-e7a4-47d6-b87e-c0e80dbd2cee-rabbitmq-confd\") pod \"rabbitmq-cell2-server-0\" (UID: \"e735f12e-e7a4-47d6-b87e-c0e80dbd2cee\") " pod="openstack/rabbitmq-cell2-server-0" Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.237247 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2j2j\" (UniqueName: \"kubernetes.io/projected/e735f12e-e7a4-47d6-b87e-c0e80dbd2cee-kube-api-access-d2j2j\") pod \"rabbitmq-cell2-server-0\" (UID: \"e735f12e-e7a4-47d6-b87e-c0e80dbd2cee\") " pod="openstack/rabbitmq-cell2-server-0" Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.237298 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e735f12e-e7a4-47d6-b87e-c0e80dbd2cee-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell2-server-0\" (UID: \"e735f12e-e7a4-47d6-b87e-c0e80dbd2cee\") " pod="openstack/rabbitmq-cell2-server-0" Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.237326 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-b9f1ab3c-e9cb-483a-af90-1a5932636c83\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b9f1ab3c-e9cb-483a-af90-1a5932636c83\") pod \"rabbitmq-cell2-server-0\" (UID: \"e735f12e-e7a4-47d6-b87e-c0e80dbd2cee\") " pod="openstack/rabbitmq-cell2-server-0" Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.237385 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e735f12e-e7a4-47d6-b87e-c0e80dbd2cee-pod-info\") pod \"rabbitmq-cell2-server-0\" (UID: \"e735f12e-e7a4-47d6-b87e-c0e80dbd2cee\") " pod="openstack/rabbitmq-cell2-server-0" Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.237416 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e735f12e-e7a4-47d6-b87e-c0e80dbd2cee-rabbitmq-plugins\") pod \"rabbitmq-cell2-server-0\" (UID: \"e735f12e-e7a4-47d6-b87e-c0e80dbd2cee\") " pod="openstack/rabbitmq-cell2-server-0" Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.237636 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: 
\"kubernetes.io/secret/e735f12e-e7a4-47d6-b87e-c0e80dbd2cee-erlang-cookie-secret\") pod \"rabbitmq-cell2-server-0\" (UID: \"e735f12e-e7a4-47d6-b87e-c0e80dbd2cee\") " pod="openstack/rabbitmq-cell2-server-0" Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.339613 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2j2j\" (UniqueName: \"kubernetes.io/projected/e735f12e-e7a4-47d6-b87e-c0e80dbd2cee-kube-api-access-d2j2j\") pod \"rabbitmq-cell2-server-0\" (UID: \"e735f12e-e7a4-47d6-b87e-c0e80dbd2cee\") " pod="openstack/rabbitmq-cell2-server-0" Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.339666 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e735f12e-e7a4-47d6-b87e-c0e80dbd2cee-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell2-server-0\" (UID: \"e735f12e-e7a4-47d6-b87e-c0e80dbd2cee\") " pod="openstack/rabbitmq-cell2-server-0" Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.339695 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-b9f1ab3c-e9cb-483a-af90-1a5932636c83\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b9f1ab3c-e9cb-483a-af90-1a5932636c83\") pod \"rabbitmq-cell2-server-0\" (UID: \"e735f12e-e7a4-47d6-b87e-c0e80dbd2cee\") " pod="openstack/rabbitmq-cell2-server-0" Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.339733 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e735f12e-e7a4-47d6-b87e-c0e80dbd2cee-pod-info\") pod \"rabbitmq-cell2-server-0\" (UID: \"e735f12e-e7a4-47d6-b87e-c0e80dbd2cee\") " pod="openstack/rabbitmq-cell2-server-0" Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.339762 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e735f12e-e7a4-47d6-b87e-c0e80dbd2cee-rabbitmq-plugins\") pod \"rabbitmq-cell2-server-0\" (UID: \"e735f12e-e7a4-47d6-b87e-c0e80dbd2cee\") " pod="openstack/rabbitmq-cell2-server-0" Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.339795 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e735f12e-e7a4-47d6-b87e-c0e80dbd2cee-erlang-cookie-secret\") pod \"rabbitmq-cell2-server-0\" (UID: \"e735f12e-e7a4-47d6-b87e-c0e80dbd2cee\") " pod="openstack/rabbitmq-cell2-server-0" Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.339820 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e735f12e-e7a4-47d6-b87e-c0e80dbd2cee-server-conf\") pod \"rabbitmq-cell2-server-0\" (UID: \"e735f12e-e7a4-47d6-b87e-c0e80dbd2cee\") " pod="openstack/rabbitmq-cell2-server-0" Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.339838 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e735f12e-e7a4-47d6-b87e-c0e80dbd2cee-plugins-conf\") pod \"rabbitmq-cell2-server-0\" (UID: \"e735f12e-e7a4-47d6-b87e-c0e80dbd2cee\") " pod="openstack/rabbitmq-cell2-server-0" Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.339863 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: 
\"kubernetes.io/projected/e735f12e-e7a4-47d6-b87e-c0e80dbd2cee-rabbitmq-confd\") pod \"rabbitmq-cell2-server-0\" (UID: \"e735f12e-e7a4-47d6-b87e-c0e80dbd2cee\") " pod="openstack/rabbitmq-cell2-server-0" Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.341500 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e735f12e-e7a4-47d6-b87e-c0e80dbd2cee-rabbitmq-plugins\") pod \"rabbitmq-cell2-server-0\" (UID: \"e735f12e-e7a4-47d6-b87e-c0e80dbd2cee\") " pod="openstack/rabbitmq-cell2-server-0" Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.344514 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e735f12e-e7a4-47d6-b87e-c0e80dbd2cee-pod-info\") pod \"rabbitmq-cell2-server-0\" (UID: \"e735f12e-e7a4-47d6-b87e-c0e80dbd2cee\") " pod="openstack/rabbitmq-cell2-server-0" Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.345544 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e735f12e-e7a4-47d6-b87e-c0e80dbd2cee-server-conf\") pod \"rabbitmq-cell2-server-0\" (UID: \"e735f12e-e7a4-47d6-b87e-c0e80dbd2cee\") " pod="openstack/rabbitmq-cell2-server-0" Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.350827 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e735f12e-e7a4-47d6-b87e-c0e80dbd2cee-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell2-server-0\" (UID: \"e735f12e-e7a4-47d6-b87e-c0e80dbd2cee\") " pod="openstack/rabbitmq-cell2-server-0" Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.351235 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e735f12e-e7a4-47d6-b87e-c0e80dbd2cee-plugins-conf\") pod \"rabbitmq-cell2-server-0\" (UID: \"e735f12e-e7a4-47d6-b87e-c0e80dbd2cee\") " pod="openstack/rabbitmq-cell2-server-0" Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.351297 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e735f12e-e7a4-47d6-b87e-c0e80dbd2cee-rabbitmq-confd\") pod \"rabbitmq-cell2-server-0\" (UID: \"e735f12e-e7a4-47d6-b87e-c0e80dbd2cee\") " pod="openstack/rabbitmq-cell2-server-0" Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.352311 4998 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.352411 4998 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-b9f1ab3c-e9cb-483a-af90-1a5932636c83\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b9f1ab3c-e9cb-483a-af90-1a5932636c83\") pod \"rabbitmq-cell2-server-0\" (UID: \"e735f12e-e7a4-47d6-b87e-c0e80dbd2cee\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/217e1bb00e8952e66ce42ba20fcc6a7eb7e8c5c008b58b21a58c6c4f44eba507/globalmount\"" pod="openstack/rabbitmq-cell2-server-0"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.357958 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e735f12e-e7a4-47d6-b87e-c0e80dbd2cee-erlang-cookie-secret\") pod \"rabbitmq-cell2-server-0\" (UID: \"e735f12e-e7a4-47d6-b87e-c0e80dbd2cee\") " pod="openstack/rabbitmq-cell2-server-0"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.362615 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2j2j\" (UniqueName: \"kubernetes.io/projected/e735f12e-e7a4-47d6-b87e-c0e80dbd2cee-kube-api-access-d2j2j\") pod \"rabbitmq-cell2-server-0\" (UID: \"e735f12e-e7a4-47d6-b87e-c0e80dbd2cee\") " pod="openstack/rabbitmq-cell2-server-0"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.427862 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-b9f1ab3c-e9cb-483a-af90-1a5932636c83\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b9f1ab3c-e9cb-483a-af90-1a5932636c83\") pod \"rabbitmq-cell2-server-0\" (UID: \"e735f12e-e7a4-47d6-b87e-c0e80dbd2cee\") " pod="openstack/rabbitmq-cell2-server-0"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.446006 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-c657cd4d9-4bm4m"]
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.462920 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell2-server-0"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.510077 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell3-server-0"]
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.511750 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell3-server-0"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.514229 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell3-default-user"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.514387 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell3-erlang-cookie"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.514524 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell3-server-conf"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.514705 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell3-server-dockercfg-fd76w"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.515466 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell3-plugins-conf"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.528454 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell3-server-0"]
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.542665 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ca3b87c5-fc69-4b39-8ba1-a7d141370ca9-pod-info\") pod \"rabbitmq-cell3-server-0\" (UID: \"ca3b87c5-fc69-4b39-8ba1-a7d141370ca9\") " pod="openstack/rabbitmq-cell3-server-0"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.542733 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ca3b87c5-fc69-4b39-8ba1-a7d141370ca9-rabbitmq-plugins\") pod \"rabbitmq-cell3-server-0\" (UID: \"ca3b87c5-fc69-4b39-8ba1-a7d141370ca9\") " pod="openstack/rabbitmq-cell3-server-0"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.542759 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brgqm\" (UniqueName: \"kubernetes.io/projected/ca3b87c5-fc69-4b39-8ba1-a7d141370ca9-kube-api-access-brgqm\") pod \"rabbitmq-cell3-server-0\" (UID: \"ca3b87c5-fc69-4b39-8ba1-a7d141370ca9\") " pod="openstack/rabbitmq-cell3-server-0"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.542814 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-e185d56a-5999-43d6-9ba5-a94b52ef7388\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e185d56a-5999-43d6-9ba5-a94b52ef7388\") pod \"rabbitmq-cell3-server-0\" (UID: \"ca3b87c5-fc69-4b39-8ba1-a7d141370ca9\") " pod="openstack/rabbitmq-cell3-server-0"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.542868 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ca3b87c5-fc69-4b39-8ba1-a7d141370ca9-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell3-server-0\" (UID: \"ca3b87c5-fc69-4b39-8ba1-a7d141370ca9\") " pod="openstack/rabbitmq-cell3-server-0"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.542913 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ca3b87c5-fc69-4b39-8ba1-a7d141370ca9-rabbitmq-confd\") pod \"rabbitmq-cell3-server-0\" (UID: \"ca3b87c5-fc69-4b39-8ba1-a7d141370ca9\") " pod="openstack/rabbitmq-cell3-server-0"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.542946 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ca3b87c5-fc69-4b39-8ba1-a7d141370ca9-plugins-conf\") pod \"rabbitmq-cell3-server-0\" (UID: \"ca3b87c5-fc69-4b39-8ba1-a7d141370ca9\") " pod="openstack/rabbitmq-cell3-server-0"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.542973 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ca3b87c5-fc69-4b39-8ba1-a7d141370ca9-server-conf\") pod \"rabbitmq-cell3-server-0\" (UID: \"ca3b87c5-fc69-4b39-8ba1-a7d141370ca9\") " pod="openstack/rabbitmq-cell3-server-0"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.543220 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ca3b87c5-fc69-4b39-8ba1-a7d141370ca9-erlang-cookie-secret\") pod \"rabbitmq-cell3-server-0\" (UID: \"ca3b87c5-fc69-4b39-8ba1-a7d141370ca9\") " pod="openstack/rabbitmq-cell3-server-0"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.645258 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ca3b87c5-fc69-4b39-8ba1-a7d141370ca9-pod-info\") pod \"rabbitmq-cell3-server-0\" (UID: \"ca3b87c5-fc69-4b39-8ba1-a7d141370ca9\") " pod="openstack/rabbitmq-cell3-server-0"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.645344 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ca3b87c5-fc69-4b39-8ba1-a7d141370ca9-rabbitmq-plugins\") pod \"rabbitmq-cell3-server-0\" (UID: \"ca3b87c5-fc69-4b39-8ba1-a7d141370ca9\") " pod="openstack/rabbitmq-cell3-server-0"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.645363 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brgqm\" (UniqueName: \"kubernetes.io/projected/ca3b87c5-fc69-4b39-8ba1-a7d141370ca9-kube-api-access-brgqm\") pod \"rabbitmq-cell3-server-0\" (UID: \"ca3b87c5-fc69-4b39-8ba1-a7d141370ca9\") " pod="openstack/rabbitmq-cell3-server-0"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.645411 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-e185d56a-5999-43d6-9ba5-a94b52ef7388\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e185d56a-5999-43d6-9ba5-a94b52ef7388\") pod \"rabbitmq-cell3-server-0\" (UID: \"ca3b87c5-fc69-4b39-8ba1-a7d141370ca9\") " pod="openstack/rabbitmq-cell3-server-0"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.645445 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ca3b87c5-fc69-4b39-8ba1-a7d141370ca9-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell3-server-0\" (UID: \"ca3b87c5-fc69-4b39-8ba1-a7d141370ca9\") " pod="openstack/rabbitmq-cell3-server-0"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.645501 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ca3b87c5-fc69-4b39-8ba1-a7d141370ca9-rabbitmq-confd\") pod \"rabbitmq-cell3-server-0\" (UID: \"ca3b87c5-fc69-4b39-8ba1-a7d141370ca9\") " pod="openstack/rabbitmq-cell3-server-0"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.645524 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ca3b87c5-fc69-4b39-8ba1-a7d141370ca9-plugins-conf\") pod \"rabbitmq-cell3-server-0\" (UID: \"ca3b87c5-fc69-4b39-8ba1-a7d141370ca9\") " pod="openstack/rabbitmq-cell3-server-0"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.645572 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ca3b87c5-fc69-4b39-8ba1-a7d141370ca9-server-conf\") pod \"rabbitmq-cell3-server-0\" (UID: \"ca3b87c5-fc69-4b39-8ba1-a7d141370ca9\") " pod="openstack/rabbitmq-cell3-server-0"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.645665 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ca3b87c5-fc69-4b39-8ba1-a7d141370ca9-erlang-cookie-secret\") pod \"rabbitmq-cell3-server-0\" (UID: \"ca3b87c5-fc69-4b39-8ba1-a7d141370ca9\") " pod="openstack/rabbitmq-cell3-server-0"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.647181 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ca3b87c5-fc69-4b39-8ba1-a7d141370ca9-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell3-server-0\" (UID: \"ca3b87c5-fc69-4b39-8ba1-a7d141370ca9\") " pod="openstack/rabbitmq-cell3-server-0"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.647468 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ca3b87c5-fc69-4b39-8ba1-a7d141370ca9-rabbitmq-plugins\") pod \"rabbitmq-cell3-server-0\" (UID: \"ca3b87c5-fc69-4b39-8ba1-a7d141370ca9\") " pod="openstack/rabbitmq-cell3-server-0"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.649090 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ca3b87c5-fc69-4b39-8ba1-a7d141370ca9-plugins-conf\") pod \"rabbitmq-cell3-server-0\" (UID: \"ca3b87c5-fc69-4b39-8ba1-a7d141370ca9\") " pod="openstack/rabbitmq-cell3-server-0"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.651159 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ca3b87c5-fc69-4b39-8ba1-a7d141370ca9-server-conf\") pod \"rabbitmq-cell3-server-0\" (UID: \"ca3b87c5-fc69-4b39-8ba1-a7d141370ca9\") " pod="openstack/rabbitmq-cell3-server-0"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.652503 4998 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.652537 4998 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-e185d56a-5999-43d6-9ba5-a94b52ef7388\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e185d56a-5999-43d6-9ba5-a94b52ef7388\") pod \"rabbitmq-cell3-server-0\" (UID: \"ca3b87c5-fc69-4b39-8ba1-a7d141370ca9\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/b13375a0e0ca02b5b7ebe822d3e10fbd8724f44ae3a7c2e08fb519116aceadbe/globalmount\"" pod="openstack/rabbitmq-cell3-server-0"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.653773 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ca3b87c5-fc69-4b39-8ba1-a7d141370ca9-pod-info\") pod \"rabbitmq-cell3-server-0\" (UID: \"ca3b87c5-fc69-4b39-8ba1-a7d141370ca9\") " pod="openstack/rabbitmq-cell3-server-0"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.665182 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ca3b87c5-fc69-4b39-8ba1-a7d141370ca9-erlang-cookie-secret\") pod \"rabbitmq-cell3-server-0\" (UID: \"ca3b87c5-fc69-4b39-8ba1-a7d141370ca9\") " pod="openstack/rabbitmq-cell3-server-0"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.667341 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ca3b87c5-fc69-4b39-8ba1-a7d141370ca9-rabbitmq-confd\") pod \"rabbitmq-cell3-server-0\" (UID: \"ca3b87c5-fc69-4b39-8ba1-a7d141370ca9\") " pod="openstack/rabbitmq-cell3-server-0"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.667551 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brgqm\" (UniqueName: \"kubernetes.io/projected/ca3b87c5-fc69-4b39-8ba1-a7d141370ca9-kube-api-access-brgqm\") pod \"rabbitmq-cell3-server-0\" (UID: \"ca3b87c5-fc69-4b39-8ba1-a7d141370ca9\") " pod="openstack/rabbitmq-cell3-server-0"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.712629 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-e185d56a-5999-43d6-9ba5-a94b52ef7388\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e185d56a-5999-43d6-9ba5-a94b52ef7388\") pod \"rabbitmq-cell3-server-0\" (UID: \"ca3b87c5-fc69-4b39-8ba1-a7d141370ca9\") " pod="openstack/rabbitmq-cell3-server-0"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.738027 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.842124 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell3-server-0"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.856722 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Feb 03 08:43:32 crc kubenswrapper[4998]: W0203 08:43:32.902021 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1aff891a_e736_4fbc_9c51_72a14116351e.slice/crio-02fd463802370cf64845d1ab8012587cb2369ccbc04c2c6d175aa6769915a253 WatchSource:0}: Error finding container 02fd463802370cf64845d1ab8012587cb2369ccbc04c2c6d175aa6769915a253: Status 404 returned error can't find the container with id 02fd463802370cf64845d1ab8012587cb2369ccbc04c2c6d175aa6769915a253
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.942000 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"]
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.943775 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.949804 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.950773 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.951266 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-66s2w"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.954625 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.961991 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle"
Feb 03 08:43:32 crc kubenswrapper[4998]: I0203 08:43:32.964132 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"]
Feb 03 08:43:33 crc kubenswrapper[4998]: I0203 08:43:33.021805 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell2-server-0"]
Feb 03 08:43:33 crc kubenswrapper[4998]: I0203 08:43:33.053981 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/396e3696-7910-4dca-9648-f4be4d5075d2-config-data-generated\") pod \"openstack-galera-0\" (UID: \"396e3696-7910-4dca-9648-f4be4d5075d2\") " pod="openstack/openstack-galera-0"
Feb 03 08:43:33 crc kubenswrapper[4998]: I0203 08:43:33.054031 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/396e3696-7910-4dca-9648-f4be4d5075d2-config-data-default\") pod \"openstack-galera-0\" (UID: \"396e3696-7910-4dca-9648-f4be4d5075d2\") " pod="openstack/openstack-galera-0"
Feb 03 08:43:33 crc kubenswrapper[4998]: I0203 08:43:33.054076 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/396e3696-7910-4dca-9648-f4be4d5075d2-kolla-config\") pod \"openstack-galera-0\" (UID: \"396e3696-7910-4dca-9648-f4be4d5075d2\") " pod="openstack/openstack-galera-0"
Feb 03 08:43:33 crc kubenswrapper[4998]: I0203 08:43:33.054101 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/396e3696-7910-4dca-9648-f4be4d5075d2-operator-scripts\") pod \"openstack-galera-0\" (UID: \"396e3696-7910-4dca-9648-f4be4d5075d2\") " pod="openstack/openstack-galera-0"
Feb 03 08:43:33 crc kubenswrapper[4998]: I0203 08:43:33.054180 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6mdg\" (UniqueName: \"kubernetes.io/projected/396e3696-7910-4dca-9648-f4be4d5075d2-kube-api-access-f6mdg\") pod \"openstack-galera-0\" (UID: \"396e3696-7910-4dca-9648-f4be4d5075d2\") " pod="openstack/openstack-galera-0"
Feb 03 08:43:33 crc kubenswrapper[4998]: I0203 08:43:33.054208 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-44aa5762-32bb-472e-b16d-4f6171845c92\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-44aa5762-32bb-472e-b16d-4f6171845c92\") pod \"openstack-galera-0\" (UID: \"396e3696-7910-4dca-9648-f4be4d5075d2\") " pod="openstack/openstack-galera-0"
Feb 03 08:43:33 crc kubenswrapper[4998]: I0203 08:43:33.054245 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/396e3696-7910-4dca-9648-f4be4d5075d2-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"396e3696-7910-4dca-9648-f4be4d5075d2\") " pod="openstack/openstack-galera-0"
Feb 03 08:43:33 crc kubenswrapper[4998]: I0203 08:43:33.054269 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/396e3696-7910-4dca-9648-f4be4d5075d2-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"396e3696-7910-4dca-9648-f4be4d5075d2\") " pod="openstack/openstack-galera-0"
Feb 03 08:43:33 crc kubenswrapper[4998]: I0203 08:43:33.156194 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6mdg\" (UniqueName: \"kubernetes.io/projected/396e3696-7910-4dca-9648-f4be4d5075d2-kube-api-access-f6mdg\") pod \"openstack-galera-0\" (UID: \"396e3696-7910-4dca-9648-f4be4d5075d2\") " pod="openstack/openstack-galera-0"
Feb 03 08:43:33 crc kubenswrapper[4998]: I0203 08:43:33.156246 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-44aa5762-32bb-472e-b16d-4f6171845c92\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-44aa5762-32bb-472e-b16d-4f6171845c92\") pod \"openstack-galera-0\" (UID: \"396e3696-7910-4dca-9648-f4be4d5075d2\") " pod="openstack/openstack-galera-0"
Feb 03 08:43:33 crc kubenswrapper[4998]: I0203 08:43:33.156296 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/396e3696-7910-4dca-9648-f4be4d5075d2-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"396e3696-7910-4dca-9648-f4be4d5075d2\") " pod="openstack/openstack-galera-0"
Feb 03 08:43:33 crc kubenswrapper[4998]: I0203 08:43:33.156320 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/396e3696-7910-4dca-9648-f4be4d5075d2-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"396e3696-7910-4dca-9648-f4be4d5075d2\") " pod="openstack/openstack-galera-0"
Feb 03 08:43:33 crc kubenswrapper[4998]: I0203 08:43:33.156399 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/396e3696-7910-4dca-9648-f4be4d5075d2-config-data-generated\") pod \"openstack-galera-0\" (UID: \"396e3696-7910-4dca-9648-f4be4d5075d2\") " pod="openstack/openstack-galera-0"
Feb 03 08:43:33 crc kubenswrapper[4998]: I0203 08:43:33.156429 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/396e3696-7910-4dca-9648-f4be4d5075d2-config-data-default\") pod \"openstack-galera-0\" (UID: \"396e3696-7910-4dca-9648-f4be4d5075d2\") " pod="openstack/openstack-galera-0"
Feb 03 08:43:33 crc kubenswrapper[4998]: I0203 08:43:33.156464 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/396e3696-7910-4dca-9648-f4be4d5075d2-kolla-config\") pod \"openstack-galera-0\" (UID: \"396e3696-7910-4dca-9648-f4be4d5075d2\") " pod="openstack/openstack-galera-0"
Feb 03 08:43:33 crc kubenswrapper[4998]: I0203 08:43:33.156485 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/396e3696-7910-4dca-9648-f4be4d5075d2-operator-scripts\") pod \"openstack-galera-0\" (UID: \"396e3696-7910-4dca-9648-f4be4d5075d2\") " pod="openstack/openstack-galera-0"
Feb 03 08:43:33 crc kubenswrapper[4998]: I0203 08:43:33.157396 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/396e3696-7910-4dca-9648-f4be4d5075d2-config-data-generated\") pod \"openstack-galera-0\" (UID: \"396e3696-7910-4dca-9648-f4be4d5075d2\") " pod="openstack/openstack-galera-0"
Feb 03 08:43:33 crc kubenswrapper[4998]: I0203 08:43:33.158149 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/396e3696-7910-4dca-9648-f4be4d5075d2-kolla-config\") pod \"openstack-galera-0\" (UID: \"396e3696-7910-4dca-9648-f4be4d5075d2\") " pod="openstack/openstack-galera-0"
Feb 03 08:43:33 crc kubenswrapper[4998]: I0203 08:43:33.158336 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/396e3696-7910-4dca-9648-f4be4d5075d2-operator-scripts\") pod \"openstack-galera-0\" (UID: \"396e3696-7910-4dca-9648-f4be4d5075d2\") " pod="openstack/openstack-galera-0"
Feb 03 08:43:33 crc kubenswrapper[4998]: I0203 08:43:33.162332 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/396e3696-7910-4dca-9648-f4be4d5075d2-config-data-default\") pod \"openstack-galera-0\" (UID: \"396e3696-7910-4dca-9648-f4be4d5075d2\") " pod="openstack/openstack-galera-0"
Feb 03 08:43:33 crc kubenswrapper[4998]: I0203 08:43:33.162946 4998 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Feb 03 08:43:33 crc kubenswrapper[4998]: I0203 08:43:33.162980 4998 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-44aa5762-32bb-472e-b16d-4f6171845c92\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-44aa5762-32bb-472e-b16d-4f6171845c92\") pod \"openstack-galera-0\" (UID: \"396e3696-7910-4dca-9648-f4be4d5075d2\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f056cf71cc87df18b25228951fb4c983b44e17342012606e7fd1a3bf5f9a213/globalmount\"" pod="openstack/openstack-galera-0"
Feb 03 08:43:33 crc kubenswrapper[4998]: I0203 08:43:33.168876 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/396e3696-7910-4dca-9648-f4be4d5075d2-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"396e3696-7910-4dca-9648-f4be4d5075d2\") " pod="openstack/openstack-galera-0"
Feb 03 08:43:33 crc kubenswrapper[4998]: I0203 08:43:33.170677 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/396e3696-7910-4dca-9648-f4be4d5075d2-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"396e3696-7910-4dca-9648-f4be4d5075d2\") " pod="openstack/openstack-galera-0"
Feb 03 08:43:33 crc kubenswrapper[4998]: I0203 08:43:33.182897 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6mdg\" (UniqueName: \"kubernetes.io/projected/396e3696-7910-4dca-9648-f4be4d5075d2-kube-api-access-f6mdg\") pod \"openstack-galera-0\" (UID: \"396e3696-7910-4dca-9648-f4be4d5075d2\") " pod="openstack/openstack-galera-0"
Feb 03 08:43:33 crc kubenswrapper[4998]: I0203 08:43:33.200097 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1aff891a-e736-4fbc-9c51-72a14116351e","Type":"ContainerStarted","Data":"02fd463802370cf64845d1ab8012587cb2369ccbc04c2c6d175aa6769915a253"}
Feb 03 08:43:33 crc kubenswrapper[4998]: I0203 08:43:33.202181 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c22f1671-0f4d-4269-a228-c3abf1e9218f","Type":"ContainerStarted","Data":"d8dc26ec6f1191711cc40a826a33061eb3f3ed1214fd75ffa016da7c18cb531e"}
Feb 03 08:43:33 crc kubenswrapper[4998]: I0203 08:43:33.203275 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c657cd4d9-4bm4m" event={"ID":"db49e31a-f281-4aa3-9fd1-c6609141ed0e","Type":"ContainerStarted","Data":"e2a72e09745e52e4be947c258331107eb394141f6584a150ee7bab06ef09b438"}
Feb 03 08:43:33 crc kubenswrapper[4998]: I0203 08:43:33.205037 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell2-server-0" event={"ID":"e735f12e-e7a4-47d6-b87e-c0e80dbd2cee","Type":"ContainerStarted","Data":"b53cd87e040a8e1adadc3788872fd2a9f03929a94b41835278c7bdd438a3d380"}
Feb 03 08:43:33 crc kubenswrapper[4998]: I0203 08:43:33.222529 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-44aa5762-32bb-472e-b16d-4f6171845c92\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-44aa5762-32bb-472e-b16d-4f6171845c92\") pod \"openstack-galera-0\" (UID: \"396e3696-7910-4dca-9648-f4be4d5075d2\") " pod="openstack/openstack-galera-0"
Feb 03 08:43:33 crc kubenswrapper[4998]: I0203 08:43:33.295168 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Feb 03 08:43:33 crc kubenswrapper[4998]: I0203 08:43:33.436475 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell3-server-0"]
Feb 03 08:43:33 crc kubenswrapper[4998]: W0203 08:43:33.446603 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podca3b87c5_fc69_4b39_8ba1_a7d141370ca9.slice/crio-c4e1723f86fe607ff880a9a1dbd27f91724682b2706d52f0da6e5ea894e75b48 WatchSource:0}: Error finding container c4e1723f86fe607ff880a9a1dbd27f91724682b2706d52f0da6e5ea894e75b48: Status 404 returned error can't find the container with id c4e1723f86fe607ff880a9a1dbd27f91724682b2706d52f0da6e5ea894e75b48
Feb 03 08:43:33 crc kubenswrapper[4998]: I0203 08:43:33.853186 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"]
Feb 03 08:43:34 crc kubenswrapper[4998]: I0203 08:43:34.218956 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell3-server-0" event={"ID":"ca3b87c5-fc69-4b39-8ba1-a7d141370ca9","Type":"ContainerStarted","Data":"c4e1723f86fe607ff880a9a1dbd27f91724682b2706d52f0da6e5ea894e75b48"}
Feb 03 08:43:34 crc kubenswrapper[4998]: I0203 08:43:34.221338 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"396e3696-7910-4dca-9648-f4be4d5075d2","Type":"ContainerStarted","Data":"71661eb9b9442a598059f52176610b50852f09003ecbdc5e6b2fb5f5f5313b97"}
Feb 03 08:43:34 crc kubenswrapper[4998]: I0203 08:43:34.267364 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"]
Feb 03 08:43:34 crc kubenswrapper[4998]: I0203 08:43:34.272114 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Feb 03 08:43:34 crc kubenswrapper[4998]: I0203 08:43:34.275795 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-nchb2"
Feb 03 08:43:34 crc kubenswrapper[4998]: I0203 08:43:34.276199 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts"
Feb 03 08:43:34 crc kubenswrapper[4998]: I0203 08:43:34.276422 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc"
Feb 03 08:43:34 crc kubenswrapper[4998]: I0203 08:43:34.276303 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data"
Feb 03 08:43:34 crc kubenswrapper[4998]: I0203 08:43:34.302134 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Feb 03 08:43:34 crc kubenswrapper[4998]: I0203 08:43:34.387739 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/d3f299c1-ab12-4ba1-80fc-6d286f546d1d-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"d3f299c1-ab12-4ba1-80fc-6d286f546d1d\") " pod="openstack/openstack-cell1-galera-0"
Feb 03 08:43:34 crc kubenswrapper[4998]: I0203 08:43:34.387822 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d3f299c1-ab12-4ba1-80fc-6d286f546d1d-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"d3f299c1-ab12-4ba1-80fc-6d286f546d1d\") " pod="openstack/openstack-cell1-galera-0"
Feb 03 08:43:34 crc kubenswrapper[4998]: I0203 08:43:34.387849 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/d3f299c1-ab12-4ba1-80fc-6d286f546d1d-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"d3f299c1-ab12-4ba1-80fc-6d286f546d1d\") " pod="openstack/openstack-cell1-galera-0"
Feb 03 08:43:34 crc kubenswrapper[4998]: I0203 08:43:34.387894 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-7b7a6208-819b-45ea-8dbb-98381483b9db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7b7a6208-819b-45ea-8dbb-98381483b9db\") pod \"openstack-cell1-galera-0\" (UID: \"d3f299c1-ab12-4ba1-80fc-6d286f546d1d\") " pod="openstack/openstack-cell1-galera-0"
Feb 03 08:43:34 crc kubenswrapper[4998]: I0203 08:43:34.387911 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3f299c1-ab12-4ba1-80fc-6d286f546d1d-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"d3f299c1-ab12-4ba1-80fc-6d286f546d1d\") " pod="openstack/openstack-cell1-galera-0"
Feb 03 08:43:34 crc kubenswrapper[4998]: I0203 08:43:34.387964 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d3f299c1-ab12-4ba1-80fc-6d286f546d1d-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"d3f299c1-ab12-4ba1-80fc-6d286f546d1d\") " pod="openstack/openstack-cell1-galera-0"
Feb 03 08:43:34 crc kubenswrapper[4998]: I0203 08:43:34.388062 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/d3f299c1-ab12-4ba1-80fc-6d286f546d1d-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"d3f299c1-ab12-4ba1-80fc-6d286f546d1d\") " pod="openstack/openstack-cell1-galera-0"
Feb 03 08:43:34 crc kubenswrapper[4998]: I0203 08:43:34.388189 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2gsk\" (UniqueName: \"kubernetes.io/projected/d3f299c1-ab12-4ba1-80fc-6d286f546d1d-kube-api-access-d2gsk\") pod \"openstack-cell1-galera-0\" (UID: \"d3f299c1-ab12-4ba1-80fc-6d286f546d1d\") " pod="openstack/openstack-cell1-galera-0"
Feb 03 08:43:34 crc kubenswrapper[4998]: I0203 08:43:34.493424 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/d3f299c1-ab12-4ba1-80fc-6d286f546d1d-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"d3f299c1-ab12-4ba1-80fc-6d286f546d1d\") " pod="openstack/openstack-cell1-galera-0"
Feb 03 08:43:34 crc kubenswrapper[4998]: I0203 08:43:34.493474 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d3f299c1-ab12-4ba1-80fc-6d286f546d1d-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"d3f299c1-ab12-4ba1-80fc-6d286f546d1d\") " pod="openstack/openstack-cell1-galera-0"
Feb 03 08:43:34 crc kubenswrapper[4998]: I0203 08:43:34.493507 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/d3f299c1-ab12-4ba1-80fc-6d286f546d1d-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"d3f299c1-ab12-4ba1-80fc-6d286f546d1d\") " pod="openstack/openstack-cell1-galera-0"
Feb 03 08:43:34 crc kubenswrapper[4998]: I0203 08:43:34.493541 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-7b7a6208-819b-45ea-8dbb-98381483b9db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7b7a6208-819b-45ea-8dbb-98381483b9db\") pod \"openstack-cell1-galera-0\" (UID: \"d3f299c1-ab12-4ba1-80fc-6d286f546d1d\") " pod="openstack/openstack-cell1-galera-0"
Feb 03 08:43:34 crc kubenswrapper[4998]: I0203 08:43:34.493562 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3f299c1-ab12-4ba1-80fc-6d286f546d1d-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"d3f299c1-ab12-4ba1-80fc-6d286f546d1d\") " pod="openstack/openstack-cell1-galera-0"
Feb 03 08:43:34 crc kubenswrapper[4998]: I0203 08:43:34.493648 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d3f299c1-ab12-4ba1-80fc-6d286f546d1d-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"d3f299c1-ab12-4ba1-80fc-6d286f546d1d\") " pod="openstack/openstack-cell1-galera-0"
Feb 03 08:43:34 crc kubenswrapper[4998]: I0203 08:43:34.493715 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/d3f299c1-ab12-4ba1-80fc-6d286f546d1d-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"d3f299c1-ab12-4ba1-80fc-6d286f546d1d\") " pod="openstack/openstack-cell1-galera-0"
Feb 03 08:43:34 crc kubenswrapper[4998]: I0203 08:43:34.493840 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2gsk\" (UniqueName: \"kubernetes.io/projected/d3f299c1-ab12-4ba1-80fc-6d286f546d1d-kube-api-access-d2gsk\") pod \"openstack-cell1-galera-0\" (UID: \"d3f299c1-ab12-4ba1-80fc-6d286f546d1d\") " pod="openstack/openstack-cell1-galera-0"
Feb 03 08:43:34 crc kubenswrapper[4998]: I0203 08:43:34.497766 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d3f299c1-ab12-4ba1-80fc-6d286f546d1d-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"d3f299c1-ab12-4ba1-80fc-6d286f546d1d\") " pod="openstack/openstack-cell1-galera-0"
Feb 03 08:43:34 crc kubenswrapper[4998]: I0203 08:43:34.498190 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/d3f299c1-ab12-4ba1-80fc-6d286f546d1d-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"d3f299c1-ab12-4ba1-80fc-6d286f546d1d\") " pod="openstack/openstack-cell1-galera-0"
Feb 03 08:43:34 crc kubenswrapper[4998]: I0203 08:43:34.497250 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/d3f299c1-ab12-4ba1-80fc-6d286f546d1d-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"d3f299c1-ab12-4ba1-80fc-6d286f546d1d\") " pod="openstack/openstack-cell1-galera-0"
Feb 03 08:43:34 crc kubenswrapper[4998]: I0203 08:43:34.498257 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d3f299c1-ab12-4ba1-80fc-6d286f546d1d-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"d3f299c1-ab12-4ba1-80fc-6d286f546d1d\") " pod="openstack/openstack-cell1-galera-0"
Feb 03 08:43:34 crc kubenswrapper[4998]: I0203 08:43:34.500306 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/d3f299c1-ab12-4ba1-80fc-6d286f546d1d-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"d3f299c1-ab12-4ba1-80fc-6d286f546d1d\") " pod="openstack/openstack-cell1-galera-0"
Feb 03 08:43:34 crc kubenswrapper[4998]: I0203 08:43:34.502061 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3f299c1-ab12-4ba1-80fc-6d286f546d1d-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"d3f299c1-ab12-4ba1-80fc-6d286f546d1d\") " pod="openstack/openstack-cell1-galera-0"
Feb 03 08:43:34 crc kubenswrapper[4998]: I0203 08:43:34.502477 4998 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Feb 03 08:43:34 crc kubenswrapper[4998]: I0203 08:43:34.502526 4998 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-7b7a6208-819b-45ea-8dbb-98381483b9db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7b7a6208-819b-45ea-8dbb-98381483b9db\") pod \"openstack-cell1-galera-0\" (UID: \"d3f299c1-ab12-4ba1-80fc-6d286f546d1d\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/498d9dea30359d6089a3e391f9e20e90a384e16504080075a46050b4c72e77c3/globalmount\"" pod="openstack/openstack-cell1-galera-0"
Feb 03 08:43:34 crc kubenswrapper[4998]: I0203 08:43:34.516858 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2gsk\" (UniqueName: \"kubernetes.io/projected/d3f299c1-ab12-4ba1-80fc-6d286f546d1d-kube-api-access-d2gsk\") pod \"openstack-cell1-galera-0\" (UID: \"d3f299c1-ab12-4ba1-80fc-6d286f546d1d\") " pod="openstack/openstack-cell1-galera-0"
Feb 03 08:43:34 crc kubenswrapper[4998]: I0203 08:43:34.544361 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-7b7a6208-819b-45ea-8dbb-98381483b9db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7b7a6208-819b-45ea-8dbb-98381483b9db\") pod \"openstack-cell1-galera-0\" (UID: \"d3f299c1-ab12-4ba1-80fc-6d286f546d1d\") " pod="openstack/openstack-cell1-galera-0"
Feb 03 08:43:34 crc kubenswrapper[4998]: I0203 08:43:34.606879 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Feb 03 08:43:35 crc kubenswrapper[4998]: W0203 08:43:35.150665 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd3f299c1_ab12_4ba1_80fc_6d286f546d1d.slice/crio-2592cd6664e7683eb2564bccf3f565c790f541939fb2c1a8c6e5eb6a5a2e1633 WatchSource:0}: Error finding container 2592cd6664e7683eb2564bccf3f565c790f541939fb2c1a8c6e5eb6a5a2e1633: Status 404 returned error can't find the container with id 2592cd6664e7683eb2564bccf3f565c790f541939fb2c1a8c6e5eb6a5a2e1633
Feb 03 08:43:35 crc kubenswrapper[4998]: I0203 08:43:35.154106 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Feb 03 08:43:35 crc kubenswrapper[4998]: I0203 08:43:35.234044 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"d3f299c1-ab12-4ba1-80fc-6d286f546d1d","Type":"ContainerStarted","Data":"2592cd6664e7683eb2564bccf3f565c790f541939fb2c1a8c6e5eb6a5a2e1633"}
Feb 03 08:43:35 crc kubenswrapper[4998]: I0203 08:43:35.759395 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell2-galera-0"]
Feb 03 08:43:35 crc kubenswrapper[4998]: I0203 08:43:35.766111 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell2-galera-0"
Feb 03 08:43:35 crc kubenswrapper[4998]: I0203 08:43:35.770226 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell2-dockercfg-pnhk4"
Feb 03 08:43:35 crc kubenswrapper[4998]: I0203 08:43:35.770307 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell2-scripts"
Feb 03 08:43:35 crc kubenswrapper[4998]: I0203 08:43:35.770446 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell2-config-data"
Feb 03 08:43:35 crc kubenswrapper[4998]: I0203 08:43:35.770745 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell2-svc"
Feb 03 08:43:35 crc kubenswrapper[4998]: I0203 08:43:35.775408 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell2-galera-0"]
Feb 03 08:43:35 crc kubenswrapper[4998]: I0203 08:43:35.928962 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef339639-5ecd-4a27-899c-4b61d5ef5031-galera-tls-certs\") pod \"openstack-cell2-galera-0\" (UID: \"ef339639-5ecd-4a27-899c-4b61d5ef5031\") " pod="openstack/openstack-cell2-galera-0"
Feb 03 08:43:35 crc kubenswrapper[4998]: I0203 08:43:35.929092 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/ef339639-5ecd-4a27-899c-4b61d5ef5031-config-data-default\") pod \"openstack-cell2-galera-0\" (UID: \"ef339639-5ecd-4a27-899c-4b61d5ef5031\") " pod="openstack/openstack-cell2-galera-0"
Feb 03 08:43:35 crc kubenswrapper[4998]: I0203 08:43:35.929129 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bdkt\" (UniqueName: \"kubernetes.io/projected/ef339639-5ecd-4a27-899c-4b61d5ef5031-kube-api-access-7bdkt\") pod \"openstack-cell2-galera-0\" (UID: \"ef339639-5ecd-4a27-899c-4b61d5ef5031\") " pod="openstack/openstack-cell2-galera-0"
Feb 03 08:43:35 crc kubenswrapper[4998]: I0203 08:43:35.929217 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef339639-5ecd-4a27-899c-4b61d5ef5031-combined-ca-bundle\") pod \"openstack-cell2-galera-0\" (UID: \"ef339639-5ecd-4a27-899c-4b61d5ef5031\") " pod="openstack/openstack-cell2-galera-0"
Feb 03 08:43:35 crc kubenswrapper[4998]: I0203 08:43:35.929281 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/ef339639-5ecd-4a27-899c-4b61d5ef5031-config-data-generated\") pod \"openstack-cell2-galera-0\" (UID: \"ef339639-5ecd-4a27-899c-4b61d5ef5031\") " pod="openstack/openstack-cell2-galera-0"
Feb 03 08:43:35 crc kubenswrapper[4998]: I0203 08:43:35.929373 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ef339639-5ecd-4a27-899c-4b61d5ef5031-kolla-config\") pod \"openstack-cell2-galera-0\" (UID: \"ef339639-5ecd-4a27-899c-4b61d5ef5031\") " pod="openstack/openstack-cell2-galera-0"
Feb 03 08:43:35 crc kubenswrapper[4998]: I0203 08:43:35.929412 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\"
(UniqueName: \"kubernetes.io/configmap/ef339639-5ecd-4a27-899c-4b61d5ef5031-operator-scripts\") pod \"openstack-cell2-galera-0\" (UID: \"ef339639-5ecd-4a27-899c-4b61d5ef5031\") " pod="openstack/openstack-cell2-galera-0" Feb 03 08:43:35 crc kubenswrapper[4998]: I0203 08:43:35.929518 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-4b5a0df7-b056-4167-8f3a-c342e2e70220\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4b5a0df7-b056-4167-8f3a-c342e2e70220\") pod \"openstack-cell2-galera-0\" (UID: \"ef339639-5ecd-4a27-899c-4b61d5ef5031\") " pod="openstack/openstack-cell2-galera-0" Feb 03 08:43:36 crc kubenswrapper[4998]: I0203 08:43:36.023418 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Feb 03 08:43:36 crc kubenswrapper[4998]: I0203 08:43:36.024449 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Feb 03 08:43:36 crc kubenswrapper[4998]: I0203 08:43:36.029549 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Feb 03 08:43:36 crc kubenswrapper[4998]: I0203 08:43:36.029607 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-xsznq" Feb 03 08:43:36 crc kubenswrapper[4998]: I0203 08:43:36.030514 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-4b5a0df7-b056-4167-8f3a-c342e2e70220\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4b5a0df7-b056-4167-8f3a-c342e2e70220\") pod \"openstack-cell2-galera-0\" (UID: \"ef339639-5ecd-4a27-899c-4b61d5ef5031\") " pod="openstack/openstack-cell2-galera-0" Feb 03 08:43:36 crc kubenswrapper[4998]: I0203 08:43:36.030592 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef339639-5ecd-4a27-899c-4b61d5ef5031-galera-tls-certs\") pod \"openstack-cell2-galera-0\" (UID: \"ef339639-5ecd-4a27-899c-4b61d5ef5031\") " pod="openstack/openstack-cell2-galera-0" Feb 03 08:43:36 crc kubenswrapper[4998]: I0203 08:43:36.030636 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/ef339639-5ecd-4a27-899c-4b61d5ef5031-config-data-default\") pod \"openstack-cell2-galera-0\" (UID: \"ef339639-5ecd-4a27-899c-4b61d5ef5031\") " pod="openstack/openstack-cell2-galera-0" Feb 03 08:43:36 crc kubenswrapper[4998]: I0203 08:43:36.030669 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bdkt\" (UniqueName: \"kubernetes.io/projected/ef339639-5ecd-4a27-899c-4b61d5ef5031-kube-api-access-7bdkt\") pod \"openstack-cell2-galera-0\" (UID: \"ef339639-5ecd-4a27-899c-4b61d5ef5031\") " pod="openstack/openstack-cell2-galera-0" Feb 03 08:43:36 crc kubenswrapper[4998]: I0203 08:43:36.030706 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef339639-5ecd-4a27-899c-4b61d5ef5031-combined-ca-bundle\") pod \"openstack-cell2-galera-0\" (UID: \"ef339639-5ecd-4a27-899c-4b61d5ef5031\") " pod="openstack/openstack-cell2-galera-0" Feb 03 08:43:36 crc kubenswrapper[4998]: I0203 08:43:36.030748 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: 
\"kubernetes.io/empty-dir/ef339639-5ecd-4a27-899c-4b61d5ef5031-config-data-generated\") pod \"openstack-cell2-galera-0\" (UID: \"ef339639-5ecd-4a27-899c-4b61d5ef5031\") " pod="openstack/openstack-cell2-galera-0" Feb 03 08:43:36 crc kubenswrapper[4998]: I0203 08:43:36.031026 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ef339639-5ecd-4a27-899c-4b61d5ef5031-kolla-config\") pod \"openstack-cell2-galera-0\" (UID: \"ef339639-5ecd-4a27-899c-4b61d5ef5031\") " pod="openstack/openstack-cell2-galera-0" Feb 03 08:43:36 crc kubenswrapper[4998]: I0203 08:43:36.031104 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ef339639-5ecd-4a27-899c-4b61d5ef5031-operator-scripts\") pod \"openstack-cell2-galera-0\" (UID: \"ef339639-5ecd-4a27-899c-4b61d5ef5031\") " pod="openstack/openstack-cell2-galera-0" Feb 03 08:43:36 crc kubenswrapper[4998]: I0203 08:43:36.036176 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ef339639-5ecd-4a27-899c-4b61d5ef5031-kolla-config\") pod \"openstack-cell2-galera-0\" (UID: \"ef339639-5ecd-4a27-899c-4b61d5ef5031\") " pod="openstack/openstack-cell2-galera-0" Feb 03 08:43:36 crc kubenswrapper[4998]: I0203 08:43:36.036311 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/ef339639-5ecd-4a27-899c-4b61d5ef5031-config-data-default\") pod \"openstack-cell2-galera-0\" (UID: \"ef339639-5ecd-4a27-899c-4b61d5ef5031\") " pod="openstack/openstack-cell2-galera-0" Feb 03 08:43:36 crc kubenswrapper[4998]: I0203 08:43:36.039144 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ef339639-5ecd-4a27-899c-4b61d5ef5031-operator-scripts\") pod \"openstack-cell2-galera-0\" (UID: \"ef339639-5ecd-4a27-899c-4b61d5ef5031\") " pod="openstack/openstack-cell2-galera-0" Feb 03 08:43:36 crc kubenswrapper[4998]: I0203 08:43:36.039968 4998 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Feb 03 08:43:36 crc kubenswrapper[4998]: I0203 08:43:36.040091 4998 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-4b5a0df7-b056-4167-8f3a-c342e2e70220\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4b5a0df7-b056-4167-8f3a-c342e2e70220\") pod \"openstack-cell2-galera-0\" (UID: \"ef339639-5ecd-4a27-899c-4b61d5ef5031\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/eae785ecf09387a913c313a6b75beb0720b580d7c4044de1f1c9bed58efc1153/globalmount\"" pod="openstack/openstack-cell2-galera-0" Feb 03 08:43:36 crc kubenswrapper[4998]: I0203 08:43:36.046630 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef339639-5ecd-4a27-899c-4b61d5ef5031-combined-ca-bundle\") pod \"openstack-cell2-galera-0\" (UID: \"ef339639-5ecd-4a27-899c-4b61d5ef5031\") " pod="openstack/openstack-cell2-galera-0" Feb 03 08:43:36 crc kubenswrapper[4998]: I0203 08:43:36.050833 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Feb 03 08:43:36 crc kubenswrapper[4998]: I0203 08:43:36.060746 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef339639-5ecd-4a27-899c-4b61d5ef5031-galera-tls-certs\") pod \"openstack-cell2-galera-0\" (UID: \"ef339639-5ecd-4a27-899c-4b61d5ef5031\") " pod="openstack/openstack-cell2-galera-0" Feb 03 08:43:36 crc kubenswrapper[4998]: I0203 08:43:36.075887 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/ef339639-5ecd-4a27-899c-4b61d5ef5031-config-data-generated\") pod \"openstack-cell2-galera-0\" (UID: \"ef339639-5ecd-4a27-899c-4b61d5ef5031\") " pod="openstack/openstack-cell2-galera-0" Feb 03 08:43:36 crc kubenswrapper[4998]: I0203 08:43:36.076507 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bdkt\" (UniqueName: \"kubernetes.io/projected/ef339639-5ecd-4a27-899c-4b61d5ef5031-kube-api-access-7bdkt\") pod \"openstack-cell2-galera-0\" (UID: \"ef339639-5ecd-4a27-899c-4b61d5ef5031\") " pod="openstack/openstack-cell2-galera-0" Feb 03 08:43:36 crc kubenswrapper[4998]: I0203 08:43:36.137293 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/eb644615-73e9-43ba-baab-c78d7881be54-kolla-config\") pod \"memcached-0\" (UID: \"eb644615-73e9-43ba-baab-c78d7881be54\") " pod="openstack/memcached-0" Feb 03 08:43:36 crc kubenswrapper[4998]: I0203 08:43:36.137549 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/eb644615-73e9-43ba-baab-c78d7881be54-config-data\") pod \"memcached-0\" (UID: \"eb644615-73e9-43ba-baab-c78d7881be54\") " pod="openstack/memcached-0" Feb 03 08:43:36 crc kubenswrapper[4998]: I0203 08:43:36.144707 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gk7pl\" (UniqueName: \"kubernetes.io/projected/eb644615-73e9-43ba-baab-c78d7881be54-kube-api-access-gk7pl\") pod \"memcached-0\" (UID: \"eb644615-73e9-43ba-baab-c78d7881be54\") " pod="openstack/memcached-0" Feb 03 08:43:36 crc kubenswrapper[4998]: I0203 08:43:36.150351 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"pvc-4b5a0df7-b056-4167-8f3a-c342e2e70220\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4b5a0df7-b056-4167-8f3a-c342e2e70220\") pod \"openstack-cell2-galera-0\" (UID: \"ef339639-5ecd-4a27-899c-4b61d5ef5031\") " pod="openstack/openstack-cell2-galera-0" Feb 03 08:43:36 crc kubenswrapper[4998]: I0203 08:43:36.246956 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gk7pl\" (UniqueName: \"kubernetes.io/projected/eb644615-73e9-43ba-baab-c78d7881be54-kube-api-access-gk7pl\") pod \"memcached-0\" (UID: \"eb644615-73e9-43ba-baab-c78d7881be54\") " pod="openstack/memcached-0" Feb 03 08:43:36 crc kubenswrapper[4998]: I0203 08:43:36.247084 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/eb644615-73e9-43ba-baab-c78d7881be54-kolla-config\") pod \"memcached-0\" (UID: \"eb644615-73e9-43ba-baab-c78d7881be54\") " pod="openstack/memcached-0" Feb 03 08:43:36 crc kubenswrapper[4998]: I0203 08:43:36.247129 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/eb644615-73e9-43ba-baab-c78d7881be54-config-data\") pod \"memcached-0\" (UID: \"eb644615-73e9-43ba-baab-c78d7881be54\") " pod="openstack/memcached-0" Feb 03 08:43:36 crc kubenswrapper[4998]: I0203 08:43:36.248144 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/eb644615-73e9-43ba-baab-c78d7881be54-kolla-config\") pod \"memcached-0\" (UID: \"eb644615-73e9-43ba-baab-c78d7881be54\") " pod="openstack/memcached-0" Feb 03 08:43:36 crc kubenswrapper[4998]: I0203 08:43:36.248252 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/eb644615-73e9-43ba-baab-c78d7881be54-config-data\") pod \"memcached-0\" (UID: \"eb644615-73e9-43ba-baab-c78d7881be54\") " pod="openstack/memcached-0" Feb 03 08:43:36 crc kubenswrapper[4998]: I0203 08:43:36.267687 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gk7pl\" (UniqueName: \"kubernetes.io/projected/eb644615-73e9-43ba-baab-c78d7881be54-kube-api-access-gk7pl\") pod \"memcached-0\" (UID: \"eb644615-73e9-43ba-baab-c78d7881be54\") " pod="openstack/memcached-0" Feb 03 08:43:36 crc kubenswrapper[4998]: I0203 08:43:36.407982 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell2-galera-0" Feb 03 08:43:36 crc kubenswrapper[4998]: I0203 08:43:36.444295 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Feb 03 08:43:36 crc kubenswrapper[4998]: I0203 08:43:36.908819 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell2-galera-0"] Feb 03 08:43:36 crc kubenswrapper[4998]: W0203 08:43:36.915725 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podef339639_5ecd_4a27_899c_4b61d5ef5031.slice/crio-1497760c6d5b816f99c3d99f523ffd4180bc4fe1fa984011367dc502f841af9d WatchSource:0}: Error finding container 1497760c6d5b816f99c3d99f523ffd4180bc4fe1fa984011367dc502f841af9d: Status 404 returned error can't find the container with id 1497760c6d5b816f99c3d99f523ffd4180bc4fe1fa984011367dc502f841af9d Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.010558 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Feb 03 08:43:37 crc kubenswrapper[4998]: W0203 08:43:37.015657 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeb644615_73e9_43ba_baab_c78d7881be54.slice/crio-a432aa4e6827cf9dae9acd37aafc4b7fa2ded6dbc233b929e2a9f860e88180c5 WatchSource:0}: Error finding container a432aa4e6827cf9dae9acd37aafc4b7fa2ded6dbc233b929e2a9f860e88180c5: Status 404 returned error can't find the container with id a432aa4e6827cf9dae9acd37aafc4b7fa2ded6dbc233b929e2a9f860e88180c5 Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.179873 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell3-galera-0"] Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.181066 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell3-galera-0" Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.183078 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell3-svc" Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.187392 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell3-dockercfg-fgwdw" Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.187803 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell3-scripts" Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.188474 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell3-config-data" Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.194069 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell3-galera-0"] Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.251251 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell2-galera-0" event={"ID":"ef339639-5ecd-4a27-899c-4b61d5ef5031","Type":"ContainerStarted","Data":"1497760c6d5b816f99c3d99f523ffd4180bc4fe1fa984011367dc502f841af9d"} Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.252478 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"eb644615-73e9-43ba-baab-c78d7881be54","Type":"ContainerStarted","Data":"a432aa4e6827cf9dae9acd37aafc4b7fa2ded6dbc233b929e2a9f860e88180c5"} Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.265741 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/001c7640-7c7d-4cc2-846e-d9af02321908-galera-tls-certs\") pod 
\"openstack-cell3-galera-0\" (UID: \"001c7640-7c7d-4cc2-846e-d9af02321908\") " pod="openstack/openstack-cell3-galera-0" Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.265838 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/001c7640-7c7d-4cc2-846e-d9af02321908-combined-ca-bundle\") pod \"openstack-cell3-galera-0\" (UID: \"001c7640-7c7d-4cc2-846e-d9af02321908\") " pod="openstack/openstack-cell3-galera-0" Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.265857 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/001c7640-7c7d-4cc2-846e-d9af02321908-operator-scripts\") pod \"openstack-cell3-galera-0\" (UID: \"001c7640-7c7d-4cc2-846e-d9af02321908\") " pod="openstack/openstack-cell3-galera-0" Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.265870 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/001c7640-7c7d-4cc2-846e-d9af02321908-kolla-config\") pod \"openstack-cell3-galera-0\" (UID: \"001c7640-7c7d-4cc2-846e-d9af02321908\") " pod="openstack/openstack-cell3-galera-0" Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.265888 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-th5z4\" (UniqueName: \"kubernetes.io/projected/001c7640-7c7d-4cc2-846e-d9af02321908-kube-api-access-th5z4\") pod \"openstack-cell3-galera-0\" (UID: \"001c7640-7c7d-4cc2-846e-d9af02321908\") " pod="openstack/openstack-cell3-galera-0" Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.265906 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/001c7640-7c7d-4cc2-846e-d9af02321908-config-data-generated\") pod \"openstack-cell3-galera-0\" (UID: \"001c7640-7c7d-4cc2-846e-d9af02321908\") " pod="openstack/openstack-cell3-galera-0" Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.265959 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-1567b65f-287d-43ff-b96f-deed5d42d971\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1567b65f-287d-43ff-b96f-deed5d42d971\") pod \"openstack-cell3-galera-0\" (UID: \"001c7640-7c7d-4cc2-846e-d9af02321908\") " pod="openstack/openstack-cell3-galera-0" Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.265988 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/001c7640-7c7d-4cc2-846e-d9af02321908-config-data-default\") pod \"openstack-cell3-galera-0\" (UID: \"001c7640-7c7d-4cc2-846e-d9af02321908\") " pod="openstack/openstack-cell3-galera-0" Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.367509 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-1567b65f-287d-43ff-b96f-deed5d42d971\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1567b65f-287d-43ff-b96f-deed5d42d971\") pod \"openstack-cell3-galera-0\" (UID: \"001c7640-7c7d-4cc2-846e-d9af02321908\") " pod="openstack/openstack-cell3-galera-0" Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.367581 4998 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/001c7640-7c7d-4cc2-846e-d9af02321908-config-data-default\") pod \"openstack-cell3-galera-0\" (UID: \"001c7640-7c7d-4cc2-846e-d9af02321908\") " pod="openstack/openstack-cell3-galera-0" Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.367613 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/001c7640-7c7d-4cc2-846e-d9af02321908-galera-tls-certs\") pod \"openstack-cell3-galera-0\" (UID: \"001c7640-7c7d-4cc2-846e-d9af02321908\") " pod="openstack/openstack-cell3-galera-0" Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.368828 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/001c7640-7c7d-4cc2-846e-d9af02321908-config-data-default\") pod \"openstack-cell3-galera-0\" (UID: \"001c7640-7c7d-4cc2-846e-d9af02321908\") " pod="openstack/openstack-cell3-galera-0" Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.369038 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/001c7640-7c7d-4cc2-846e-d9af02321908-combined-ca-bundle\") pod \"openstack-cell3-galera-0\" (UID: \"001c7640-7c7d-4cc2-846e-d9af02321908\") " pod="openstack/openstack-cell3-galera-0" Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.369065 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/001c7640-7c7d-4cc2-846e-d9af02321908-operator-scripts\") pod \"openstack-cell3-galera-0\" (UID: \"001c7640-7c7d-4cc2-846e-d9af02321908\") " pod="openstack/openstack-cell3-galera-0" Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.369414 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/001c7640-7c7d-4cc2-846e-d9af02321908-kolla-config\") pod \"openstack-cell3-galera-0\" (UID: \"001c7640-7c7d-4cc2-846e-d9af02321908\") " pod="openstack/openstack-cell3-galera-0" Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.369434 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-th5z4\" (UniqueName: \"kubernetes.io/projected/001c7640-7c7d-4cc2-846e-d9af02321908-kube-api-access-th5z4\") pod \"openstack-cell3-galera-0\" (UID: \"001c7640-7c7d-4cc2-846e-d9af02321908\") " pod="openstack/openstack-cell3-galera-0" Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.369487 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/001c7640-7c7d-4cc2-846e-d9af02321908-config-data-generated\") pod \"openstack-cell3-galera-0\" (UID: \"001c7640-7c7d-4cc2-846e-d9af02321908\") " pod="openstack/openstack-cell3-galera-0" Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.370562 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/001c7640-7c7d-4cc2-846e-d9af02321908-config-data-generated\") pod \"openstack-cell3-galera-0\" (UID: \"001c7640-7c7d-4cc2-846e-d9af02321908\") " pod="openstack/openstack-cell3-galera-0" Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.370877 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: 
\"kubernetes.io/configmap/001c7640-7c7d-4cc2-846e-d9af02321908-kolla-config\") pod \"openstack-cell3-galera-0\" (UID: \"001c7640-7c7d-4cc2-846e-d9af02321908\") " pod="openstack/openstack-cell3-galera-0" Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.372624 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/001c7640-7c7d-4cc2-846e-d9af02321908-operator-scripts\") pod \"openstack-cell3-galera-0\" (UID: \"001c7640-7c7d-4cc2-846e-d9af02321908\") " pod="openstack/openstack-cell3-galera-0" Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.372794 4998 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.372827 4998 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-1567b65f-287d-43ff-b96f-deed5d42d971\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1567b65f-287d-43ff-b96f-deed5d42d971\") pod \"openstack-cell3-galera-0\" (UID: \"001c7640-7c7d-4cc2-846e-d9af02321908\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/f9726d102a7dca0be23ab0cbd9765b23f955b3988ceaaec075705333f5db79dc/globalmount\"" pod="openstack/openstack-cell3-galera-0" Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.374742 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/001c7640-7c7d-4cc2-846e-d9af02321908-combined-ca-bundle\") pod \"openstack-cell3-galera-0\" (UID: \"001c7640-7c7d-4cc2-846e-d9af02321908\") " pod="openstack/openstack-cell3-galera-0" Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.387414 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/001c7640-7c7d-4cc2-846e-d9af02321908-galera-tls-certs\") pod \"openstack-cell3-galera-0\" (UID: \"001c7640-7c7d-4cc2-846e-d9af02321908\") " pod="openstack/openstack-cell3-galera-0" Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.389037 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-th5z4\" (UniqueName: \"kubernetes.io/projected/001c7640-7c7d-4cc2-846e-d9af02321908-kube-api-access-th5z4\") pod \"openstack-cell3-galera-0\" (UID: \"001c7640-7c7d-4cc2-846e-d9af02321908\") " pod="openstack/openstack-cell3-galera-0" Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.411966 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-1567b65f-287d-43ff-b96f-deed5d42d971\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1567b65f-287d-43ff-b96f-deed5d42d971\") pod \"openstack-cell3-galera-0\" (UID: \"001c7640-7c7d-4cc2-846e-d9af02321908\") " pod="openstack/openstack-cell3-galera-0" Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.508196 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell3-galera-0" Feb 03 08:43:37 crc kubenswrapper[4998]: I0203 08:43:37.979229 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell3-galera-0"] Feb 03 08:43:38 crc kubenswrapper[4998]: I0203 08:43:38.260734 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell3-galera-0" event={"ID":"001c7640-7c7d-4cc2-846e-d9af02321908","Type":"ContainerStarted","Data":"50c0fb8e3b08f4b3d5c1f15a91aa9edca2332fb99b33a10a3cdb77f101648561"} Feb 03 08:44:09 crc kubenswrapper[4998]: E0203 08:44:09.631579 4998 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-mariadb:cac82611632fe132c9e9b85f289b0dbc" Feb 03 08:44:09 crc kubenswrapper[4998]: E0203 08:44:09.632255 4998 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-mariadb:cac82611632fe132c9e9b85f289b0dbc" Feb 03 08:44:09 crc kubenswrapper[4998]: E0203 08:44:09.632437 4998 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.rdoproject.org/podified-antelope-centos9/openstack-mariadb:cac82611632fe132c9e9b85f289b0dbc,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-th5z4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-cell3-galera-0_openstack(001c7640-7c7d-4cc2-846e-d9af02321908): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 03 08:44:09 crc 
kubenswrapper[4998]: E0203 08:44:09.633698 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-cell3-galera-0" podUID="001c7640-7c7d-4cc2-846e-d9af02321908" Feb 03 08:44:09 crc kubenswrapper[4998]: E0203 08:44:09.768621 4998 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-mariadb:cac82611632fe132c9e9b85f289b0dbc" Feb 03 08:44:09 crc kubenswrapper[4998]: E0203 08:44:09.768690 4998 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-mariadb:cac82611632fe132c9e9b85f289b0dbc" Feb 03 08:44:09 crc kubenswrapper[4998]: E0203 08:44:09.768874 4998 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.rdoproject.org/podified-antelope-centos9/openstack-mariadb:cac82611632fe132c9e9b85f289b0dbc,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7bdkt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-cell2-galera-0_openstack(ef339639-5ecd-4a27-899c-4b61d5ef5031): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 03 08:44:09 crc kubenswrapper[4998]: E0203 08:44:09.770939 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = 
copying config: context canceled\"" pod="openstack/openstack-cell2-galera-0" podUID="ef339639-5ecd-4a27-899c-4b61d5ef5031" Feb 03 08:44:09 crc kubenswrapper[4998]: E0203 08:44:09.794757 4998 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-mariadb:cac82611632fe132c9e9b85f289b0dbc" Feb 03 08:44:09 crc kubenswrapper[4998]: E0203 08:44:09.794890 4998 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-mariadb:cac82611632fe132c9e9b85f289b0dbc" Feb 03 08:44:09 crc kubenswrapper[4998]: E0203 08:44:09.795093 4998 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.rdoproject.org/podified-antelope-centos9/openstack-mariadb:cac82611632fe132c9e9b85f289b0dbc,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-d2gsk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-cell1-galera-0_openstack(d3f299c1-ab12-4ba1-80fc-6d286f546d1d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 03 08:44:09 crc kubenswrapper[4998]: E0203 08:44:09.796491 4998 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-mariadb:cac82611632fe132c9e9b85f289b0dbc" Feb 03 08:44:09 crc kubenswrapper[4998]: E0203 08:44:09.796546 4998 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc 
= copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-mariadb:cac82611632fe132c9e9b85f289b0dbc" Feb 03 08:44:09 crc kubenswrapper[4998]: E0203 08:44:09.796546 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-cell1-galera-0" podUID="d3f299c1-ab12-4ba1-80fc-6d286f546d1d" Feb 03 08:44:09 crc kubenswrapper[4998]: E0203 08:44:09.796679 4998 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.rdoproject.org/podified-antelope-centos9/openstack-mariadb:cac82611632fe132c9e9b85f289b0dbc,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-f6mdg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-0_openstack(396e3696-7910-4dca-9648-f4be4d5075d2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 03 08:44:09 crc kubenswrapper[4998]: E0203 08:44:09.797858 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-galera-0" podUID="396e3696-7910-4dca-9648-f4be4d5075d2" Feb 03 08:44:09 crc kubenswrapper[4998]: E0203 08:44:09.826109 4998 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-neutron-server:cac82611632fe132c9e9b85f289b0dbc" Feb 03 08:44:09 
crc kubenswrapper[4998]: E0203 08:44:09.826174 4998 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-neutron-server:cac82611632fe132c9e9b85f289b0dbc" Feb 03 08:44:09 crc kubenswrapper[4998]: E0203 08:44:09.826285 4998 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.rdoproject.org/podified-antelope-centos9/openstack-neutron-server:cac82611632fe132c9e9b85f289b0dbc,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n59dh596h64h55bh578h8bh585h57dh564h84h5f5h596hchcfh5c6h8h66fh68bh84h7dh675h5f9h58chfchc5h677h557hbdh8fh645h77h56dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qm56w,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-5fc7665699-8mt5s_openstack(f0facf43-e34c-4cc9-a7bf-8088f11b93c9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 03 08:44:09 crc kubenswrapper[4998]: E0203 08:44:09.826362 4998 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-rabbitmq:cac82611632fe132c9e9b85f289b0dbc" Feb 03 08:44:09 crc kubenswrapper[4998]: E0203 08:44:09.826386 4998 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-rabbitmq:cac82611632fe132c9e9b85f289b0dbc" Feb 03 08:44:09 crc kubenswrapper[4998]: E0203 08:44:09.826470 4998 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:setup-container,Image:quay.rdoproject.org/podified-antelope-centos9/openstack-rabbitmq:cac82611632fe132c9e9b85f289b0dbc,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5g59l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(c22f1671-0f4d-4269-a228-c3abf1e9218f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 03 08:44:09 crc kubenswrapper[4998]: E0203 08:44:09.827630 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-0" podUID="c22f1671-0f4d-4269-a228-c3abf1e9218f" Feb 03 08:44:09 crc kubenswrapper[4998]: E0203 08:44:09.827635 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-5fc7665699-8mt5s" podUID="f0facf43-e34c-4cc9-a7bf-8088f11b93c9" Feb 03 08:44:09 crc 
kubenswrapper[4998]: E0203 08:44:09.942641 4998 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-neutron-server:cac82611632fe132c9e9b85f289b0dbc" Feb 03 08:44:09 crc kubenswrapper[4998]: E0203 08:44:09.942741 4998 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-neutron-server:cac82611632fe132c9e9b85f289b0dbc" Feb 03 08:44:09 crc kubenswrapper[4998]: E0203 08:44:09.942938 4998 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.rdoproject.org/podified-antelope-centos9/openstack-neutron-server:cac82611632fe132c9e9b85f289b0dbc,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5cfh5d8h5bfh595h555hf9h57bh584h5fbh7ch674h579h6ch5c4h8dh545hb6h6hc6h7bh557hcdh544h54fh5ffh556h646h96h554h664h5d8h57dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ltls9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-c657cd4d9-4bm4m_openstack(db49e31a-f281-4aa3-9fd1-c6609141ed0e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 03 08:44:09 crc kubenswrapper[4998]: E0203 08:44:09.944154 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-c657cd4d9-4bm4m" podUID="db49e31a-f281-4aa3-9fd1-c6609141ed0e" Feb 03 08:44:10 crc kubenswrapper[4998]: E0203 08:44:10.334396 4998 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context 
canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-memcached:cac82611632fe132c9e9b85f289b0dbc" Feb 03 08:44:10 crc kubenswrapper[4998]: E0203 08:44:10.334454 4998 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-memcached:cac82611632fe132c9e9b85f289b0dbc" Feb 03 08:44:10 crc kubenswrapper[4998]: E0203 08:44:10.334654 4998 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:memcached,Image:quay.rdoproject.org/podified-antelope-centos9/openstack-memcached:cac82611632fe132c9e9b85f289b0dbc,Command:[/usr/bin/dumb-init -- /usr/local/bin/kolla_start],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:memcached,HostPort:0,ContainerPort:11211,Protocol:TCP,HostIP:,},ContainerPort{Name:memcached-tls,HostPort:0,ContainerPort:11212,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:POD_IPS,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIPs,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:CONFIG_HASH,Value:n597h5h68fh689h5f8h5f6h5ddh5f9h8ch679h684h687hb9hbch687h57dh5bdh5b8hbh9dh645h5c7hbh57bh57ch686h5bbh5ddh58h597h559h646q,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/src,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gk7pl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42457,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42457,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod memcached-0_openstack(eb644615-73e9-43ba-baab-c78d7881be54): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 03 08:44:10 crc kubenswrapper[4998]: E0203 08:44:10.336050 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" 
pod="openstack/memcached-0" podUID="eb644615-73e9-43ba-baab-c78d7881be54" Feb 03 08:44:10 crc kubenswrapper[4998]: E0203 08:44:10.527292 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-antelope-centos9/openstack-mariadb:cac82611632fe132c9e9b85f289b0dbc\\\"\"" pod="openstack/openstack-cell1-galera-0" podUID="d3f299c1-ab12-4ba1-80fc-6d286f546d1d" Feb 03 08:44:10 crc kubenswrapper[4998]: E0203 08:44:10.527610 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-antelope-centos9/openstack-mariadb:cac82611632fe132c9e9b85f289b0dbc\\\"\"" pod="openstack/openstack-cell3-galera-0" podUID="001c7640-7c7d-4cc2-846e-d9af02321908" Feb 03 08:44:10 crc kubenswrapper[4998]: E0203 08:44:10.527665 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-antelope-centos9/openstack-memcached:cac82611632fe132c9e9b85f289b0dbc\\\"\"" pod="openstack/memcached-0" podUID="eb644615-73e9-43ba-baab-c78d7881be54" Feb 03 08:44:10 crc kubenswrapper[4998]: E0203 08:44:10.527713 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-antelope-centos9/openstack-mariadb:cac82611632fe132c9e9b85f289b0dbc\\\"\"" pod="openstack/openstack-galera-0" podUID="396e3696-7910-4dca-9648-f4be4d5075d2" Feb 03 08:44:10 crc kubenswrapper[4998]: E0203 08:44:10.528419 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-antelope-centos9/openstack-mariadb:cac82611632fe132c9e9b85f289b0dbc\\\"\"" pod="openstack/openstack-cell2-galera-0" podUID="ef339639-5ecd-4a27-899c-4b61d5ef5031" Feb 03 08:44:10 crc kubenswrapper[4998]: I0203 08:44:10.980984 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5fc7665699-8mt5s" Feb 03 08:44:11 crc kubenswrapper[4998]: I0203 08:44:11.045015 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f0facf43-e34c-4cc9-a7bf-8088f11b93c9-dns-svc\") pod \"f0facf43-e34c-4cc9-a7bf-8088f11b93c9\" (UID: \"f0facf43-e34c-4cc9-a7bf-8088f11b93c9\") " Feb 03 08:44:11 crc kubenswrapper[4998]: I0203 08:44:11.045115 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qm56w\" (UniqueName: \"kubernetes.io/projected/f0facf43-e34c-4cc9-a7bf-8088f11b93c9-kube-api-access-qm56w\") pod \"f0facf43-e34c-4cc9-a7bf-8088f11b93c9\" (UID: \"f0facf43-e34c-4cc9-a7bf-8088f11b93c9\") " Feb 03 08:44:11 crc kubenswrapper[4998]: I0203 08:44:11.045197 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f0facf43-e34c-4cc9-a7bf-8088f11b93c9-config\") pod \"f0facf43-e34c-4cc9-a7bf-8088f11b93c9\" (UID: \"f0facf43-e34c-4cc9-a7bf-8088f11b93c9\") " Feb 03 08:44:11 crc kubenswrapper[4998]: I0203 08:44:11.045578 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f0facf43-e34c-4cc9-a7bf-8088f11b93c9-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f0facf43-e34c-4cc9-a7bf-8088f11b93c9" (UID: "f0facf43-e34c-4cc9-a7bf-8088f11b93c9"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:44:11 crc kubenswrapper[4998]: I0203 08:44:11.045959 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f0facf43-e34c-4cc9-a7bf-8088f11b93c9-config" (OuterVolumeSpecName: "config") pod "f0facf43-e34c-4cc9-a7bf-8088f11b93c9" (UID: "f0facf43-e34c-4cc9-a7bf-8088f11b93c9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:44:11 crc kubenswrapper[4998]: I0203 08:44:11.069111 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0facf43-e34c-4cc9-a7bf-8088f11b93c9-kube-api-access-qm56w" (OuterVolumeSpecName: "kube-api-access-qm56w") pod "f0facf43-e34c-4cc9-a7bf-8088f11b93c9" (UID: "f0facf43-e34c-4cc9-a7bf-8088f11b93c9"). InnerVolumeSpecName "kube-api-access-qm56w". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:44:11 crc kubenswrapper[4998]: I0203 08:44:11.146893 4998 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f0facf43-e34c-4cc9-a7bf-8088f11b93c9-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 08:44:11 crc kubenswrapper[4998]: I0203 08:44:11.146933 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qm56w\" (UniqueName: \"kubernetes.io/projected/f0facf43-e34c-4cc9-a7bf-8088f11b93c9-kube-api-access-qm56w\") on node \"crc\" DevicePath \"\"" Feb 03 08:44:11 crc kubenswrapper[4998]: I0203 08:44:11.146942 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f0facf43-e34c-4cc9-a7bf-8088f11b93c9-config\") on node \"crc\" DevicePath \"\"" Feb 03 08:44:11 crc kubenswrapper[4998]: I0203 08:44:11.532580 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c657cd4d9-4bm4m" event={"ID":"db49e31a-f281-4aa3-9fd1-c6609141ed0e","Type":"ContainerStarted","Data":"8a4c2d6241e2040ad8d36785c7dd338f4cec7d25edfd993bb25e29b8f67a4901"} Feb 03 08:44:11 crc kubenswrapper[4998]: I0203 08:44:11.533956 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fc7665699-8mt5s" event={"ID":"f0facf43-e34c-4cc9-a7bf-8088f11b93c9","Type":"ContainerDied","Data":"09123453dacfde44c042a0beeddbab8b16b58a836d7e71b11486037236c567d5"} Feb 03 08:44:11 crc kubenswrapper[4998]: I0203 08:44:11.533962 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5fc7665699-8mt5s" Feb 03 08:44:11 crc kubenswrapper[4998]: I0203 08:44:11.538227 4998 generic.go:334] "Generic (PLEG): container finished" podID="9a7add6c-6a09-4a9d-8209-a07c50a7c311" containerID="d75b7ea0c535bc8d2b46d6a572569e56c430e2348b434bfe1bd9acc31e1497e2" exitCode=0 Feb 03 08:44:11 crc kubenswrapper[4998]: I0203 08:44:11.538301 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55cb5c8bf7-ghvq6" event={"ID":"9a7add6c-6a09-4a9d-8209-a07c50a7c311","Type":"ContainerDied","Data":"d75b7ea0c535bc8d2b46d6a572569e56c430e2348b434bfe1bd9acc31e1497e2"} Feb 03 08:44:11 crc kubenswrapper[4998]: I0203 08:44:11.540168 4998 generic.go:334] "Generic (PLEG): container finished" podID="94056d14-26b2-43ec-a1c9-b93ceac98777" containerID="0f6ed3f3266048174d281b9ff31d21b537efe23a70f71abba15d32bc555a4973" exitCode=0 Feb 03 08:44:11 crc kubenswrapper[4998]: I0203 08:44:11.540209 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fb8fdf8b7-mzj9s" event={"ID":"94056d14-26b2-43ec-a1c9-b93ceac98777","Type":"ContainerDied","Data":"0f6ed3f3266048174d281b9ff31d21b537efe23a70f71abba15d32bc555a4973"} Feb 03 08:44:11 crc kubenswrapper[4998]: I0203 08:44:11.922716 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55cb5c8bf7-ghvq6" Feb 03 08:44:11 crc kubenswrapper[4998]: I0203 08:44:11.955666 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5fc7665699-8mt5s"] Feb 03 08:44:11 crc kubenswrapper[4998]: I0203 08:44:11.975270 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5fc7665699-8mt5s"] Feb 03 08:44:12 crc kubenswrapper[4998]: I0203 08:44:12.065505 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a7add6c-6a09-4a9d-8209-a07c50a7c311-config\") pod \"9a7add6c-6a09-4a9d-8209-a07c50a7c311\" (UID: \"9a7add6c-6a09-4a9d-8209-a07c50a7c311\") " Feb 03 08:44:12 crc kubenswrapper[4998]: I0203 08:44:12.065633 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nwx6v\" (UniqueName: \"kubernetes.io/projected/9a7add6c-6a09-4a9d-8209-a07c50a7c311-kube-api-access-nwx6v\") pod \"9a7add6c-6a09-4a9d-8209-a07c50a7c311\" (UID: \"9a7add6c-6a09-4a9d-8209-a07c50a7c311\") " Feb 03 08:44:12 crc kubenswrapper[4998]: I0203 08:44:12.065749 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9a7add6c-6a09-4a9d-8209-a07c50a7c311-dns-svc\") pod \"9a7add6c-6a09-4a9d-8209-a07c50a7c311\" (UID: \"9a7add6c-6a09-4a9d-8209-a07c50a7c311\") " Feb 03 08:44:12 crc kubenswrapper[4998]: I0203 08:44:12.073579 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a7add6c-6a09-4a9d-8209-a07c50a7c311-kube-api-access-nwx6v" (OuterVolumeSpecName: "kube-api-access-nwx6v") pod "9a7add6c-6a09-4a9d-8209-a07c50a7c311" (UID: "9a7add6c-6a09-4a9d-8209-a07c50a7c311"). InnerVolumeSpecName "kube-api-access-nwx6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:44:12 crc kubenswrapper[4998]: I0203 08:44:12.086722 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9a7add6c-6a09-4a9d-8209-a07c50a7c311-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9a7add6c-6a09-4a9d-8209-a07c50a7c311" (UID: "9a7add6c-6a09-4a9d-8209-a07c50a7c311"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:44:12 crc kubenswrapper[4998]: I0203 08:44:12.087058 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9a7add6c-6a09-4a9d-8209-a07c50a7c311-config" (OuterVolumeSpecName: "config") pod "9a7add6c-6a09-4a9d-8209-a07c50a7c311" (UID: "9a7add6c-6a09-4a9d-8209-a07c50a7c311"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:44:12 crc kubenswrapper[4998]: E0203 08:44:12.142828 4998 log.go:32] "CreateContainer in sandbox from runtime service failed" err=< Feb 03 08:44:12 crc kubenswrapper[4998]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/94056d14-26b2-43ec-a1c9-b93ceac98777/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Feb 03 08:44:12 crc kubenswrapper[4998]: > podSandboxID="101e87bc93245174f1526937968d5a2b53125b3a94c5288b8acca3455319351f" Feb 03 08:44:12 crc kubenswrapper[4998]: E0203 08:44:12.143133 4998 kuberuntime_manager.go:1274] "Unhandled Error" err=< Feb 03 08:44:12 crc kubenswrapper[4998]: container &Container{Name:dnsmasq-dns,Image:quay.rdoproject.org/podified-antelope-centos9/openstack-neutron-server:cac82611632fe132c9e9b85f289b0dbc,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n8chc6h5bh56fh546hb7hc8h67h5bchffh577h697h5b5h5bdh59bhf6hf4h558hb5h578h595h5cchfbh644h59ch7fh654h547h587h5cbh5d5h8fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-r8jf8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-7fb8fdf8b7-mzj9s_openstack(94056d14-26b2-43ec-a1c9-b93ceac98777): CreateContainerError: container create failed: mount 
`/var/lib/kubelet/pods/94056d14-26b2-43ec-a1c9-b93ceac98777/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Feb 03 08:44:12 crc kubenswrapper[4998]: > logger="UnhandledError" Feb 03 08:44:12 crc kubenswrapper[4998]: E0203 08:44:12.144426 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/94056d14-26b2-43ec-a1c9-b93ceac98777/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-7fb8fdf8b7-mzj9s" podUID="94056d14-26b2-43ec-a1c9-b93ceac98777" Feb 03 08:44:12 crc kubenswrapper[4998]: I0203 08:44:12.170054 4998 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9a7add6c-6a09-4a9d-8209-a07c50a7c311-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 08:44:12 crc kubenswrapper[4998]: I0203 08:44:12.170086 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a7add6c-6a09-4a9d-8209-a07c50a7c311-config\") on node \"crc\" DevicePath \"\"" Feb 03 08:44:12 crc kubenswrapper[4998]: I0203 08:44:12.170095 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nwx6v\" (UniqueName: \"kubernetes.io/projected/9a7add6c-6a09-4a9d-8209-a07c50a7c311-kube-api-access-nwx6v\") on node \"crc\" DevicePath \"\"" Feb 03 08:44:12 crc kubenswrapper[4998]: I0203 08:44:12.438265 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0facf43-e34c-4cc9-a7bf-8088f11b93c9" path="/var/lib/kubelet/pods/f0facf43-e34c-4cc9-a7bf-8088f11b93c9/volumes" Feb 03 08:44:12 crc kubenswrapper[4998]: I0203 08:44:12.554894 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell3-server-0" event={"ID":"ca3b87c5-fc69-4b39-8ba1-a7d141370ca9","Type":"ContainerStarted","Data":"74386e2f48b47e6d843fd5882e2efa9f1120de232df432798360801fe105cf59"} Feb 03 08:44:12 crc kubenswrapper[4998]: I0203 08:44:12.556289 4998 generic.go:334] "Generic (PLEG): container finished" podID="db49e31a-f281-4aa3-9fd1-c6609141ed0e" containerID="8a4c2d6241e2040ad8d36785c7dd338f4cec7d25edfd993bb25e29b8f67a4901" exitCode=0 Feb 03 08:44:12 crc kubenswrapper[4998]: I0203 08:44:12.556374 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c657cd4d9-4bm4m" event={"ID":"db49e31a-f281-4aa3-9fd1-c6609141ed0e","Type":"ContainerDied","Data":"8a4c2d6241e2040ad8d36785c7dd338f4cec7d25edfd993bb25e29b8f67a4901"} Feb 03 08:44:12 crc kubenswrapper[4998]: I0203 08:44:12.557694 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55cb5c8bf7-ghvq6" event={"ID":"9a7add6c-6a09-4a9d-8209-a07c50a7c311","Type":"ContainerDied","Data":"e918f0aeb9266723b86fa22e14361b4800a37acbcfd468ef82df51dffc0f1efb"} Feb 03 08:44:12 crc kubenswrapper[4998]: I0203 08:44:12.557992 4998 scope.go:117] "RemoveContainer" containerID="d75b7ea0c535bc8d2b46d6a572569e56c430e2348b434bfe1bd9acc31e1497e2" Feb 03 08:44:12 crc kubenswrapper[4998]: I0203 08:44:12.558072 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55cb5c8bf7-ghvq6" Feb 03 08:44:12 crc kubenswrapper[4998]: I0203 08:44:12.562240 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell2-server-0" event={"ID":"e735f12e-e7a4-47d6-b87e-c0e80dbd2cee","Type":"ContainerStarted","Data":"47f9c6514fa03b1cc0e6c06fa963b72ce078e9d1060390a9057f382565687a2d"} Feb 03 08:44:12 crc kubenswrapper[4998]: I0203 08:44:12.570519 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1aff891a-e736-4fbc-9c51-72a14116351e","Type":"ContainerStarted","Data":"a45fe8ae2d8b6ad12dfa56fbd069e6eb56610578cb1df9130ba4862a4edb0938"} Feb 03 08:44:12 crc kubenswrapper[4998]: I0203 08:44:12.581310 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c22f1671-0f4d-4269-a228-c3abf1e9218f","Type":"ContainerStarted","Data":"a983e50daf28ae63131eccdb607204a895f6e3e2902e1d84d24afb7903da9093"} Feb 03 08:44:12 crc kubenswrapper[4998]: I0203 08:44:12.621638 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55cb5c8bf7-ghvq6"] Feb 03 08:44:12 crc kubenswrapper[4998]: I0203 08:44:12.628273 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-55cb5c8bf7-ghvq6"] Feb 03 08:44:13 crc kubenswrapper[4998]: I0203 08:44:13.588586 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fb8fdf8b7-mzj9s" event={"ID":"94056d14-26b2-43ec-a1c9-b93ceac98777","Type":"ContainerStarted","Data":"69e0e9e9038169af7832cf3faf02da2bef2bb700df411897672a2d4f92e16ce1"} Feb 03 08:44:13 crc kubenswrapper[4998]: I0203 08:44:13.589062 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7fb8fdf8b7-mzj9s" Feb 03 08:44:13 crc kubenswrapper[4998]: I0203 08:44:13.591420 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c657cd4d9-4bm4m" event={"ID":"db49e31a-f281-4aa3-9fd1-c6609141ed0e","Type":"ContainerStarted","Data":"d65dbd6a4b753403da9dde96f40e2e807c835c32a71e2d2731505135dcd2dc40"} Feb 03 08:44:13 crc kubenswrapper[4998]: I0203 08:44:13.591645 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-c657cd4d9-4bm4m" Feb 03 08:44:13 crc kubenswrapper[4998]: I0203 08:44:13.611210 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7fb8fdf8b7-mzj9s" podStartSLOduration=4.495659718 podStartE2EDuration="43.611187678s" podCreationTimestamp="2026-02-03 08:43:30 +0000 UTC" firstStartedPulling="2026-02-03 08:43:31.21290132 +0000 UTC m=+7049.499595126" lastFinishedPulling="2026-02-03 08:44:10.32842928 +0000 UTC m=+7088.615123086" observedRunningTime="2026-02-03 08:44:13.61019019 +0000 UTC m=+7091.896884006" watchObservedRunningTime="2026-02-03 08:44:13.611187678 +0000 UTC m=+7091.897881474" Feb 03 08:44:13 crc kubenswrapper[4998]: I0203 08:44:13.642773 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-c657cd4d9-4bm4m" podStartSLOduration=-9223371994.212025 podStartE2EDuration="42.642751138s" podCreationTimestamp="2026-02-03 08:43:31 +0000 UTC" firstStartedPulling="2026-02-03 08:43:32.467111105 +0000 UTC m=+7050.753804911" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 08:44:13.637051825 +0000 UTC m=+7091.923745691" watchObservedRunningTime="2026-02-03 08:44:13.642751138 +0000 UTC m=+7091.929444944" Feb 03 08:44:14 crc 
kubenswrapper[4998]: I0203 08:44:14.437935 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a7add6c-6a09-4a9d-8209-a07c50a7c311" path="/var/lib/kubelet/pods/9a7add6c-6a09-4a9d-8209-a07c50a7c311/volumes" Feb 03 08:44:20 crc kubenswrapper[4998]: I0203 08:44:20.757892 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7fb8fdf8b7-mzj9s" Feb 03 08:44:21 crc kubenswrapper[4998]: I0203 08:44:21.430959 4998 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 03 08:44:21 crc kubenswrapper[4998]: I0203 08:44:21.710507 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-c657cd4d9-4bm4m" Feb 03 08:44:21 crc kubenswrapper[4998]: I0203 08:44:21.783605 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fb8fdf8b7-mzj9s"] Feb 03 08:44:21 crc kubenswrapper[4998]: I0203 08:44:21.783901 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7fb8fdf8b7-mzj9s" podUID="94056d14-26b2-43ec-a1c9-b93ceac98777" containerName="dnsmasq-dns" containerID="cri-o://69e0e9e9038169af7832cf3faf02da2bef2bb700df411897672a2d4f92e16ce1" gracePeriod=10 Feb 03 08:44:22 crc kubenswrapper[4998]: I0203 08:44:22.174590 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fb8fdf8b7-mzj9s" Feb 03 08:44:22 crc kubenswrapper[4998]: I0203 08:44:22.243361 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94056d14-26b2-43ec-a1c9-b93ceac98777-config\") pod \"94056d14-26b2-43ec-a1c9-b93ceac98777\" (UID: \"94056d14-26b2-43ec-a1c9-b93ceac98777\") " Feb 03 08:44:22 crc kubenswrapper[4998]: I0203 08:44:22.243712 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r8jf8\" (UniqueName: \"kubernetes.io/projected/94056d14-26b2-43ec-a1c9-b93ceac98777-kube-api-access-r8jf8\") pod \"94056d14-26b2-43ec-a1c9-b93ceac98777\" (UID: \"94056d14-26b2-43ec-a1c9-b93ceac98777\") " Feb 03 08:44:22 crc kubenswrapper[4998]: I0203 08:44:22.243799 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/94056d14-26b2-43ec-a1c9-b93ceac98777-dns-svc\") pod \"94056d14-26b2-43ec-a1c9-b93ceac98777\" (UID: \"94056d14-26b2-43ec-a1c9-b93ceac98777\") " Feb 03 08:44:22 crc kubenswrapper[4998]: I0203 08:44:22.253056 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94056d14-26b2-43ec-a1c9-b93ceac98777-kube-api-access-r8jf8" (OuterVolumeSpecName: "kube-api-access-r8jf8") pod "94056d14-26b2-43ec-a1c9-b93ceac98777" (UID: "94056d14-26b2-43ec-a1c9-b93ceac98777"). InnerVolumeSpecName "kube-api-access-r8jf8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:44:22 crc kubenswrapper[4998]: I0203 08:44:22.284095 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94056d14-26b2-43ec-a1c9-b93ceac98777-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "94056d14-26b2-43ec-a1c9-b93ceac98777" (UID: "94056d14-26b2-43ec-a1c9-b93ceac98777"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:44:22 crc kubenswrapper[4998]: I0203 08:44:22.304043 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94056d14-26b2-43ec-a1c9-b93ceac98777-config" (OuterVolumeSpecName: "config") pod "94056d14-26b2-43ec-a1c9-b93ceac98777" (UID: "94056d14-26b2-43ec-a1c9-b93ceac98777"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:44:22 crc kubenswrapper[4998]: I0203 08:44:22.346520 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94056d14-26b2-43ec-a1c9-b93ceac98777-config\") on node \"crc\" DevicePath \"\"" Feb 03 08:44:22 crc kubenswrapper[4998]: I0203 08:44:22.346588 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r8jf8\" (UniqueName: \"kubernetes.io/projected/94056d14-26b2-43ec-a1c9-b93ceac98777-kube-api-access-r8jf8\") on node \"crc\" DevicePath \"\"" Feb 03 08:44:22 crc kubenswrapper[4998]: I0203 08:44:22.346599 4998 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/94056d14-26b2-43ec-a1c9-b93ceac98777-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 08:44:22 crc kubenswrapper[4998]: I0203 08:44:22.660618 4998 generic.go:334] "Generic (PLEG): container finished" podID="94056d14-26b2-43ec-a1c9-b93ceac98777" containerID="69e0e9e9038169af7832cf3faf02da2bef2bb700df411897672a2d4f92e16ce1" exitCode=0 Feb 03 08:44:22 crc kubenswrapper[4998]: I0203 08:44:22.660706 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fb8fdf8b7-mzj9s" event={"ID":"94056d14-26b2-43ec-a1c9-b93ceac98777","Type":"ContainerDied","Data":"69e0e9e9038169af7832cf3faf02da2bef2bb700df411897672a2d4f92e16ce1"} Feb 03 08:44:22 crc kubenswrapper[4998]: I0203 08:44:22.661135 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fb8fdf8b7-mzj9s" event={"ID":"94056d14-26b2-43ec-a1c9-b93ceac98777","Type":"ContainerDied","Data":"101e87bc93245174f1526937968d5a2b53125b3a94c5288b8acca3455319351f"} Feb 03 08:44:22 crc kubenswrapper[4998]: I0203 08:44:22.660732 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fb8fdf8b7-mzj9s" Feb 03 08:44:22 crc kubenswrapper[4998]: I0203 08:44:22.661194 4998 scope.go:117] "RemoveContainer" containerID="69e0e9e9038169af7832cf3faf02da2bef2bb700df411897672a2d4f92e16ce1" Feb 03 08:44:22 crc kubenswrapper[4998]: I0203 08:44:22.664030 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"eb644615-73e9-43ba-baab-c78d7881be54","Type":"ContainerStarted","Data":"6f2edd89cac70109ae40269277d357bd1bd10cb3656041c250877f48d2ced155"} Feb 03 08:44:22 crc kubenswrapper[4998]: I0203 08:44:22.664284 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Feb 03 08:44:22 crc kubenswrapper[4998]: I0203 08:44:22.667851 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell2-galera-0" event={"ID":"ef339639-5ecd-4a27-899c-4b61d5ef5031","Type":"ContainerStarted","Data":"0540122771e299bc7514464780d1c3bfc6e8d2d20897aec0dfeaac47b2837c3c"} Feb 03 08:44:22 crc kubenswrapper[4998]: I0203 08:44:22.671314 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell3-galera-0" event={"ID":"001c7640-7c7d-4cc2-846e-d9af02321908","Type":"ContainerStarted","Data":"941969e8bbd9d5f4ed5d88dee7fba5f3589a530d62ce97df9bd4783fe200b356"} Feb 03 08:44:22 crc kubenswrapper[4998]: I0203 08:44:22.679031 4998 scope.go:117] "RemoveContainer" containerID="0f6ed3f3266048174d281b9ff31d21b537efe23a70f71abba15d32bc555a4973" Feb 03 08:44:22 crc kubenswrapper[4998]: I0203 08:44:22.691382 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fb8fdf8b7-mzj9s"] Feb 03 08:44:22 crc kubenswrapper[4998]: I0203 08:44:22.701862 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7fb8fdf8b7-mzj9s"] Feb 03 08:44:22 crc kubenswrapper[4998]: I0203 08:44:22.710870 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=3.069721879 podStartE2EDuration="47.71084958s" podCreationTimestamp="2026-02-03 08:43:35 +0000 UTC" firstStartedPulling="2026-02-03 08:43:37.021588797 +0000 UTC m=+7055.308282603" lastFinishedPulling="2026-02-03 08:44:21.662716508 +0000 UTC m=+7099.949410304" observedRunningTime="2026-02-03 08:44:22.708858003 +0000 UTC m=+7100.995551819" watchObservedRunningTime="2026-02-03 08:44:22.71084958 +0000 UTC m=+7100.997543386" Feb 03 08:44:22 crc kubenswrapper[4998]: I0203 08:44:22.744212 4998 scope.go:117] "RemoveContainer" containerID="69e0e9e9038169af7832cf3faf02da2bef2bb700df411897672a2d4f92e16ce1" Feb 03 08:44:22 crc kubenswrapper[4998]: E0203 08:44:22.744749 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"69e0e9e9038169af7832cf3faf02da2bef2bb700df411897672a2d4f92e16ce1\": container with ID starting with 69e0e9e9038169af7832cf3faf02da2bef2bb700df411897672a2d4f92e16ce1 not found: ID does not exist" containerID="69e0e9e9038169af7832cf3faf02da2bef2bb700df411897672a2d4f92e16ce1" Feb 03 08:44:22 crc kubenswrapper[4998]: I0203 08:44:22.744885 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69e0e9e9038169af7832cf3faf02da2bef2bb700df411897672a2d4f92e16ce1"} err="failed to get container status \"69e0e9e9038169af7832cf3faf02da2bef2bb700df411897672a2d4f92e16ce1\": rpc error: code = NotFound desc = could not find container \"69e0e9e9038169af7832cf3faf02da2bef2bb700df411897672a2d4f92e16ce1\": 
container with ID starting with 69e0e9e9038169af7832cf3faf02da2bef2bb700df411897672a2d4f92e16ce1 not found: ID does not exist" Feb 03 08:44:22 crc kubenswrapper[4998]: I0203 08:44:22.744926 4998 scope.go:117] "RemoveContainer" containerID="0f6ed3f3266048174d281b9ff31d21b537efe23a70f71abba15d32bc555a4973" Feb 03 08:44:22 crc kubenswrapper[4998]: E0203 08:44:22.745229 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f6ed3f3266048174d281b9ff31d21b537efe23a70f71abba15d32bc555a4973\": container with ID starting with 0f6ed3f3266048174d281b9ff31d21b537efe23a70f71abba15d32bc555a4973 not found: ID does not exist" containerID="0f6ed3f3266048174d281b9ff31d21b537efe23a70f71abba15d32bc555a4973" Feb 03 08:44:22 crc kubenswrapper[4998]: I0203 08:44:22.745268 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f6ed3f3266048174d281b9ff31d21b537efe23a70f71abba15d32bc555a4973"} err="failed to get container status \"0f6ed3f3266048174d281b9ff31d21b537efe23a70f71abba15d32bc555a4973\": rpc error: code = NotFound desc = could not find container \"0f6ed3f3266048174d281b9ff31d21b537efe23a70f71abba15d32bc555a4973\": container with ID starting with 0f6ed3f3266048174d281b9ff31d21b537efe23a70f71abba15d32bc555a4973 not found: ID does not exist" Feb 03 08:44:24 crc kubenswrapper[4998]: I0203 08:44:24.442333 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94056d14-26b2-43ec-a1c9-b93ceac98777" path="/var/lib/kubelet/pods/94056d14-26b2-43ec-a1c9-b93ceac98777/volumes" Feb 03 08:44:24 crc kubenswrapper[4998]: I0203 08:44:24.691231 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"d3f299c1-ab12-4ba1-80fc-6d286f546d1d","Type":"ContainerStarted","Data":"c3a48a0f185fc636d25c14a2983c77d51a2d8ee77c02c46820d56a5b94d82086"} Feb 03 08:44:24 crc kubenswrapper[4998]: I0203 08:44:24.692887 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"396e3696-7910-4dca-9648-f4be4d5075d2","Type":"ContainerStarted","Data":"c1a9a58f728f7c0ecfdc79474525f4b818aeec5d8f21ad8812040726ce0e53ae"} Feb 03 08:44:25 crc kubenswrapper[4998]: I0203 08:44:25.706100 4998 generic.go:334] "Generic (PLEG): container finished" podID="ef339639-5ecd-4a27-899c-4b61d5ef5031" containerID="0540122771e299bc7514464780d1c3bfc6e8d2d20897aec0dfeaac47b2837c3c" exitCode=0 Feb 03 08:44:25 crc kubenswrapper[4998]: I0203 08:44:25.706197 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell2-galera-0" event={"ID":"ef339639-5ecd-4a27-899c-4b61d5ef5031","Type":"ContainerDied","Data":"0540122771e299bc7514464780d1c3bfc6e8d2d20897aec0dfeaac47b2837c3c"} Feb 03 08:44:25 crc kubenswrapper[4998]: I0203 08:44:25.712338 4998 generic.go:334] "Generic (PLEG): container finished" podID="001c7640-7c7d-4cc2-846e-d9af02321908" containerID="941969e8bbd9d5f4ed5d88dee7fba5f3589a530d62ce97df9bd4783fe200b356" exitCode=0 Feb 03 08:44:25 crc kubenswrapper[4998]: I0203 08:44:25.712395 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell3-galera-0" event={"ID":"001c7640-7c7d-4cc2-846e-d9af02321908","Type":"ContainerDied","Data":"941969e8bbd9d5f4ed5d88dee7fba5f3589a530d62ce97df9bd4783fe200b356"} Feb 03 08:44:26 crc kubenswrapper[4998]: I0203 08:44:26.722007 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell2-galera-0" 
event={"ID":"ef339639-5ecd-4a27-899c-4b61d5ef5031","Type":"ContainerStarted","Data":"602bbb12bb810da4b98cb429c1d798d96fd210d8ccdfbfd582018394ca8c1cf0"} Feb 03 08:44:26 crc kubenswrapper[4998]: I0203 08:44:26.724944 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell3-galera-0" event={"ID":"001c7640-7c7d-4cc2-846e-d9af02321908","Type":"ContainerStarted","Data":"a733dbd9668e031a2c997444d481577070187a52ec28b2e7df561e0020960b2b"} Feb 03 08:44:26 crc kubenswrapper[4998]: I0203 08:44:26.751931 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell2-galera-0" podStartSLOduration=8.003860571 podStartE2EDuration="52.751905719s" podCreationTimestamp="2026-02-03 08:43:34 +0000 UTC" firstStartedPulling="2026-02-03 08:43:36.918292193 +0000 UTC m=+7055.204985999" lastFinishedPulling="2026-02-03 08:44:21.666337341 +0000 UTC m=+7099.953031147" observedRunningTime="2026-02-03 08:44:26.74737094 +0000 UTC m=+7105.034064766" watchObservedRunningTime="2026-02-03 08:44:26.751905719 +0000 UTC m=+7105.038599525" Feb 03 08:44:26 crc kubenswrapper[4998]: I0203 08:44:26.774192 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell3-galera-0" podStartSLOduration=7.105881278 podStartE2EDuration="50.774171923s" podCreationTimestamp="2026-02-03 08:43:36 +0000 UTC" firstStartedPulling="2026-02-03 08:43:37.995047981 +0000 UTC m=+7056.281741777" lastFinishedPulling="2026-02-03 08:44:21.663338616 +0000 UTC m=+7099.950032422" observedRunningTime="2026-02-03 08:44:26.773312979 +0000 UTC m=+7105.060006805" watchObservedRunningTime="2026-02-03 08:44:26.774171923 +0000 UTC m=+7105.060865729" Feb 03 08:44:27 crc kubenswrapper[4998]: I0203 08:44:27.508725 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell3-galera-0" Feb 03 08:44:27 crc kubenswrapper[4998]: I0203 08:44:27.509315 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell3-galera-0" Feb 03 08:44:28 crc kubenswrapper[4998]: I0203 08:44:28.745093 4998 generic.go:334] "Generic (PLEG): container finished" podID="d3f299c1-ab12-4ba1-80fc-6d286f546d1d" containerID="c3a48a0f185fc636d25c14a2983c77d51a2d8ee77c02c46820d56a5b94d82086" exitCode=0 Feb 03 08:44:28 crc kubenswrapper[4998]: I0203 08:44:28.745183 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"d3f299c1-ab12-4ba1-80fc-6d286f546d1d","Type":"ContainerDied","Data":"c3a48a0f185fc636d25c14a2983c77d51a2d8ee77c02c46820d56a5b94d82086"} Feb 03 08:44:28 crc kubenswrapper[4998]: I0203 08:44:28.746632 4998 generic.go:334] "Generic (PLEG): container finished" podID="396e3696-7910-4dca-9648-f4be4d5075d2" containerID="c1a9a58f728f7c0ecfdc79474525f4b818aeec5d8f21ad8812040726ce0e53ae" exitCode=0 Feb 03 08:44:28 crc kubenswrapper[4998]: I0203 08:44:28.746666 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"396e3696-7910-4dca-9648-f4be4d5075d2","Type":"ContainerDied","Data":"c1a9a58f728f7c0ecfdc79474525f4b818aeec5d8f21ad8812040726ce0e53ae"} Feb 03 08:44:29 crc kubenswrapper[4998]: I0203 08:44:29.753883 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"d3f299c1-ab12-4ba1-80fc-6d286f546d1d","Type":"ContainerStarted","Data":"902386ff25d7db5a6e9f48296247e62c32af82c6f88ccd202e45dadb3e30fdb0"} Feb 03 08:44:29 crc kubenswrapper[4998]: I0203 
08:44:29.757966 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"396e3696-7910-4dca-9648-f4be4d5075d2","Type":"ContainerStarted","Data":"9c5f219b52ebdde2e9f81e47bb5849fa96acac12b8a194e5ac3fd51ae621103f"} Feb 03 08:44:29 crc kubenswrapper[4998]: I0203 08:44:29.783932 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=-9223371980.070871 podStartE2EDuration="56.783905082s" podCreationTimestamp="2026-02-03 08:43:33 +0000 UTC" firstStartedPulling="2026-02-03 08:43:35.152986852 +0000 UTC m=+7053.439680678" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 08:44:29.771106267 +0000 UTC m=+7108.057800123" watchObservedRunningTime="2026-02-03 08:44:29.783905082 +0000 UTC m=+7108.070598908" Feb 03 08:44:29 crc kubenswrapper[4998]: I0203 08:44:29.808713 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=-9223371978.046082 podStartE2EDuration="58.808694728s" podCreationTimestamp="2026-02-03 08:43:31 +0000 UTC" firstStartedPulling="2026-02-03 08:43:33.881631468 +0000 UTC m=+7052.168325264" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 08:44:29.80068671 +0000 UTC m=+7108.087380526" watchObservedRunningTime="2026-02-03 08:44:29.808694728 +0000 UTC m=+7108.095388544" Feb 03 08:44:31 crc kubenswrapper[4998]: I0203 08:44:31.445673 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Feb 03 08:44:31 crc kubenswrapper[4998]: I0203 08:44:31.619199 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell3-galera-0" Feb 03 08:44:31 crc kubenswrapper[4998]: I0203 08:44:31.714638 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell3-galera-0" Feb 03 08:44:33 crc kubenswrapper[4998]: I0203 08:44:33.296238 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Feb 03 08:44:33 crc kubenswrapper[4998]: I0203 08:44:33.296465 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Feb 03 08:44:34 crc kubenswrapper[4998]: I0203 08:44:34.607298 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Feb 03 08:44:34 crc kubenswrapper[4998]: I0203 08:44:34.607648 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Feb 03 08:44:34 crc kubenswrapper[4998]: I0203 08:44:34.715603 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Feb 03 08:44:34 crc kubenswrapper[4998]: I0203 08:44:34.857746 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Feb 03 08:44:36 crc kubenswrapper[4998]: I0203 08:44:36.235090 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-45ntk"] Feb 03 08:44:36 crc kubenswrapper[4998]: E0203 08:44:36.235915 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a7add6c-6a09-4a9d-8209-a07c50a7c311" containerName="init" Feb 03 08:44:36 crc kubenswrapper[4998]: I0203 08:44:36.235937 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a7add6c-6a09-4a9d-8209-a07c50a7c311" 
containerName="init" Feb 03 08:44:36 crc kubenswrapper[4998]: E0203 08:44:36.235972 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94056d14-26b2-43ec-a1c9-b93ceac98777" containerName="init" Feb 03 08:44:36 crc kubenswrapper[4998]: I0203 08:44:36.235982 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="94056d14-26b2-43ec-a1c9-b93ceac98777" containerName="init" Feb 03 08:44:36 crc kubenswrapper[4998]: E0203 08:44:36.236000 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94056d14-26b2-43ec-a1c9-b93ceac98777" containerName="dnsmasq-dns" Feb 03 08:44:36 crc kubenswrapper[4998]: I0203 08:44:36.236011 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="94056d14-26b2-43ec-a1c9-b93ceac98777" containerName="dnsmasq-dns" Feb 03 08:44:36 crc kubenswrapper[4998]: I0203 08:44:36.236288 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a7add6c-6a09-4a9d-8209-a07c50a7c311" containerName="init" Feb 03 08:44:36 crc kubenswrapper[4998]: I0203 08:44:36.236317 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="94056d14-26b2-43ec-a1c9-b93ceac98777" containerName="dnsmasq-dns" Feb 03 08:44:36 crc kubenswrapper[4998]: I0203 08:44:36.237120 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-45ntk" Feb 03 08:44:36 crc kubenswrapper[4998]: I0203 08:44:36.239506 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell3-mariadb-root-db-secret" Feb 03 08:44:36 crc kubenswrapper[4998]: I0203 08:44:36.249624 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-45ntk"] Feb 03 08:44:36 crc kubenswrapper[4998]: I0203 08:44:36.298880 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-888hp\" (UniqueName: \"kubernetes.io/projected/91970bba-7485-4a4a-a0f3-77a9168f484d-kube-api-access-888hp\") pod \"root-account-create-update-45ntk\" (UID: \"91970bba-7485-4a4a-a0f3-77a9168f484d\") " pod="openstack/root-account-create-update-45ntk" Feb 03 08:44:36 crc kubenswrapper[4998]: I0203 08:44:36.299205 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91970bba-7485-4a4a-a0f3-77a9168f484d-operator-scripts\") pod \"root-account-create-update-45ntk\" (UID: \"91970bba-7485-4a4a-a0f3-77a9168f484d\") " pod="openstack/root-account-create-update-45ntk" Feb 03 08:44:36 crc kubenswrapper[4998]: I0203 08:44:36.401211 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91970bba-7485-4a4a-a0f3-77a9168f484d-operator-scripts\") pod \"root-account-create-update-45ntk\" (UID: \"91970bba-7485-4a4a-a0f3-77a9168f484d\") " pod="openstack/root-account-create-update-45ntk" Feb 03 08:44:36 crc kubenswrapper[4998]: I0203 08:44:36.401293 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-888hp\" (UniqueName: \"kubernetes.io/projected/91970bba-7485-4a4a-a0f3-77a9168f484d-kube-api-access-888hp\") pod \"root-account-create-update-45ntk\" (UID: \"91970bba-7485-4a4a-a0f3-77a9168f484d\") " pod="openstack/root-account-create-update-45ntk" Feb 03 08:44:36 crc kubenswrapper[4998]: I0203 08:44:36.402294 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/91970bba-7485-4a4a-a0f3-77a9168f484d-operator-scripts\") pod \"root-account-create-update-45ntk\" (UID: \"91970bba-7485-4a4a-a0f3-77a9168f484d\") " pod="openstack/root-account-create-update-45ntk" Feb 03 08:44:36 crc kubenswrapper[4998]: I0203 08:44:36.409838 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell2-galera-0" Feb 03 08:44:36 crc kubenswrapper[4998]: I0203 08:44:36.409899 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell2-galera-0" Feb 03 08:44:36 crc kubenswrapper[4998]: I0203 08:44:36.438214 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-888hp\" (UniqueName: \"kubernetes.io/projected/91970bba-7485-4a4a-a0f3-77a9168f484d-kube-api-access-888hp\") pod \"root-account-create-update-45ntk\" (UID: \"91970bba-7485-4a4a-a0f3-77a9168f484d\") " pod="openstack/root-account-create-update-45ntk" Feb 03 08:44:36 crc kubenswrapper[4998]: I0203 08:44:36.512960 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell2-galera-0" Feb 03 08:44:36 crc kubenswrapper[4998]: I0203 08:44:36.562058 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-45ntk" Feb 03 08:44:36 crc kubenswrapper[4998]: I0203 08:44:36.876288 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell2-galera-0" Feb 03 08:44:36 crc kubenswrapper[4998]: I0203 08:44:36.984015 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-45ntk"] Feb 03 08:44:36 crc kubenswrapper[4998]: W0203 08:44:36.988339 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod91970bba_7485_4a4a_a0f3_77a9168f484d.slice/crio-5a4580e6e7ac7cdbd8d61584dcf5536e5fb6f529fd0e3a39119d2e94b7d1b3fc WatchSource:0}: Error finding container 5a4580e6e7ac7cdbd8d61584dcf5536e5fb6f529fd0e3a39119d2e94b7d1b3fc: Status 404 returned error can't find the container with id 5a4580e6e7ac7cdbd8d61584dcf5536e5fb6f529fd0e3a39119d2e94b7d1b3fc Feb 03 08:44:37 crc kubenswrapper[4998]: I0203 08:44:37.369764 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Feb 03 08:44:37 crc kubenswrapper[4998]: I0203 08:44:37.457122 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Feb 03 08:44:37 crc kubenswrapper[4998]: I0203 08:44:37.822362 4998 generic.go:334] "Generic (PLEG): container finished" podID="91970bba-7485-4a4a-a0f3-77a9168f484d" containerID="e67746ec78d30aa5d2611a55a6a41e973382a2c3f888ccd748e73cd861f760b6" exitCode=0 Feb 03 08:44:37 crc kubenswrapper[4998]: I0203 08:44:37.822470 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-45ntk" event={"ID":"91970bba-7485-4a4a-a0f3-77a9168f484d","Type":"ContainerDied","Data":"e67746ec78d30aa5d2611a55a6a41e973382a2c3f888ccd748e73cd861f760b6"} Feb 03 08:44:37 crc kubenswrapper[4998]: I0203 08:44:37.822525 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-45ntk" event={"ID":"91970bba-7485-4a4a-a0f3-77a9168f484d","Type":"ContainerStarted","Data":"5a4580e6e7ac7cdbd8d61584dcf5536e5fb6f529fd0e3a39119d2e94b7d1b3fc"} Feb 03 08:44:39 crc kubenswrapper[4998]: I0203 08:44:39.112118 4998 util.go:48] "No 
ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-45ntk" Feb 03 08:44:39 crc kubenswrapper[4998]: I0203 08:44:39.140744 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-888hp\" (UniqueName: \"kubernetes.io/projected/91970bba-7485-4a4a-a0f3-77a9168f484d-kube-api-access-888hp\") pod \"91970bba-7485-4a4a-a0f3-77a9168f484d\" (UID: \"91970bba-7485-4a4a-a0f3-77a9168f484d\") " Feb 03 08:44:39 crc kubenswrapper[4998]: I0203 08:44:39.140879 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91970bba-7485-4a4a-a0f3-77a9168f484d-operator-scripts\") pod \"91970bba-7485-4a4a-a0f3-77a9168f484d\" (UID: \"91970bba-7485-4a4a-a0f3-77a9168f484d\") " Feb 03 08:44:39 crc kubenswrapper[4998]: I0203 08:44:39.141341 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91970bba-7485-4a4a-a0f3-77a9168f484d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "91970bba-7485-4a4a-a0f3-77a9168f484d" (UID: "91970bba-7485-4a4a-a0f3-77a9168f484d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:44:39 crc kubenswrapper[4998]: I0203 08:44:39.141744 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91970bba-7485-4a4a-a0f3-77a9168f484d-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 08:44:39 crc kubenswrapper[4998]: I0203 08:44:39.156033 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91970bba-7485-4a4a-a0f3-77a9168f484d-kube-api-access-888hp" (OuterVolumeSpecName: "kube-api-access-888hp") pod "91970bba-7485-4a4a-a0f3-77a9168f484d" (UID: "91970bba-7485-4a4a-a0f3-77a9168f484d"). InnerVolumeSpecName "kube-api-access-888hp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:44:39 crc kubenswrapper[4998]: I0203 08:44:39.243330 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-888hp\" (UniqueName: \"kubernetes.io/projected/91970bba-7485-4a4a-a0f3-77a9168f484d-kube-api-access-888hp\") on node \"crc\" DevicePath \"\"" Feb 03 08:44:39 crc kubenswrapper[4998]: I0203 08:44:39.848614 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-45ntk" Feb 03 08:44:39 crc kubenswrapper[4998]: I0203 08:44:39.848649 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-45ntk" event={"ID":"91970bba-7485-4a4a-a0f3-77a9168f484d","Type":"ContainerDied","Data":"5a4580e6e7ac7cdbd8d61584dcf5536e5fb6f529fd0e3a39119d2e94b7d1b3fc"} Feb 03 08:44:39 crc kubenswrapper[4998]: I0203 08:44:39.848704 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5a4580e6e7ac7cdbd8d61584dcf5536e5fb6f529fd0e3a39119d2e94b7d1b3fc" Feb 03 08:44:41 crc kubenswrapper[4998]: I0203 08:44:41.920257 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-45ntk"] Feb 03 08:44:41 crc kubenswrapper[4998]: I0203 08:44:41.925138 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-45ntk"] Feb 03 08:44:42 crc kubenswrapper[4998]: I0203 08:44:42.010808 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-w89wp"] Feb 03 08:44:42 crc kubenswrapper[4998]: E0203 08:44:42.011111 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91970bba-7485-4a4a-a0f3-77a9168f484d" containerName="mariadb-account-create-update" Feb 03 08:44:42 crc kubenswrapper[4998]: I0203 08:44:42.011122 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="91970bba-7485-4a4a-a0f3-77a9168f484d" containerName="mariadb-account-create-update" Feb 03 08:44:42 crc kubenswrapper[4998]: I0203 08:44:42.011314 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="91970bba-7485-4a4a-a0f3-77a9168f484d" containerName="mariadb-account-create-update" Feb 03 08:44:42 crc kubenswrapper[4998]: I0203 08:44:42.011852 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-w89wp" Feb 03 08:44:42 crc kubenswrapper[4998]: I0203 08:44:42.014569 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-mariadb-root-db-secret" Feb 03 08:44:42 crc kubenswrapper[4998]: I0203 08:44:42.021062 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-w89wp"] Feb 03 08:44:42 crc kubenswrapper[4998]: I0203 08:44:42.085243 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cc4wt\" (UniqueName: \"kubernetes.io/projected/7a5a756d-dbb6-4d4c-a3a2-1cb81b60b456-kube-api-access-cc4wt\") pod \"root-account-create-update-w89wp\" (UID: \"7a5a756d-dbb6-4d4c-a3a2-1cb81b60b456\") " pod="openstack/root-account-create-update-w89wp" Feb 03 08:44:42 crc kubenswrapper[4998]: I0203 08:44:42.085302 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a5a756d-dbb6-4d4c-a3a2-1cb81b60b456-operator-scripts\") pod \"root-account-create-update-w89wp\" (UID: \"7a5a756d-dbb6-4d4c-a3a2-1cb81b60b456\") " pod="openstack/root-account-create-update-w89wp" Feb 03 08:44:42 crc kubenswrapper[4998]: I0203 08:44:42.186516 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cc4wt\" (UniqueName: \"kubernetes.io/projected/7a5a756d-dbb6-4d4c-a3a2-1cb81b60b456-kube-api-access-cc4wt\") pod \"root-account-create-update-w89wp\" (UID: \"7a5a756d-dbb6-4d4c-a3a2-1cb81b60b456\") " pod="openstack/root-account-create-update-w89wp" Feb 03 08:44:42 crc kubenswrapper[4998]: I0203 08:44:42.186584 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a5a756d-dbb6-4d4c-a3a2-1cb81b60b456-operator-scripts\") pod \"root-account-create-update-w89wp\" (UID: \"7a5a756d-dbb6-4d4c-a3a2-1cb81b60b456\") " pod="openstack/root-account-create-update-w89wp" Feb 03 08:44:42 crc kubenswrapper[4998]: I0203 08:44:42.188238 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a5a756d-dbb6-4d4c-a3a2-1cb81b60b456-operator-scripts\") pod \"root-account-create-update-w89wp\" (UID: \"7a5a756d-dbb6-4d4c-a3a2-1cb81b60b456\") " pod="openstack/root-account-create-update-w89wp" Feb 03 08:44:42 crc kubenswrapper[4998]: I0203 08:44:42.209464 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cc4wt\" (UniqueName: \"kubernetes.io/projected/7a5a756d-dbb6-4d4c-a3a2-1cb81b60b456-kube-api-access-cc4wt\") pod \"root-account-create-update-w89wp\" (UID: \"7a5a756d-dbb6-4d4c-a3a2-1cb81b60b456\") " pod="openstack/root-account-create-update-w89wp" Feb 03 08:44:42 crc kubenswrapper[4998]: I0203 08:44:42.355475 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-w89wp" Feb 03 08:44:42 crc kubenswrapper[4998]: I0203 08:44:42.460710 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91970bba-7485-4a4a-a0f3-77a9168f484d" path="/var/lib/kubelet/pods/91970bba-7485-4a4a-a0f3-77a9168f484d/volumes" Feb 03 08:44:42 crc kubenswrapper[4998]: I0203 08:44:42.754256 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 08:44:42 crc kubenswrapper[4998]: I0203 08:44:42.754997 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 08:44:42 crc kubenswrapper[4998]: I0203 08:44:42.818620 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-w89wp"] Feb 03 08:44:42 crc kubenswrapper[4998]: I0203 08:44:42.883250 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-w89wp" event={"ID":"7a5a756d-dbb6-4d4c-a3a2-1cb81b60b456","Type":"ContainerStarted","Data":"86bae993bace4eb790411e356de6b1089f5f0a37c96a94ba88a8e33e93971521"} Feb 03 08:44:43 crc kubenswrapper[4998]: I0203 08:44:43.893560 4998 generic.go:334] "Generic (PLEG): container finished" podID="7a5a756d-dbb6-4d4c-a3a2-1cb81b60b456" containerID="71ecd614068f85d80b96f1d6d71a1a899a38a4a74e8c581789e51e087a4060e0" exitCode=0 Feb 03 08:44:43 crc kubenswrapper[4998]: I0203 08:44:43.893639 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-w89wp" event={"ID":"7a5a756d-dbb6-4d4c-a3a2-1cb81b60b456","Type":"ContainerDied","Data":"71ecd614068f85d80b96f1d6d71a1a899a38a4a74e8c581789e51e087a4060e0"} Feb 03 08:44:43 crc kubenswrapper[4998]: I0203 08:44:43.896557 4998 generic.go:334] "Generic (PLEG): container finished" podID="e735f12e-e7a4-47d6-b87e-c0e80dbd2cee" containerID="47f9c6514fa03b1cc0e6c06fa963b72ce078e9d1060390a9057f382565687a2d" exitCode=0 Feb 03 08:44:43 crc kubenswrapper[4998]: I0203 08:44:43.896619 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell2-server-0" event={"ID":"e735f12e-e7a4-47d6-b87e-c0e80dbd2cee","Type":"ContainerDied","Data":"47f9c6514fa03b1cc0e6c06fa963b72ce078e9d1060390a9057f382565687a2d"} Feb 03 08:44:43 crc kubenswrapper[4998]: I0203 08:44:43.899271 4998 generic.go:334] "Generic (PLEG): container finished" podID="ca3b87c5-fc69-4b39-8ba1-a7d141370ca9" containerID="74386e2f48b47e6d843fd5882e2efa9f1120de232df432798360801fe105cf59" exitCode=0 Feb 03 08:44:43 crc kubenswrapper[4998]: I0203 08:44:43.899328 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell3-server-0" event={"ID":"ca3b87c5-fc69-4b39-8ba1-a7d141370ca9","Type":"ContainerDied","Data":"74386e2f48b47e6d843fd5882e2efa9f1120de232df432798360801fe105cf59"} Feb 03 08:44:44 crc kubenswrapper[4998]: I0203 08:44:44.909697 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell2-server-0" 
event={"ID":"e735f12e-e7a4-47d6-b87e-c0e80dbd2cee","Type":"ContainerStarted","Data":"15e1ec391572bf15b40ec5e2aaa73f252bc661b09dd261aa573b8387394ea326"} Feb 03 08:44:44 crc kubenswrapper[4998]: I0203 08:44:44.910188 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell2-server-0" Feb 03 08:44:44 crc kubenswrapper[4998]: I0203 08:44:44.912707 4998 generic.go:334] "Generic (PLEG): container finished" podID="1aff891a-e736-4fbc-9c51-72a14116351e" containerID="a45fe8ae2d8b6ad12dfa56fbd069e6eb56610578cb1df9130ba4862a4edb0938" exitCode=0 Feb 03 08:44:44 crc kubenswrapper[4998]: I0203 08:44:44.912759 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1aff891a-e736-4fbc-9c51-72a14116351e","Type":"ContainerDied","Data":"a45fe8ae2d8b6ad12dfa56fbd069e6eb56610578cb1df9130ba4862a4edb0938"} Feb 03 08:44:44 crc kubenswrapper[4998]: I0203 08:44:44.914349 4998 generic.go:334] "Generic (PLEG): container finished" podID="c22f1671-0f4d-4269-a228-c3abf1e9218f" containerID="a983e50daf28ae63131eccdb607204a895f6e3e2902e1d84d24afb7903da9093" exitCode=0 Feb 03 08:44:44 crc kubenswrapper[4998]: I0203 08:44:44.914390 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c22f1671-0f4d-4269-a228-c3abf1e9218f","Type":"ContainerDied","Data":"a983e50daf28ae63131eccdb607204a895f6e3e2902e1d84d24afb7903da9093"} Feb 03 08:44:44 crc kubenswrapper[4998]: I0203 08:44:44.918140 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell3-server-0" event={"ID":"ca3b87c5-fc69-4b39-8ba1-a7d141370ca9","Type":"ContainerStarted","Data":"8e82c7f1dab048746d38291d606b5bd9f2e29e162b4f869c61a0c57bd1413007"} Feb 03 08:44:44 crc kubenswrapper[4998]: I0203 08:44:44.918915 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell3-server-0" Feb 03 08:44:44 crc kubenswrapper[4998]: I0203 08:44:44.964306 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell2-server-0" podStartSLOduration=36.649292964 podStartE2EDuration="1m13.96428541s" podCreationTimestamp="2026-02-03 08:43:31 +0000 UTC" firstStartedPulling="2026-02-03 08:43:33.04416467 +0000 UTC m=+7051.330858476" lastFinishedPulling="2026-02-03 08:44:10.359157106 +0000 UTC m=+7088.645850922" observedRunningTime="2026-02-03 08:44:44.954565233 +0000 UTC m=+7123.241259059" watchObservedRunningTime="2026-02-03 08:44:44.96428541 +0000 UTC m=+7123.250979236" Feb 03 08:44:45 crc kubenswrapper[4998]: I0203 08:44:45.017034 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell3-server-0" podStartSLOduration=37.13929549 podStartE2EDuration="1m14.017011903s" podCreationTimestamp="2026-02-03 08:43:31 +0000 UTC" firstStartedPulling="2026-02-03 08:43:33.450711557 +0000 UTC m=+7051.737405353" lastFinishedPulling="2026-02-03 08:44:10.32842796 +0000 UTC m=+7088.615121766" observedRunningTime="2026-02-03 08:44:45.007279556 +0000 UTC m=+7123.293973372" watchObservedRunningTime="2026-02-03 08:44:45.017011903 +0000 UTC m=+7123.303705709" Feb 03 08:44:45 crc kubenswrapper[4998]: I0203 08:44:45.237949 4998 util.go:48] "No ready sandbox for pod can be found. 
Feb 03 08:44:45 crc kubenswrapper[4998]: I0203 08:44:45.353550 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a5a756d-dbb6-4d4c-a3a2-1cb81b60b456-operator-scripts\") pod \"7a5a756d-dbb6-4d4c-a3a2-1cb81b60b456\" (UID: \"7a5a756d-dbb6-4d4c-a3a2-1cb81b60b456\") "
Feb 03 08:44:45 crc kubenswrapper[4998]: I0203 08:44:45.353670 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cc4wt\" (UniqueName: \"kubernetes.io/projected/7a5a756d-dbb6-4d4c-a3a2-1cb81b60b456-kube-api-access-cc4wt\") pod \"7a5a756d-dbb6-4d4c-a3a2-1cb81b60b456\" (UID: \"7a5a756d-dbb6-4d4c-a3a2-1cb81b60b456\") "
Feb 03 08:44:45 crc kubenswrapper[4998]: I0203 08:44:45.354031 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a5a756d-dbb6-4d4c-a3a2-1cb81b60b456-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7a5a756d-dbb6-4d4c-a3a2-1cb81b60b456" (UID: "7a5a756d-dbb6-4d4c-a3a2-1cb81b60b456"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 03 08:44:45 crc kubenswrapper[4998]: I0203 08:44:45.354225 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a5a756d-dbb6-4d4c-a3a2-1cb81b60b456-operator-scripts\") on node \"crc\" DevicePath \"\""
Feb 03 08:44:45 crc kubenswrapper[4998]: I0203 08:44:45.358084 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a5a756d-dbb6-4d4c-a3a2-1cb81b60b456-kube-api-access-cc4wt" (OuterVolumeSpecName: "kube-api-access-cc4wt") pod "7a5a756d-dbb6-4d4c-a3a2-1cb81b60b456" (UID: "7a5a756d-dbb6-4d4c-a3a2-1cb81b60b456"). InnerVolumeSpecName "kube-api-access-cc4wt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 08:44:45 crc kubenswrapper[4998]: I0203 08:44:45.455292 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cc4wt\" (UniqueName: \"kubernetes.io/projected/7a5a756d-dbb6-4d4c-a3a2-1cb81b60b456-kube-api-access-cc4wt\") on node \"crc\" DevicePath \"\""
Feb 03 08:44:45 crc kubenswrapper[4998]: I0203 08:44:45.952192 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-w89wp" event={"ID":"7a5a756d-dbb6-4d4c-a3a2-1cb81b60b456","Type":"ContainerDied","Data":"86bae993bace4eb790411e356de6b1089f5f0a37c96a94ba88a8e33e93971521"}
Feb 03 08:44:45 crc kubenswrapper[4998]: I0203 08:44:45.952577 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="86bae993bace4eb790411e356de6b1089f5f0a37c96a94ba88a8e33e93971521"
Feb 03 08:44:45 crc kubenswrapper[4998]: I0203 08:44:45.952199 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-w89wp"
Feb 03 08:44:45 crc kubenswrapper[4998]: I0203 08:44:45.954076 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1aff891a-e736-4fbc-9c51-72a14116351e","Type":"ContainerStarted","Data":"e7e4017fec833f9544de0cd20ca4d56fc3435b44e15296a76c61a6f0301fc5d1"}
Feb 03 08:44:45 crc kubenswrapper[4998]: I0203 08:44:45.955303 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0"
Feb 03 08:44:45 crc kubenswrapper[4998]: I0203 08:44:45.960009 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c22f1671-0f4d-4269-a228-c3abf1e9218f","Type":"ContainerStarted","Data":"a64241fe410f087b87032762a98bf576c032de31ee5018fefaaa4e5ef85ae973"}
Feb 03 08:44:45 crc kubenswrapper[4998]: I0203 08:44:45.960596 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0"
Feb 03 08:44:45 crc kubenswrapper[4998]: I0203 08:44:45.988688 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=38.553170486 podStartE2EDuration="1m15.988671376s" podCreationTimestamp="2026-02-03 08:43:30 +0000 UTC" firstStartedPulling="2026-02-03 08:43:32.926994361 +0000 UTC m=+7051.213688167" lastFinishedPulling="2026-02-03 08:44:10.362495241 +0000 UTC m=+7088.649189057" observedRunningTime="2026-02-03 08:44:45.980719309 +0000 UTC m=+7124.267413125" watchObservedRunningTime="2026-02-03 08:44:45.988671376 +0000 UTC m=+7124.275365172"
Feb 03 08:44:46 crc kubenswrapper[4998]: I0203 08:44:46.006470 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=-9223371960.848326 podStartE2EDuration="1m16.006450152s" podCreationTimestamp="2026-02-03 08:43:30 +0000 UTC" firstStartedPulling="2026-02-03 08:43:32.746653961 +0000 UTC m=+7051.033347767" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 08:44:46.002844509 +0000 UTC m=+7124.289538325" watchObservedRunningTime="2026-02-03 08:44:46.006450152 +0000 UTC m=+7124.293143958"
Feb 03 08:44:48 crc kubenswrapper[4998]: I0203 08:44:48.238141 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-w89wp"]
Feb 03 08:44:48 crc kubenswrapper[4998]: I0203 08:44:48.250142 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-w89wp"]
Feb 03 08:44:48 crc kubenswrapper[4998]: I0203 08:44:48.437088 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a5a756d-dbb6-4d4c-a3a2-1cb81b60b456" path="/var/lib/kubelet/pods/7a5a756d-dbb6-4d4c-a3a2-1cb81b60b456/volumes"
Feb 03 08:44:49 crc kubenswrapper[4998]: I0203 08:44:49.717859 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-jgl2v"]
Feb 03 08:44:49 crc kubenswrapper[4998]: E0203 08:44:49.718164 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a5a756d-dbb6-4d4c-a3a2-1cb81b60b456" containerName="mariadb-account-create-update"
Feb 03 08:44:49 crc kubenswrapper[4998]: I0203 08:44:49.718176 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a5a756d-dbb6-4d4c-a3a2-1cb81b60b456" containerName="mariadb-account-create-update"
Feb 03 08:44:49 crc kubenswrapper[4998]: I0203 08:44:49.718329 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a5a756d-dbb6-4d4c-a3a2-1cb81b60b456" containerName="mariadb-account-create-update"
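The rabbitmq-server-0 entry just above logs podStartSLOduration=-9223371960.848326 with lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC", Go's zero time. A plausible reading: time.Time.Sub saturates at the int64 duration limits when subtracting across the zero time, and the subsequent duration subtraction wraps around, which reproduces the logged value exactly. A sketch of that failure mode, under those assumptions (this is not kubelet's actual code path):

package main

import (
	"fmt"
	"time"
)

func main() {
	var lastFinishedPulling time.Time // zero value: 0001-01-01 00:00:00 UTC
	firstStartedPulling := time.Date(2026, 2, 3, 8, 43, 32, 746653961, time.UTC)
	created := time.Date(2026, 2, 3, 8, 43, 30, 0, time.UTC)
	observedRunning := time.Date(2026, 2, 3, 8, 44, 46, 6450152, time.UTC)

	e2e := observedRunning.Sub(created) // 1m16.006450152s
	// Subtracting a 2026 timestamp from the year-1 zero time exceeds the
	// Duration range, so Sub saturates at the minimum Duration:
	pull := lastFinishedPulling.Sub(firstStartedPulling) // -2562047h47m16.854775808s
	// e2e - pull then overflows int64 and wraps to a huge negative value:
	slo := e2e - pull
	fmt.Println(e2e, pull, slo.Seconds()) // ... -9.223371960848326e+09
}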
podUID="7a5a756d-dbb6-4d4c-a3a2-1cb81b60b456" containerName="mariadb-account-create-update" Feb 03 08:44:49 crc kubenswrapper[4998]: I0203 08:44:49.718807 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-jgl2v" Feb 03 08:44:49 crc kubenswrapper[4998]: I0203 08:44:49.721062 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell2-mariadb-root-db-secret" Feb 03 08:44:49 crc kubenswrapper[4998]: I0203 08:44:49.732104 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-jgl2v"] Feb 03 08:44:49 crc kubenswrapper[4998]: I0203 08:44:49.826514 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a4cb2e66-a168-4843-a6d2-79c5c18e48ec-operator-scripts\") pod \"root-account-create-update-jgl2v\" (UID: \"a4cb2e66-a168-4843-a6d2-79c5c18e48ec\") " pod="openstack/root-account-create-update-jgl2v" Feb 03 08:44:49 crc kubenswrapper[4998]: I0203 08:44:49.826608 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vv8xs\" (UniqueName: \"kubernetes.io/projected/a4cb2e66-a168-4843-a6d2-79c5c18e48ec-kube-api-access-vv8xs\") pod \"root-account-create-update-jgl2v\" (UID: \"a4cb2e66-a168-4843-a6d2-79c5c18e48ec\") " pod="openstack/root-account-create-update-jgl2v" Feb 03 08:44:49 crc kubenswrapper[4998]: I0203 08:44:49.928057 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vv8xs\" (UniqueName: \"kubernetes.io/projected/a4cb2e66-a168-4843-a6d2-79c5c18e48ec-kube-api-access-vv8xs\") pod \"root-account-create-update-jgl2v\" (UID: \"a4cb2e66-a168-4843-a6d2-79c5c18e48ec\") " pod="openstack/root-account-create-update-jgl2v" Feb 03 08:44:49 crc kubenswrapper[4998]: I0203 08:44:49.928181 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a4cb2e66-a168-4843-a6d2-79c5c18e48ec-operator-scripts\") pod \"root-account-create-update-jgl2v\" (UID: \"a4cb2e66-a168-4843-a6d2-79c5c18e48ec\") " pod="openstack/root-account-create-update-jgl2v" Feb 03 08:44:49 crc kubenswrapper[4998]: I0203 08:44:49.928989 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a4cb2e66-a168-4843-a6d2-79c5c18e48ec-operator-scripts\") pod \"root-account-create-update-jgl2v\" (UID: \"a4cb2e66-a168-4843-a6d2-79c5c18e48ec\") " pod="openstack/root-account-create-update-jgl2v" Feb 03 08:44:49 crc kubenswrapper[4998]: I0203 08:44:49.950519 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vv8xs\" (UniqueName: \"kubernetes.io/projected/a4cb2e66-a168-4843-a6d2-79c5c18e48ec-kube-api-access-vv8xs\") pod \"root-account-create-update-jgl2v\" (UID: \"a4cb2e66-a168-4843-a6d2-79c5c18e48ec\") " pod="openstack/root-account-create-update-jgl2v" Feb 03 08:44:50 crc kubenswrapper[4998]: I0203 08:44:50.034945 4998 util.go:30] "No sandbox for pod can be found. 
Feb 03 08:44:50 crc kubenswrapper[4998]: I0203 08:44:50.552696 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-jgl2v"]
Feb 03 08:44:50 crc kubenswrapper[4998]: I0203 08:44:50.997250 4998 generic.go:334] "Generic (PLEG): container finished" podID="a4cb2e66-a168-4843-a6d2-79c5c18e48ec" containerID="1a6866c24e417591f2f935121a21e233dfc87dc6acf0fbfef89df0a20ad6155c" exitCode=0
Feb 03 08:44:50 crc kubenswrapper[4998]: I0203 08:44:50.997291 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-jgl2v" event={"ID":"a4cb2e66-a168-4843-a6d2-79c5c18e48ec","Type":"ContainerDied","Data":"1a6866c24e417591f2f935121a21e233dfc87dc6acf0fbfef89df0a20ad6155c"}
Feb 03 08:44:50 crc kubenswrapper[4998]: I0203 08:44:50.997314 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-jgl2v" event={"ID":"a4cb2e66-a168-4843-a6d2-79c5c18e48ec","Type":"ContainerStarted","Data":"b1d40c8cbbab7a6258d8eb7a0d118f75f6903046f073585f7fae48d39bc2e518"}
Feb 03 08:44:52 crc kubenswrapper[4998]: I0203 08:44:52.305928 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-jgl2v"
Feb 03 08:44:52 crc kubenswrapper[4998]: I0203 08:44:52.367161 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vv8xs\" (UniqueName: \"kubernetes.io/projected/a4cb2e66-a168-4843-a6d2-79c5c18e48ec-kube-api-access-vv8xs\") pod \"a4cb2e66-a168-4843-a6d2-79c5c18e48ec\" (UID: \"a4cb2e66-a168-4843-a6d2-79c5c18e48ec\") "
Feb 03 08:44:52 crc kubenswrapper[4998]: I0203 08:44:52.367264 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a4cb2e66-a168-4843-a6d2-79c5c18e48ec-operator-scripts\") pod \"a4cb2e66-a168-4843-a6d2-79c5c18e48ec\" (UID: \"a4cb2e66-a168-4843-a6d2-79c5c18e48ec\") "
Feb 03 08:44:52 crc kubenswrapper[4998]: I0203 08:44:52.368059 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4cb2e66-a168-4843-a6d2-79c5c18e48ec-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a4cb2e66-a168-4843-a6d2-79c5c18e48ec" (UID: "a4cb2e66-a168-4843-a6d2-79c5c18e48ec"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 03 08:44:52 crc kubenswrapper[4998]: I0203 08:44:52.374228 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4cb2e66-a168-4843-a6d2-79c5c18e48ec-kube-api-access-vv8xs" (OuterVolumeSpecName: "kube-api-access-vv8xs") pod "a4cb2e66-a168-4843-a6d2-79c5c18e48ec" (UID: "a4cb2e66-a168-4843-a6d2-79c5c18e48ec"). InnerVolumeSpecName "kube-api-access-vv8xs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 08:44:52 crc kubenswrapper[4998]: I0203 08:44:52.469081 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vv8xs\" (UniqueName: \"kubernetes.io/projected/a4cb2e66-a168-4843-a6d2-79c5c18e48ec-kube-api-access-vv8xs\") on node \"crc\" DevicePath \"\""
Feb 03 08:44:52 crc kubenswrapper[4998]: I0203 08:44:52.469317 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a4cb2e66-a168-4843-a6d2-79c5c18e48ec-operator-scripts\") on node \"crc\" DevicePath \"\""
Feb 03 08:44:53 crc kubenswrapper[4998]: I0203 08:44:53.019644 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-jgl2v" event={"ID":"a4cb2e66-a168-4843-a6d2-79c5c18e48ec","Type":"ContainerDied","Data":"b1d40c8cbbab7a6258d8eb7a0d118f75f6903046f073585f7fae48d39bc2e518"}
Feb 03 08:44:53 crc kubenswrapper[4998]: I0203 08:44:53.019687 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b1d40c8cbbab7a6258d8eb7a0d118f75f6903046f073585f7fae48d39bc2e518"
Feb 03 08:44:53 crc kubenswrapper[4998]: I0203 08:44:53.019744 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-jgl2v"
Feb 03 08:44:58 crc kubenswrapper[4998]: I0203 08:44:58.246309 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-jgl2v"]
Feb 03 08:44:58 crc kubenswrapper[4998]: I0203 08:44:58.258137 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-jgl2v"]
Feb 03 08:44:58 crc kubenswrapper[4998]: I0203 08:44:58.438349 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a4cb2e66-a168-4843-a6d2-79c5c18e48ec" path="/var/lib/kubelet/pods/a4cb2e66-a168-4843-a6d2-79c5c18e48ec/volumes"
Feb 03 08:45:00 crc kubenswrapper[4998]: I0203 08:45:00.152191 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501805-ff9s9"]
Feb 03 08:45:00 crc kubenswrapper[4998]: E0203 08:45:00.152828 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4cb2e66-a168-4843-a6d2-79c5c18e48ec" containerName="mariadb-account-create-update"
Feb 03 08:45:00 crc kubenswrapper[4998]: I0203 08:45:00.152842 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4cb2e66-a168-4843-a6d2-79c5c18e48ec" containerName="mariadb-account-create-update"
Feb 03 08:45:00 crc kubenswrapper[4998]: I0203 08:45:00.153024 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4cb2e66-a168-4843-a6d2-79c5c18e48ec" containerName="mariadb-account-create-update"
Feb 03 08:45:00 crc kubenswrapper[4998]: I0203 08:45:00.153564 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501805-ff9s9"
Feb 03 08:45:00 crc kubenswrapper[4998]: I0203 08:45:00.157033 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Feb 03 08:45:00 crc kubenswrapper[4998]: I0203 08:45:00.157085 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Feb 03 08:45:00 crc kubenswrapper[4998]: I0203 08:45:00.168089 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501805-ff9s9"]
Feb 03 08:45:00 crc kubenswrapper[4998]: I0203 08:45:00.205236 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f7e72e8a-2182-4c28-bdc0-6132cd55ebca-secret-volume\") pod \"collect-profiles-29501805-ff9s9\" (UID: \"f7e72e8a-2182-4c28-bdc0-6132cd55ebca\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501805-ff9s9"
Feb 03 08:45:00 crc kubenswrapper[4998]: I0203 08:45:00.205398 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ptzdb\" (UniqueName: \"kubernetes.io/projected/f7e72e8a-2182-4c28-bdc0-6132cd55ebca-kube-api-access-ptzdb\") pod \"collect-profiles-29501805-ff9s9\" (UID: \"f7e72e8a-2182-4c28-bdc0-6132cd55ebca\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501805-ff9s9"
Feb 03 08:45:00 crc kubenswrapper[4998]: I0203 08:45:00.205441 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f7e72e8a-2182-4c28-bdc0-6132cd55ebca-config-volume\") pod \"collect-profiles-29501805-ff9s9\" (UID: \"f7e72e8a-2182-4c28-bdc0-6132cd55ebca\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501805-ff9s9"
Feb 03 08:45:00 crc kubenswrapper[4998]: I0203 08:45:00.306992 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f7e72e8a-2182-4c28-bdc0-6132cd55ebca-secret-volume\") pod \"collect-profiles-29501805-ff9s9\" (UID: \"f7e72e8a-2182-4c28-bdc0-6132cd55ebca\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501805-ff9s9"
Feb 03 08:45:00 crc kubenswrapper[4998]: I0203 08:45:00.307053 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ptzdb\" (UniqueName: \"kubernetes.io/projected/f7e72e8a-2182-4c28-bdc0-6132cd55ebca-kube-api-access-ptzdb\") pod \"collect-profiles-29501805-ff9s9\" (UID: \"f7e72e8a-2182-4c28-bdc0-6132cd55ebca\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501805-ff9s9"
Feb 03 08:45:00 crc kubenswrapper[4998]: I0203 08:45:00.307082 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f7e72e8a-2182-4c28-bdc0-6132cd55ebca-config-volume\") pod \"collect-profiles-29501805-ff9s9\" (UID: \"f7e72e8a-2182-4c28-bdc0-6132cd55ebca\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501805-ff9s9"
Feb 03 08:45:00 crc kubenswrapper[4998]: I0203 08:45:00.307971 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f7e72e8a-2182-4c28-bdc0-6132cd55ebca-config-volume\") pod \"collect-profiles-29501805-ff9s9\" (UID: \"f7e72e8a-2182-4c28-bdc0-6132cd55ebca\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501805-ff9s9"
Feb 03 08:45:00 crc kubenswrapper[4998]: I0203 08:45:00.320184 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f7e72e8a-2182-4c28-bdc0-6132cd55ebca-secret-volume\") pod \"collect-profiles-29501805-ff9s9\" (UID: \"f7e72e8a-2182-4c28-bdc0-6132cd55ebca\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501805-ff9s9"
Feb 03 08:45:00 crc kubenswrapper[4998]: I0203 08:45:00.325823 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ptzdb\" (UniqueName: \"kubernetes.io/projected/f7e72e8a-2182-4c28-bdc0-6132cd55ebca-kube-api-access-ptzdb\") pod \"collect-profiles-29501805-ff9s9\" (UID: \"f7e72e8a-2182-4c28-bdc0-6132cd55ebca\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501805-ff9s9"
Feb 03 08:45:00 crc kubenswrapper[4998]: I0203 08:45:00.475825 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501805-ff9s9"
Feb 03 08:45:00 crc kubenswrapper[4998]: I0203 08:45:00.949840 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501805-ff9s9"]
Feb 03 08:45:00 crc kubenswrapper[4998]: W0203 08:45:00.952184 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf7e72e8a_2182_4c28_bdc0_6132cd55ebca.slice/crio-42b5869f98712162e15ae5a9ccccb1ad28b60cec02c4c321fff2ddf2ce5e4f13 WatchSource:0}: Error finding container 42b5869f98712162e15ae5a9ccccb1ad28b60cec02c4c321fff2ddf2ce5e4f13: Status 404 returned error can't find the container with id 42b5869f98712162e15ae5a9ccccb1ad28b60cec02c4c321fff2ddf2ce5e4f13
Feb 03 08:45:01 crc kubenswrapper[4998]: I0203 08:45:01.090269 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501805-ff9s9" event={"ID":"f7e72e8a-2182-4c28-bdc0-6132cd55ebca","Type":"ContainerStarted","Data":"42b5869f98712162e15ae5a9ccccb1ad28b60cec02c4c321fff2ddf2ce5e4f13"}
Feb 03 08:45:02 crc kubenswrapper[4998]: I0203 08:45:02.108873 4998 generic.go:334] "Generic (PLEG): container finished" podID="f7e72e8a-2182-4c28-bdc0-6132cd55ebca" containerID="5dc0699d2c3fce6b8757b2fbef97c7b23af1e22bfa40eac4290da485409e8d37" exitCode=0
Feb 03 08:45:02 crc kubenswrapper[4998]: I0203 08:45:02.108921 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501805-ff9s9" event={"ID":"f7e72e8a-2182-4c28-bdc0-6132cd55ebca","Type":"ContainerDied","Data":"5dc0699d2c3fce6b8757b2fbef97c7b23af1e22bfa40eac4290da485409e8d37"}
Feb 03 08:45:02 crc kubenswrapper[4998]: I0203 08:45:02.168708 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0"
Feb 03 08:45:02 crc kubenswrapper[4998]: I0203 08:45:02.185825 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0"
Feb 03 08:45:02 crc kubenswrapper[4998]: I0203 08:45:02.465926 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell2-server-0"
Feb 03 08:45:02 crc kubenswrapper[4998]: I0203 08:45:02.845658 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell3-server-0"
pod="openstack/rabbitmq-cell3-server-0" Feb 03 08:45:03 crc kubenswrapper[4998]: I0203 08:45:03.274049 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-qkznt"] Feb 03 08:45:03 crc kubenswrapper[4998]: I0203 08:45:03.275979 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-qkznt" Feb 03 08:45:03 crc kubenswrapper[4998]: I0203 08:45:03.283624 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-mariadb-root-db-secret" Feb 03 08:45:03 crc kubenswrapper[4998]: I0203 08:45:03.297755 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-qkznt"] Feb 03 08:45:03 crc kubenswrapper[4998]: I0203 08:45:03.381686 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e429ee71-b2e9-4d43-b241-b3de2b261d9d-operator-scripts\") pod \"root-account-create-update-qkznt\" (UID: \"e429ee71-b2e9-4d43-b241-b3de2b261d9d\") " pod="openstack/root-account-create-update-qkznt" Feb 03 08:45:03 crc kubenswrapper[4998]: I0203 08:45:03.381850 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5v44\" (UniqueName: \"kubernetes.io/projected/e429ee71-b2e9-4d43-b241-b3de2b261d9d-kube-api-access-l5v44\") pod \"root-account-create-update-qkznt\" (UID: \"e429ee71-b2e9-4d43-b241-b3de2b261d9d\") " pod="openstack/root-account-create-update-qkznt" Feb 03 08:45:03 crc kubenswrapper[4998]: I0203 08:45:03.412989 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501805-ff9s9" Feb 03 08:45:03 crc kubenswrapper[4998]: I0203 08:45:03.483538 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f7e72e8a-2182-4c28-bdc0-6132cd55ebca-secret-volume\") pod \"f7e72e8a-2182-4c28-bdc0-6132cd55ebca\" (UID: \"f7e72e8a-2182-4c28-bdc0-6132cd55ebca\") " Feb 03 08:45:03 crc kubenswrapper[4998]: I0203 08:45:03.483598 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f7e72e8a-2182-4c28-bdc0-6132cd55ebca-config-volume\") pod \"f7e72e8a-2182-4c28-bdc0-6132cd55ebca\" (UID: \"f7e72e8a-2182-4c28-bdc0-6132cd55ebca\") " Feb 03 08:45:03 crc kubenswrapper[4998]: I0203 08:45:03.483703 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ptzdb\" (UniqueName: \"kubernetes.io/projected/f7e72e8a-2182-4c28-bdc0-6132cd55ebca-kube-api-access-ptzdb\") pod \"f7e72e8a-2182-4c28-bdc0-6132cd55ebca\" (UID: \"f7e72e8a-2182-4c28-bdc0-6132cd55ebca\") " Feb 03 08:45:03 crc kubenswrapper[4998]: I0203 08:45:03.484153 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5v44\" (UniqueName: \"kubernetes.io/projected/e429ee71-b2e9-4d43-b241-b3de2b261d9d-kube-api-access-l5v44\") pod \"root-account-create-update-qkznt\" (UID: \"e429ee71-b2e9-4d43-b241-b3de2b261d9d\") " pod="openstack/root-account-create-update-qkznt" Feb 03 08:45:03 crc kubenswrapper[4998]: I0203 08:45:03.484244 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e429ee71-b2e9-4d43-b241-b3de2b261d9d-operator-scripts\") 
pod \"root-account-create-update-qkznt\" (UID: \"e429ee71-b2e9-4d43-b241-b3de2b261d9d\") " pod="openstack/root-account-create-update-qkznt" Feb 03 08:45:03 crc kubenswrapper[4998]: I0203 08:45:03.484472 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7e72e8a-2182-4c28-bdc0-6132cd55ebca-config-volume" (OuterVolumeSpecName: "config-volume") pod "f7e72e8a-2182-4c28-bdc0-6132cd55ebca" (UID: "f7e72e8a-2182-4c28-bdc0-6132cd55ebca"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:45:03 crc kubenswrapper[4998]: I0203 08:45:03.485020 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e429ee71-b2e9-4d43-b241-b3de2b261d9d-operator-scripts\") pod \"root-account-create-update-qkznt\" (UID: \"e429ee71-b2e9-4d43-b241-b3de2b261d9d\") " pod="openstack/root-account-create-update-qkznt" Feb 03 08:45:03 crc kubenswrapper[4998]: I0203 08:45:03.488771 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7e72e8a-2182-4c28-bdc0-6132cd55ebca-kube-api-access-ptzdb" (OuterVolumeSpecName: "kube-api-access-ptzdb") pod "f7e72e8a-2182-4c28-bdc0-6132cd55ebca" (UID: "f7e72e8a-2182-4c28-bdc0-6132cd55ebca"). InnerVolumeSpecName "kube-api-access-ptzdb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:45:03 crc kubenswrapper[4998]: I0203 08:45:03.493719 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7e72e8a-2182-4c28-bdc0-6132cd55ebca-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "f7e72e8a-2182-4c28-bdc0-6132cd55ebca" (UID: "f7e72e8a-2182-4c28-bdc0-6132cd55ebca"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:45:03 crc kubenswrapper[4998]: I0203 08:45:03.509404 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5v44\" (UniqueName: \"kubernetes.io/projected/e429ee71-b2e9-4d43-b241-b3de2b261d9d-kube-api-access-l5v44\") pod \"root-account-create-update-qkznt\" (UID: \"e429ee71-b2e9-4d43-b241-b3de2b261d9d\") " pod="openstack/root-account-create-update-qkznt" Feb 03 08:45:03 crc kubenswrapper[4998]: I0203 08:45:03.586228 4998 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f7e72e8a-2182-4c28-bdc0-6132cd55ebca-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 03 08:45:03 crc kubenswrapper[4998]: I0203 08:45:03.586278 4998 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f7e72e8a-2182-4c28-bdc0-6132cd55ebca-config-volume\") on node \"crc\" DevicePath \"\"" Feb 03 08:45:03 crc kubenswrapper[4998]: I0203 08:45:03.586291 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ptzdb\" (UniqueName: \"kubernetes.io/projected/f7e72e8a-2182-4c28-bdc0-6132cd55ebca-kube-api-access-ptzdb\") on node \"crc\" DevicePath \"\"" Feb 03 08:45:03 crc kubenswrapper[4998]: I0203 08:45:03.617328 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-qkznt" Feb 03 08:45:04 crc kubenswrapper[4998]: I0203 08:45:04.084610 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-qkznt"] Feb 03 08:45:04 crc kubenswrapper[4998]: I0203 08:45:04.126514 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-qkznt" event={"ID":"e429ee71-b2e9-4d43-b241-b3de2b261d9d","Type":"ContainerStarted","Data":"d22ffdcc9f93cfaee8fd13cf0b09d5849be1eb0c755abf876075a238ca58974a"} Feb 03 08:45:04 crc kubenswrapper[4998]: I0203 08:45:04.128594 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501805-ff9s9" event={"ID":"f7e72e8a-2182-4c28-bdc0-6132cd55ebca","Type":"ContainerDied","Data":"42b5869f98712162e15ae5a9ccccb1ad28b60cec02c4c321fff2ddf2ce5e4f13"} Feb 03 08:45:04 crc kubenswrapper[4998]: I0203 08:45:04.128626 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="42b5869f98712162e15ae5a9ccccb1ad28b60cec02c4c321fff2ddf2ce5e4f13" Feb 03 08:45:04 crc kubenswrapper[4998]: I0203 08:45:04.128677 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501805-ff9s9" Feb 03 08:45:04 crc kubenswrapper[4998]: I0203 08:45:04.475465 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501760-mfksh"] Feb 03 08:45:04 crc kubenswrapper[4998]: I0203 08:45:04.482908 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501760-mfksh"] Feb 03 08:45:05 crc kubenswrapper[4998]: I0203 08:45:05.141918 4998 generic.go:334] "Generic (PLEG): container finished" podID="e429ee71-b2e9-4d43-b241-b3de2b261d9d" containerID="9d63123d9d2b80d986be0de898fdf65ac2c14a5d2ed324fd165204982a7ccf38" exitCode=0 Feb 03 08:45:05 crc kubenswrapper[4998]: I0203 08:45:05.141971 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-qkznt" event={"ID":"e429ee71-b2e9-4d43-b241-b3de2b261d9d","Type":"ContainerDied","Data":"9d63123d9d2b80d986be0de898fdf65ac2c14a5d2ed324fd165204982a7ccf38"} Feb 03 08:45:06 crc kubenswrapper[4998]: I0203 08:45:06.439227 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-qkznt" Feb 03 08:45:06 crc kubenswrapper[4998]: I0203 08:45:06.454601 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="60117665-bb1e-46e2-b0c5-0eac0908882e" path="/var/lib/kubelet/pods/60117665-bb1e-46e2-b0c5-0eac0908882e/volumes" Feb 03 08:45:06 crc kubenswrapper[4998]: I0203 08:45:06.558332 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e429ee71-b2e9-4d43-b241-b3de2b261d9d-operator-scripts\") pod \"e429ee71-b2e9-4d43-b241-b3de2b261d9d\" (UID: \"e429ee71-b2e9-4d43-b241-b3de2b261d9d\") " Feb 03 08:45:06 crc kubenswrapper[4998]: I0203 08:45:06.558446 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l5v44\" (UniqueName: \"kubernetes.io/projected/e429ee71-b2e9-4d43-b241-b3de2b261d9d-kube-api-access-l5v44\") pod \"e429ee71-b2e9-4d43-b241-b3de2b261d9d\" (UID: \"e429ee71-b2e9-4d43-b241-b3de2b261d9d\") " Feb 03 08:45:06 crc kubenswrapper[4998]: I0203 08:45:06.559572 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e429ee71-b2e9-4d43-b241-b3de2b261d9d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e429ee71-b2e9-4d43-b241-b3de2b261d9d" (UID: "e429ee71-b2e9-4d43-b241-b3de2b261d9d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:45:06 crc kubenswrapper[4998]: I0203 08:45:06.564551 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e429ee71-b2e9-4d43-b241-b3de2b261d9d-kube-api-access-l5v44" (OuterVolumeSpecName: "kube-api-access-l5v44") pod "e429ee71-b2e9-4d43-b241-b3de2b261d9d" (UID: "e429ee71-b2e9-4d43-b241-b3de2b261d9d"). InnerVolumeSpecName "kube-api-access-l5v44". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:45:06 crc kubenswrapper[4998]: I0203 08:45:06.661082 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e429ee71-b2e9-4d43-b241-b3de2b261d9d-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 08:45:06 crc kubenswrapper[4998]: I0203 08:45:06.661137 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l5v44\" (UniqueName: \"kubernetes.io/projected/e429ee71-b2e9-4d43-b241-b3de2b261d9d-kube-api-access-l5v44\") on node \"crc\" DevicePath \"\"" Feb 03 08:45:07 crc kubenswrapper[4998]: I0203 08:45:07.159272 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-qkznt" event={"ID":"e429ee71-b2e9-4d43-b241-b3de2b261d9d","Type":"ContainerDied","Data":"d22ffdcc9f93cfaee8fd13cf0b09d5849be1eb0c755abf876075a238ca58974a"} Feb 03 08:45:07 crc kubenswrapper[4998]: I0203 08:45:07.159927 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d22ffdcc9f93cfaee8fd13cf0b09d5849be1eb0c755abf876075a238ca58974a" Feb 03 08:45:07 crc kubenswrapper[4998]: I0203 08:45:07.159356 4998 util.go:48] "No ready sandbox for pod can be found. 
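The "Cleaned up orphaned pod volumes dir" entries (kubelet_volumes.go:163) record the housekeeping pass that removes /var/lib/kubelet/pods/<podUID>/volumes once a deleted pod's volumes have all been unmounted, as the unmount/detach sequences above show. A simplified sketch of that step; the safety checks kubelet performs before deleting (pod really gone, nothing still mounted) are elided here:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// cleanupOrphanedPodDir removes the per-pod volumes directory for a pod
// that no longer exists. In kubelet this only runs after every volume for
// the pod has been torn down; that precondition is assumed, not checked.
func cleanupOrphanedPodDir(podUID string) error {
	dir := filepath.Join("/var/lib/kubelet/pods", podUID, "volumes")
	if err := os.RemoveAll(dir); err != nil {
		return err
	}
	fmt.Printf("Cleaned up orphaned pod volumes dir podUID=%q path=%q\n", podUID, dir)
	return nil
}

func main() {
	_ = cleanupOrphanedPodDir("60117665-bb1e-46e2-b0c5-0eac0908882e")
}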
Feb 03 08:45:07 crc kubenswrapper[4998]: I0203 08:45:07.826850 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-qlcmv"]
Feb 03 08:45:07 crc kubenswrapper[4998]: E0203 08:45:07.827362 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e429ee71-b2e9-4d43-b241-b3de2b261d9d" containerName="mariadb-account-create-update"
Feb 03 08:45:07 crc kubenswrapper[4998]: I0203 08:45:07.827384 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="e429ee71-b2e9-4d43-b241-b3de2b261d9d" containerName="mariadb-account-create-update"
Feb 03 08:45:07 crc kubenswrapper[4998]: E0203 08:45:07.827430 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7e72e8a-2182-4c28-bdc0-6132cd55ebca" containerName="collect-profiles"
Feb 03 08:45:07 crc kubenswrapper[4998]: I0203 08:45:07.827443 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7e72e8a-2182-4c28-bdc0-6132cd55ebca" containerName="collect-profiles"
Feb 03 08:45:07 crc kubenswrapper[4998]: I0203 08:45:07.827691 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7e72e8a-2182-4c28-bdc0-6132cd55ebca" containerName="collect-profiles"
Feb 03 08:45:07 crc kubenswrapper[4998]: I0203 08:45:07.827725 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="e429ee71-b2e9-4d43-b241-b3de2b261d9d" containerName="mariadb-account-create-update"
Feb 03 08:45:07 crc kubenswrapper[4998]: I0203 08:45:07.829673 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qlcmv"
Feb 03 08:45:07 crc kubenswrapper[4998]: I0203 08:45:07.844701 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qlcmv"]
Feb 03 08:45:07 crc kubenswrapper[4998]: I0203 08:45:07.982709 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qr9wx\" (UniqueName: \"kubernetes.io/projected/196e37a5-e55b-4d18-be6b-411f40bcae01-kube-api-access-qr9wx\") pod \"community-operators-qlcmv\" (UID: \"196e37a5-e55b-4d18-be6b-411f40bcae01\") " pod="openshift-marketplace/community-operators-qlcmv"
Feb 03 08:45:07 crc kubenswrapper[4998]: I0203 08:45:07.982925 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/196e37a5-e55b-4d18-be6b-411f40bcae01-catalog-content\") pod \"community-operators-qlcmv\" (UID: \"196e37a5-e55b-4d18-be6b-411f40bcae01\") " pod="openshift-marketplace/community-operators-qlcmv"
Feb 03 08:45:07 crc kubenswrapper[4998]: I0203 08:45:07.982982 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/196e37a5-e55b-4d18-be6b-411f40bcae01-utilities\") pod \"community-operators-qlcmv\" (UID: \"196e37a5-e55b-4d18-be6b-411f40bcae01\") " pod="openshift-marketplace/community-operators-qlcmv"
Feb 03 08:45:08 crc kubenswrapper[4998]: I0203 08:45:08.084944 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/196e37a5-e55b-4d18-be6b-411f40bcae01-catalog-content\") pod \"community-operators-qlcmv\" (UID: \"196e37a5-e55b-4d18-be6b-411f40bcae01\") " pod="openshift-marketplace/community-operators-qlcmv"
Feb 03 08:45:08 crc kubenswrapper[4998]: I0203 08:45:08.085013 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/196e37a5-e55b-4d18-be6b-411f40bcae01-utilities\") pod \"community-operators-qlcmv\" (UID: \"196e37a5-e55b-4d18-be6b-411f40bcae01\") " pod="openshift-marketplace/community-operators-qlcmv"
Feb 03 08:45:08 crc kubenswrapper[4998]: I0203 08:45:08.085104 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qr9wx\" (UniqueName: \"kubernetes.io/projected/196e37a5-e55b-4d18-be6b-411f40bcae01-kube-api-access-qr9wx\") pod \"community-operators-qlcmv\" (UID: \"196e37a5-e55b-4d18-be6b-411f40bcae01\") " pod="openshift-marketplace/community-operators-qlcmv"
Feb 03 08:45:08 crc kubenswrapper[4998]: I0203 08:45:08.085471 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/196e37a5-e55b-4d18-be6b-411f40bcae01-utilities\") pod \"community-operators-qlcmv\" (UID: \"196e37a5-e55b-4d18-be6b-411f40bcae01\") " pod="openshift-marketplace/community-operators-qlcmv"
Feb 03 08:45:08 crc kubenswrapper[4998]: I0203 08:45:08.085581 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/196e37a5-e55b-4d18-be6b-411f40bcae01-catalog-content\") pod \"community-operators-qlcmv\" (UID: \"196e37a5-e55b-4d18-be6b-411f40bcae01\") " pod="openshift-marketplace/community-operators-qlcmv"
Feb 03 08:45:08 crc kubenswrapper[4998]: I0203 08:45:08.106979 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qr9wx\" (UniqueName: \"kubernetes.io/projected/196e37a5-e55b-4d18-be6b-411f40bcae01-kube-api-access-qr9wx\") pod \"community-operators-qlcmv\" (UID: \"196e37a5-e55b-4d18-be6b-411f40bcae01\") " pod="openshift-marketplace/community-operators-qlcmv"
Feb 03 08:45:08 crc kubenswrapper[4998]: I0203 08:45:08.152692 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qlcmv"
Feb 03 08:45:08 crc kubenswrapper[4998]: I0203 08:45:08.218096 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7cbff95ccf-lcrch"]
Feb 03 08:45:08 crc kubenswrapper[4998]: I0203 08:45:08.219564 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cbff95ccf-lcrch"
Feb 03 08:45:08 crc kubenswrapper[4998]: I0203 08:45:08.235444 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7cbff95ccf-lcrch"]
Feb 03 08:45:08 crc kubenswrapper[4998]: I0203 08:45:08.388654 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6857c913-b07d-495c-a08f-66b849ce9d2d-config\") pod \"dnsmasq-dns-7cbff95ccf-lcrch\" (UID: \"6857c913-b07d-495c-a08f-66b849ce9d2d\") " pod="openstack/dnsmasq-dns-7cbff95ccf-lcrch"
Feb 03 08:45:08 crc kubenswrapper[4998]: I0203 08:45:08.389061 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6857c913-b07d-495c-a08f-66b849ce9d2d-dns-svc\") pod \"dnsmasq-dns-7cbff95ccf-lcrch\" (UID: \"6857c913-b07d-495c-a08f-66b849ce9d2d\") " pod="openstack/dnsmasq-dns-7cbff95ccf-lcrch"
Feb 03 08:45:08 crc kubenswrapper[4998]: I0203 08:45:08.389095 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q955x\" (UniqueName: \"kubernetes.io/projected/6857c913-b07d-495c-a08f-66b849ce9d2d-kube-api-access-q955x\") pod \"dnsmasq-dns-7cbff95ccf-lcrch\" (UID: \"6857c913-b07d-495c-a08f-66b849ce9d2d\") " pod="openstack/dnsmasq-dns-7cbff95ccf-lcrch"
Feb 03 08:45:08 crc kubenswrapper[4998]: I0203 08:45:08.490844 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6857c913-b07d-495c-a08f-66b849ce9d2d-config\") pod \"dnsmasq-dns-7cbff95ccf-lcrch\" (UID: \"6857c913-b07d-495c-a08f-66b849ce9d2d\") " pod="openstack/dnsmasq-dns-7cbff95ccf-lcrch"
Feb 03 08:45:08 crc kubenswrapper[4998]: I0203 08:45:08.490931 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6857c913-b07d-495c-a08f-66b849ce9d2d-dns-svc\") pod \"dnsmasq-dns-7cbff95ccf-lcrch\" (UID: \"6857c913-b07d-495c-a08f-66b849ce9d2d\") " pod="openstack/dnsmasq-dns-7cbff95ccf-lcrch"
Feb 03 08:45:08 crc kubenswrapper[4998]: I0203 08:45:08.490955 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q955x\" (UniqueName: \"kubernetes.io/projected/6857c913-b07d-495c-a08f-66b849ce9d2d-kube-api-access-q955x\") pod \"dnsmasq-dns-7cbff95ccf-lcrch\" (UID: \"6857c913-b07d-495c-a08f-66b849ce9d2d\") " pod="openstack/dnsmasq-dns-7cbff95ccf-lcrch"
Feb 03 08:45:08 crc kubenswrapper[4998]: I0203 08:45:08.492108 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6857c913-b07d-495c-a08f-66b849ce9d2d-dns-svc\") pod \"dnsmasq-dns-7cbff95ccf-lcrch\" (UID: \"6857c913-b07d-495c-a08f-66b849ce9d2d\") " pod="openstack/dnsmasq-dns-7cbff95ccf-lcrch"
Feb 03 08:45:08 crc kubenswrapper[4998]: I0203 08:45:08.492117 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6857c913-b07d-495c-a08f-66b849ce9d2d-config\") pod \"dnsmasq-dns-7cbff95ccf-lcrch\" (UID: \"6857c913-b07d-495c-a08f-66b849ce9d2d\") " pod="openstack/dnsmasq-dns-7cbff95ccf-lcrch"
Feb 03 08:45:08 crc kubenswrapper[4998]: I0203 08:45:08.509582 4998 scope.go:117] "RemoveContainer" containerID="0655f84869b0708f9937ecebdf93f6bc9999c3516b089f5a3725f4d409840c75"
Feb 03 08:45:08 crc kubenswrapper[4998]: I0203 08:45:08.512178 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q955x\" (UniqueName: \"kubernetes.io/projected/6857c913-b07d-495c-a08f-66b849ce9d2d-kube-api-access-q955x\") pod \"dnsmasq-dns-7cbff95ccf-lcrch\" (UID: \"6857c913-b07d-495c-a08f-66b849ce9d2d\") " pod="openstack/dnsmasq-dns-7cbff95ccf-lcrch"
Feb 03 08:45:08 crc kubenswrapper[4998]: I0203 08:45:08.551027 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cbff95ccf-lcrch"
Feb 03 08:45:08 crc kubenswrapper[4998]: I0203 08:45:08.684445 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qlcmv"]
Feb 03 08:45:08 crc kubenswrapper[4998]: W0203 08:45:08.698266 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod196e37a5_e55b_4d18_be6b_411f40bcae01.slice/crio-6f519b280675b2a4b8a63a669c66fd8ea822ecfe736ec760771e2ad267004251 WatchSource:0}: Error finding container 6f519b280675b2a4b8a63a669c66fd8ea822ecfe736ec760771e2ad267004251: Status 404 returned error can't find the container with id 6f519b280675b2a4b8a63a669c66fd8ea822ecfe736ec760771e2ad267004251
Feb 03 08:45:08 crc kubenswrapper[4998]: I0203 08:45:08.916017 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Feb 03 08:45:09 crc kubenswrapper[4998]: I0203 08:45:09.003131 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7cbff95ccf-lcrch"]
Feb 03 08:45:09 crc kubenswrapper[4998]: W0203 08:45:09.011954 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6857c913_b07d_495c_a08f_66b849ce9d2d.slice/crio-787380ee41b3ebf79a4f419ae87260e69b4553c047e80adf8effdfee3059c892 WatchSource:0}: Error finding container 787380ee41b3ebf79a4f419ae87260e69b4553c047e80adf8effdfee3059c892: Status 404 returned error can't find the container with id 787380ee41b3ebf79a4f419ae87260e69b4553c047e80adf8effdfee3059c892
Feb 03 08:45:09 crc kubenswrapper[4998]: I0203 08:45:09.175188 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cbff95ccf-lcrch" event={"ID":"6857c913-b07d-495c-a08f-66b849ce9d2d","Type":"ContainerStarted","Data":"212ff08b5c037b58b878e2b9ae1bcf51aaa8891373180be2eea9a4c805d46374"}
Feb 03 08:45:09 crc kubenswrapper[4998]: I0203 08:45:09.175262 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cbff95ccf-lcrch" event={"ID":"6857c913-b07d-495c-a08f-66b849ce9d2d","Type":"ContainerStarted","Data":"787380ee41b3ebf79a4f419ae87260e69b4553c047e80adf8effdfee3059c892"}
Feb 03 08:45:09 crc kubenswrapper[4998]: I0203 08:45:09.177656 4998 generic.go:334] "Generic (PLEG): container finished" podID="196e37a5-e55b-4d18-be6b-411f40bcae01" containerID="53971702250e15457dd5a601a3dfd2d1776ce84886fe6be5ff9d468c71d3bc7a" exitCode=0
Feb 03 08:45:09 crc kubenswrapper[4998]: I0203 08:45:09.177743 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qlcmv" event={"ID":"196e37a5-e55b-4d18-be6b-411f40bcae01","Type":"ContainerDied","Data":"53971702250e15457dd5a601a3dfd2d1776ce84886fe6be5ff9d468c71d3bc7a"}
Feb 03 08:45:09 crc kubenswrapper[4998]: I0203 08:45:09.177769 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qlcmv" event={"ID":"196e37a5-e55b-4d18-be6b-411f40bcae01","Type":"ContainerStarted","Data":"6f519b280675b2a4b8a63a669c66fd8ea822ecfe736ec760771e2ad267004251"}
Feb 03 08:45:09 crc kubenswrapper[4998]: I0203 08:45:09.442917 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Feb 03 08:45:10 crc kubenswrapper[4998]: I0203 08:45:10.190162 4998 generic.go:334] "Generic (PLEG): container finished" podID="6857c913-b07d-495c-a08f-66b849ce9d2d" containerID="212ff08b5c037b58b878e2b9ae1bcf51aaa8891373180be2eea9a4c805d46374" exitCode=0
Feb 03 08:45:10 crc kubenswrapper[4998]: I0203 08:45:10.190331 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cbff95ccf-lcrch" event={"ID":"6857c913-b07d-495c-a08f-66b849ce9d2d","Type":"ContainerDied","Data":"212ff08b5c037b58b878e2b9ae1bcf51aaa8891373180be2eea9a4c805d46374"}
Feb 03 08:45:10 crc kubenswrapper[4998]: I0203 08:45:10.194766 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qlcmv" event={"ID":"196e37a5-e55b-4d18-be6b-411f40bcae01","Type":"ContainerStarted","Data":"0aae98dcd4676b792ee52e481b924bc01078d937be0bb26d760d93420a7d6525"}
Feb 03 08:45:10 crc kubenswrapper[4998]: I0203 08:45:10.768920 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="c22f1671-0f4d-4269-a228-c3abf1e9218f" containerName="rabbitmq" containerID="cri-o://a64241fe410f087b87032762a98bf576c032de31ee5018fefaaa4e5ef85ae973" gracePeriod=604799
Feb 03 08:45:11 crc kubenswrapper[4998]: I0203 08:45:11.202401 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cbff95ccf-lcrch" event={"ID":"6857c913-b07d-495c-a08f-66b849ce9d2d","Type":"ContainerStarted","Data":"416d0a895e0d7618c5bc0c009f606ed5836c0ba55ac8f2b0221a2c80b6b4dc83"}
Feb 03 08:45:11 crc kubenswrapper[4998]: I0203 08:45:11.202525 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7cbff95ccf-lcrch"
Feb 03 08:45:11 crc kubenswrapper[4998]: I0203 08:45:11.204487 4998 generic.go:334] "Generic (PLEG): container finished" podID="196e37a5-e55b-4d18-be6b-411f40bcae01" containerID="0aae98dcd4676b792ee52e481b924bc01078d937be0bb26d760d93420a7d6525" exitCode=0
Feb 03 08:45:11 crc kubenswrapper[4998]: I0203 08:45:11.204527 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qlcmv" event={"ID":"196e37a5-e55b-4d18-be6b-411f40bcae01","Type":"ContainerDied","Data":"0aae98dcd4676b792ee52e481b924bc01078d937be0bb26d760d93420a7d6525"}
Feb 03 08:45:11 crc kubenswrapper[4998]: I0203 08:45:11.225564 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7cbff95ccf-lcrch" podStartSLOduration=3.225544015 podStartE2EDuration="3.225544015s" podCreationTimestamp="2026-02-03 08:45:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 08:45:11.223535768 +0000 UTC m=+7149.510229604" watchObservedRunningTime="2026-02-03 08:45:11.225544015 +0000 UTC m=+7149.512237821"
Feb 03 08:45:11 crc kubenswrapper[4998]: I0203 08:45:11.286031 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="1aff891a-e736-4fbc-9c51-72a14116351e" containerName="rabbitmq" containerID="cri-o://e7e4017fec833f9544de0cd20ca4d56fc3435b44e15296a76c61a6f0301fc5d1" gracePeriod=604799
Feb 03 08:45:12 crc kubenswrapper[4998]: I0203 08:45:12.167371 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="c22f1671-0f4d-4269-a228-c3abf1e9218f" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.1.3:5672: connect: connection refused"
Feb 03 08:45:12 crc kubenswrapper[4998]: I0203 08:45:12.183320 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="1aff891a-e736-4fbc-9c51-72a14116351e" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.1.4:5672: connect: connection refused"
Feb 03 08:45:12 crc kubenswrapper[4998]: I0203 08:45:12.216601 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qlcmv" event={"ID":"196e37a5-e55b-4d18-be6b-411f40bcae01","Type":"ContainerStarted","Data":"ae547929488fc882671fd5f346fa9dd9d124a2362da05c33d14760644d29e4f7"}
Feb 03 08:45:12 crc kubenswrapper[4998]: I0203 08:45:12.237025 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-qlcmv" podStartSLOduration=2.786918406 podStartE2EDuration="5.237005532s" podCreationTimestamp="2026-02-03 08:45:07 +0000 UTC" firstStartedPulling="2026-02-03 08:45:09.179280788 +0000 UTC m=+7147.465974604" lastFinishedPulling="2026-02-03 08:45:11.629367924 +0000 UTC m=+7149.916061730" observedRunningTime="2026-02-03 08:45:12.235966843 +0000 UTC m=+7150.522660649" watchObservedRunningTime="2026-02-03 08:45:12.237005532 +0000 UTC m=+7150.523699338"
Feb 03 08:45:12 crc kubenswrapper[4998]: I0203 08:45:12.754028 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 03 08:45:12 crc kubenswrapper[4998]: I0203 08:45:12.754412 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.262264 4998 generic.go:334] "Generic (PLEG): container finished" podID="c22f1671-0f4d-4269-a228-c3abf1e9218f" containerID="a64241fe410f087b87032762a98bf576c032de31ee5018fefaaa4e5ef85ae973" exitCode=0
Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.262355 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c22f1671-0f4d-4269-a228-c3abf1e9218f","Type":"ContainerDied","Data":"a64241fe410f087b87032762a98bf576c032de31ee5018fefaaa4e5ef85ae973"}
Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.386202 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
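The two Readiness "Probe failed" entries above are plain TCP probes against the AMQP port 5672, failing with connection refused while the rabbitmq containers shut down under "Killing container with a grace period". A TCP readiness probe of this kind reduces to completing a handshake; a minimal equivalent, with the address taken from the log output rather than the pod spec and the timeout assumed:

package main

import (
	"fmt"
	"net"
	"time"
)

// probeTCP succeeds iff a TCP connection to addr can be established,
// which is all a TCP-socket readiness probe checks.
func probeTCP(addr string, timeout time.Duration) error {
	conn, err := net.DialTimeout("tcp", addr, timeout)
	if err != nil {
		return err // e.g. "dial tcp 10.217.1.3:5672: connect: connection refused"
	}
	return conn.Close()
}

func main() {
	if err := probeTCP("10.217.1.3:5672", time.Second); err != nil {
		fmt.Println("Readiness probe failed:", err)
	}
}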
Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.549398 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5g59l\" (UniqueName: \"kubernetes.io/projected/c22f1671-0f4d-4269-a228-c3abf1e9218f-kube-api-access-5g59l\") pod \"c22f1671-0f4d-4269-a228-c3abf1e9218f\" (UID: \"c22f1671-0f4d-4269-a228-c3abf1e9218f\") " Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.549477 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c22f1671-0f4d-4269-a228-c3abf1e9218f-pod-info\") pod \"c22f1671-0f4d-4269-a228-c3abf1e9218f\" (UID: \"c22f1671-0f4d-4269-a228-c3abf1e9218f\") " Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.549551 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c22f1671-0f4d-4269-a228-c3abf1e9218f-server-conf\") pod \"c22f1671-0f4d-4269-a228-c3abf1e9218f\" (UID: \"c22f1671-0f4d-4269-a228-c3abf1e9218f\") " Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.549594 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c22f1671-0f4d-4269-a228-c3abf1e9218f-rabbitmq-erlang-cookie\") pod \"c22f1671-0f4d-4269-a228-c3abf1e9218f\" (UID: \"c22f1671-0f4d-4269-a228-c3abf1e9218f\") " Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.549641 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c22f1671-0f4d-4269-a228-c3abf1e9218f-rabbitmq-plugins\") pod \"c22f1671-0f4d-4269-a228-c3abf1e9218f\" (UID: \"c22f1671-0f4d-4269-a228-c3abf1e9218f\") " Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.549695 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c22f1671-0f4d-4269-a228-c3abf1e9218f-plugins-conf\") pod \"c22f1671-0f4d-4269-a228-c3abf1e9218f\" (UID: \"c22f1671-0f4d-4269-a228-c3abf1e9218f\") " Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.549736 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c22f1671-0f4d-4269-a228-c3abf1e9218f-erlang-cookie-secret\") pod \"c22f1671-0f4d-4269-a228-c3abf1e9218f\" (UID: \"c22f1671-0f4d-4269-a228-c3abf1e9218f\") " Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.549908 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-67ebd6dd-048b-4d3c-a572-07cac8f8079c\") pod \"c22f1671-0f4d-4269-a228-c3abf1e9218f\" (UID: \"c22f1671-0f4d-4269-a228-c3abf1e9218f\") " Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.549971 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c22f1671-0f4d-4269-a228-c3abf1e9218f-rabbitmq-confd\") pod \"c22f1671-0f4d-4269-a228-c3abf1e9218f\" (UID: \"c22f1671-0f4d-4269-a228-c3abf1e9218f\") " Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.550161 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c22f1671-0f4d-4269-a228-c3abf1e9218f-rabbitmq-erlang-cookie" (OuterVolumeSpecName: 
"rabbitmq-erlang-cookie") pod "c22f1671-0f4d-4269-a228-c3abf1e9218f" (UID: "c22f1671-0f4d-4269-a228-c3abf1e9218f"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.550342 4998 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c22f1671-0f4d-4269-a228-c3abf1e9218f-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.550408 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c22f1671-0f4d-4269-a228-c3abf1e9218f-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "c22f1671-0f4d-4269-a228-c3abf1e9218f" (UID: "c22f1671-0f4d-4269-a228-c3abf1e9218f"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.550702 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c22f1671-0f4d-4269-a228-c3abf1e9218f-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "c22f1671-0f4d-4269-a228-c3abf1e9218f" (UID: "c22f1671-0f4d-4269-a228-c3abf1e9218f"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.557450 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c22f1671-0f4d-4269-a228-c3abf1e9218f-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "c22f1671-0f4d-4269-a228-c3abf1e9218f" (UID: "c22f1671-0f4d-4269-a228-c3abf1e9218f"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.559384 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/c22f1671-0f4d-4269-a228-c3abf1e9218f-pod-info" (OuterVolumeSpecName: "pod-info") pod "c22f1671-0f4d-4269-a228-c3abf1e9218f" (UID: "c22f1671-0f4d-4269-a228-c3abf1e9218f"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.564379 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-67ebd6dd-048b-4d3c-a572-07cac8f8079c" (OuterVolumeSpecName: "persistence") pod "c22f1671-0f4d-4269-a228-c3abf1e9218f" (UID: "c22f1671-0f4d-4269-a228-c3abf1e9218f"). InnerVolumeSpecName "pvc-67ebd6dd-048b-4d3c-a572-07cac8f8079c". PluginName "kubernetes.io/csi", VolumeGidValue "" Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.573737 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c22f1671-0f4d-4269-a228-c3abf1e9218f-kube-api-access-5g59l" (OuterVolumeSpecName: "kube-api-access-5g59l") pod "c22f1671-0f4d-4269-a228-c3abf1e9218f" (UID: "c22f1671-0f4d-4269-a228-c3abf1e9218f"). InnerVolumeSpecName "kube-api-access-5g59l". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.579745 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c22f1671-0f4d-4269-a228-c3abf1e9218f-server-conf" (OuterVolumeSpecName: "server-conf") pod "c22f1671-0f4d-4269-a228-c3abf1e9218f" (UID: "c22f1671-0f4d-4269-a228-c3abf1e9218f"). 
InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.635910 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c22f1671-0f4d-4269-a228-c3abf1e9218f-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "c22f1671-0f4d-4269-a228-c3abf1e9218f" (UID: "c22f1671-0f4d-4269-a228-c3abf1e9218f"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.651666 4998 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c22f1671-0f4d-4269-a228-c3abf1e9218f-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.651718 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5g59l\" (UniqueName: \"kubernetes.io/projected/c22f1671-0f4d-4269-a228-c3abf1e9218f-kube-api-access-5g59l\") on node \"crc\" DevicePath \"\"" Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.651735 4998 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c22f1671-0f4d-4269-a228-c3abf1e9218f-pod-info\") on node \"crc\" DevicePath \"\"" Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.651746 4998 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c22f1671-0f4d-4269-a228-c3abf1e9218f-server-conf\") on node \"crc\" DevicePath \"\"" Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.651759 4998 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c22f1671-0f4d-4269-a228-c3abf1e9218f-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.651771 4998 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c22f1671-0f4d-4269-a228-c3abf1e9218f-plugins-conf\") on node \"crc\" DevicePath \"\"" Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.651798 4998 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c22f1671-0f4d-4269-a228-c3abf1e9218f-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.651835 4998 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-67ebd6dd-048b-4d3c-a572-07cac8f8079c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-67ebd6dd-048b-4d3c-a572-07cac8f8079c\") on node \"crc\" " Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.668280 4998 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.668467 4998 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-67ebd6dd-048b-4d3c-a572-07cac8f8079c" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-67ebd6dd-048b-4d3c-a572-07cac8f8079c") on node "crc"
Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.752976 4998 reconciler_common.go:293] "Volume detached for volume \"pvc-67ebd6dd-048b-4d3c-a572-07cac8f8079c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-67ebd6dd-048b-4d3c-a572-07cac8f8079c\") on node \"crc\" DevicePath \"\""
Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.768572 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.957857 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1aff891a-e736-4fbc-9c51-72a14116351e-plugins-conf\") pod \"1aff891a-e736-4fbc-9c51-72a14116351e\" (UID: \"1aff891a-e736-4fbc-9c51-72a14116351e\") "
Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.958115 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1aff891a-e736-4fbc-9c51-72a14116351e-server-conf\") pod \"1aff891a-e736-4fbc-9c51-72a14116351e\" (UID: \"1aff891a-e736-4fbc-9c51-72a14116351e\") "
Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.958281 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2466990-2f77-4f99-bf97-2478fc380d44\") pod \"1aff891a-e736-4fbc-9c51-72a14116351e\" (UID: \"1aff891a-e736-4fbc-9c51-72a14116351e\") "
Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.958457 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1aff891a-e736-4fbc-9c51-72a14116351e-erlang-cookie-secret\") pod \"1aff891a-e736-4fbc-9c51-72a14116351e\" (UID: \"1aff891a-e736-4fbc-9c51-72a14116351e\") "
Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.958537 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1aff891a-e736-4fbc-9c51-72a14116351e-rabbitmq-plugins\") pod \"1aff891a-e736-4fbc-9c51-72a14116351e\" (UID: \"1aff891a-e736-4fbc-9c51-72a14116351e\") "
Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.958649 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1aff891a-e736-4fbc-9c51-72a14116351e-rabbitmq-erlang-cookie\") pod \"1aff891a-e736-4fbc-9c51-72a14116351e\" (UID: \"1aff891a-e736-4fbc-9c51-72a14116351e\") "
Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.958667 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1aff891a-e736-4fbc-9c51-72a14116351e-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "1aff891a-e736-4fbc-9c51-72a14116351e" (UID: "1aff891a-e736-4fbc-9c51-72a14116351e"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.958755 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1aff891a-e736-4fbc-9c51-72a14116351e-pod-info\") pod \"1aff891a-e736-4fbc-9c51-72a14116351e\" (UID: \"1aff891a-e736-4fbc-9c51-72a14116351e\") "
Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.958949 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1aff891a-e736-4fbc-9c51-72a14116351e-rabbitmq-confd\") pod \"1aff891a-e736-4fbc-9c51-72a14116351e\" (UID: \"1aff891a-e736-4fbc-9c51-72a14116351e\") "
Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.959001 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sgkkk\" (UniqueName: \"kubernetes.io/projected/1aff891a-e736-4fbc-9c51-72a14116351e-kube-api-access-sgkkk\") pod \"1aff891a-e736-4fbc-9c51-72a14116351e\" (UID: \"1aff891a-e736-4fbc-9c51-72a14116351e\") "
Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.959027 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1aff891a-e736-4fbc-9c51-72a14116351e-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "1aff891a-e736-4fbc-9c51-72a14116351e" (UID: "1aff891a-e736-4fbc-9c51-72a14116351e"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.959439 4998 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1aff891a-e736-4fbc-9c51-72a14116351e-plugins-conf\") on node \"crc\" DevicePath \"\""
Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.959479 4998 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1aff891a-e736-4fbc-9c51-72a14116351e-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.960132 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1aff891a-e736-4fbc-9c51-72a14116351e-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "1aff891a-e736-4fbc-9c51-72a14116351e" (UID: "1aff891a-e736-4fbc-9c51-72a14116351e"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.964499 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1aff891a-e736-4fbc-9c51-72a14116351e-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "1aff891a-e736-4fbc-9c51-72a14116351e" (UID: "1aff891a-e736-4fbc-9c51-72a14116351e"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.964740 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/1aff891a-e736-4fbc-9c51-72a14116351e-pod-info" (OuterVolumeSpecName: "pod-info") pod "1aff891a-e736-4fbc-9c51-72a14116351e" (UID: "1aff891a-e736-4fbc-9c51-72a14116351e"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.965541 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1aff891a-e736-4fbc-9c51-72a14116351e-kube-api-access-sgkkk" (OuterVolumeSpecName: "kube-api-access-sgkkk") pod "1aff891a-e736-4fbc-9c51-72a14116351e" (UID: "1aff891a-e736-4fbc-9c51-72a14116351e"). InnerVolumeSpecName "kube-api-access-sgkkk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.975962 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2466990-2f77-4f99-bf97-2478fc380d44" (OuterVolumeSpecName: "persistence") pod "1aff891a-e736-4fbc-9c51-72a14116351e" (UID: "1aff891a-e736-4fbc-9c51-72a14116351e"). InnerVolumeSpecName "pvc-c2466990-2f77-4f99-bf97-2478fc380d44". PluginName "kubernetes.io/csi", VolumeGidValue ""
Feb 03 08:45:17 crc kubenswrapper[4998]: I0203 08:45:17.979183 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1aff891a-e736-4fbc-9c51-72a14116351e-server-conf" (OuterVolumeSpecName: "server-conf") pod "1aff891a-e736-4fbc-9c51-72a14116351e" (UID: "1aff891a-e736-4fbc-9c51-72a14116351e"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.037037 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1aff891a-e736-4fbc-9c51-72a14116351e-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "1aff891a-e736-4fbc-9c51-72a14116351e" (UID: "1aff891a-e736-4fbc-9c51-72a14116351e"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.067391 4998 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1aff891a-e736-4fbc-9c51-72a14116351e-server-conf\") on node \"crc\" DevicePath \"\""
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.067471 4998 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-c2466990-2f77-4f99-bf97-2478fc380d44\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2466990-2f77-4f99-bf97-2478fc380d44\") on node \"crc\" "
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.067496 4998 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1aff891a-e736-4fbc-9c51-72a14116351e-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.067527 4998 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1aff891a-e736-4fbc-9c51-72a14116351e-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.067553 4998 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1aff891a-e736-4fbc-9c51-72a14116351e-pod-info\") on node \"crc\" DevicePath \"\""
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.067571 4998 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1aff891a-e736-4fbc-9c51-72a14116351e-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.067590 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sgkkk\" (UniqueName: \"kubernetes.io/projected/1aff891a-e736-4fbc-9c51-72a14116351e-kube-api-access-sgkkk\") on node \"crc\" DevicePath \"\""
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.094410 4998 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice...
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.094620 4998 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-c2466990-2f77-4f99-bf97-2478fc380d44" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2466990-2f77-4f99-bf97-2478fc380d44") on node "crc"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.153293 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-qlcmv"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.153348 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-qlcmv"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.168436 4998 reconciler_common.go:293] "Volume detached for volume \"pvc-c2466990-2f77-4f99-bf97-2478fc380d44\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2466990-2f77-4f99-bf97-2478fc380d44\") on node \"crc\" DevicePath \"\""
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.217135 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-qlcmv"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.272621 4998 generic.go:334] "Generic (PLEG): container finished" podID="1aff891a-e736-4fbc-9c51-72a14116351e" containerID="e7e4017fec833f9544de0cd20ca4d56fc3435b44e15296a76c61a6f0301fc5d1" exitCode=0
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.272688 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1aff891a-e736-4fbc-9c51-72a14116351e","Type":"ContainerDied","Data":"e7e4017fec833f9544de0cd20ca4d56fc3435b44e15296a76c61a6f0301fc5d1"}
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.272714 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1aff891a-e736-4fbc-9c51-72a14116351e","Type":"ContainerDied","Data":"02fd463802370cf64845d1ab8012587cb2369ccbc04c2c6d175aa6769915a253"}
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.272711 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.272755 4998 scope.go:117] "RemoveContainer" containerID="e7e4017fec833f9544de0cd20ca4d56fc3435b44e15296a76c61a6f0301fc5d1"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.278426 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c22f1671-0f4d-4269-a228-c3abf1e9218f","Type":"ContainerDied","Data":"d8dc26ec6f1191711cc40a826a33061eb3f3ed1214fd75ffa016da7c18cb531e"}
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.278460 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.335308 4998 scope.go:117] "RemoveContainer" containerID="a45fe8ae2d8b6ad12dfa56fbd069e6eb56610578cb1df9130ba4862a4edb0938"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.349560 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.364843 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.403950 4998 scope.go:117] "RemoveContainer" containerID="e7e4017fec833f9544de0cd20ca4d56fc3435b44e15296a76c61a6f0301fc5d1"
Feb 03 08:45:18 crc kubenswrapper[4998]: E0203 08:45:18.405352 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7e4017fec833f9544de0cd20ca4d56fc3435b44e15296a76c61a6f0301fc5d1\": container with ID starting with e7e4017fec833f9544de0cd20ca4d56fc3435b44e15296a76c61a6f0301fc5d1 not found: ID does not exist" containerID="e7e4017fec833f9544de0cd20ca4d56fc3435b44e15296a76c61a6f0301fc5d1"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.405390 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7e4017fec833f9544de0cd20ca4d56fc3435b44e15296a76c61a6f0301fc5d1"} err="failed to get container status \"e7e4017fec833f9544de0cd20ca4d56fc3435b44e15296a76c61a6f0301fc5d1\": rpc error: code = NotFound desc = could not find container \"e7e4017fec833f9544de0cd20ca4d56fc3435b44e15296a76c61a6f0301fc5d1\": container with ID starting with e7e4017fec833f9544de0cd20ca4d56fc3435b44e15296a76c61a6f0301fc5d1 not found: ID does not exist"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.405416 4998 scope.go:117] "RemoveContainer" containerID="a45fe8ae2d8b6ad12dfa56fbd069e6eb56610578cb1df9130ba4862a4edb0938"
Feb 03 08:45:18 crc kubenswrapper[4998]: E0203 08:45:18.408911 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a45fe8ae2d8b6ad12dfa56fbd069e6eb56610578cb1df9130ba4862a4edb0938\": container with ID starting with a45fe8ae2d8b6ad12dfa56fbd069e6eb56610578cb1df9130ba4862a4edb0938 not found: ID does not exist" containerID="a45fe8ae2d8b6ad12dfa56fbd069e6eb56610578cb1df9130ba4862a4edb0938"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.409035 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a45fe8ae2d8b6ad12dfa56fbd069e6eb56610578cb1df9130ba4862a4edb0938"} err="failed to get container status \"a45fe8ae2d8b6ad12dfa56fbd069e6eb56610578cb1df9130ba4862a4edb0938\": rpc error: code = NotFound desc = could not find container \"a45fe8ae2d8b6ad12dfa56fbd069e6eb56610578cb1df9130ba4862a4edb0938\": container with ID starting with a45fe8ae2d8b6ad12dfa56fbd069e6eb56610578cb1df9130ba4862a4edb0938 not found: ID does not exist"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.409122 4998 scope.go:117] "RemoveContainer" containerID="a64241fe410f087b87032762a98bf576c032de31ee5018fefaaa4e5ef85ae973"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.424235 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.425278 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-qlcmv"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.499863 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1aff891a-e736-4fbc-9c51-72a14116351e" path="/var/lib/kubelet/pods/1aff891a-e736-4fbc-9c51-72a14116351e/volumes"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.500427 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Feb 03 08:45:18 crc kubenswrapper[4998]: E0203 08:45:18.500665 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1aff891a-e736-4fbc-9c51-72a14116351e" containerName="setup-container"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.500676 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="1aff891a-e736-4fbc-9c51-72a14116351e" containerName="setup-container"
Feb 03 08:45:18 crc kubenswrapper[4998]: E0203 08:45:18.500690 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1aff891a-e736-4fbc-9c51-72a14116351e" containerName="rabbitmq"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.500696 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="1aff891a-e736-4fbc-9c51-72a14116351e" containerName="rabbitmq"
Feb 03 08:45:18 crc kubenswrapper[4998]: E0203 08:45:18.500706 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c22f1671-0f4d-4269-a228-c3abf1e9218f" containerName="setup-container"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.500711 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="c22f1671-0f4d-4269-a228-c3abf1e9218f" containerName="setup-container"
Feb 03 08:45:18 crc kubenswrapper[4998]: E0203 08:45:18.500721 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c22f1671-0f4d-4269-a228-c3abf1e9218f" containerName="rabbitmq"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.500726 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="c22f1671-0f4d-4269-a228-c3abf1e9218f" containerName="rabbitmq"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.500890 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="1aff891a-e736-4fbc-9c51-72a14116351e" containerName="rabbitmq"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.500901 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="c22f1671-0f4d-4269-a228-c3abf1e9218f" containerName="rabbitmq"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.501588 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"]
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.501608 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.501618 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.503971 4998 scope.go:117] "RemoveContainer" containerID="a983e50daf28ae63131eccdb607204a895f6e3e2902e1d84d24afb7903da9093"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.505265 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.509436 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.509743 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.523246 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.523493 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.523744 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.523938 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.523967 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.524132 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.524212 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.524401 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-7jcs5"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.531394 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qlcmv"]
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.532740 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-bltb9"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.533066 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.556893 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7cbff95ccf-lcrch"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.642924 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-c657cd4d9-4bm4m"]
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.643373 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-c657cd4d9-4bm4m" podUID="db49e31a-f281-4aa3-9fd1-c6609141ed0e" containerName="dnsmasq-dns" containerID="cri-o://d65dbd6a4b753403da9dde96f40e2e807c835c32a71e2d2731505135dcd2dc40" gracePeriod=10
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.683472 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-67ebd6dd-048b-4d3c-a572-07cac8f8079c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-67ebd6dd-048b-4d3c-a572-07cac8f8079c\") pod \"rabbitmq-server-0\" (UID: \"474e078d-6891-4062-a084-5208c534b46a\") " pod="openstack/rabbitmq-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.683539 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/474e078d-6891-4062-a084-5208c534b46a-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"474e078d-6891-4062-a084-5208c534b46a\") " pod="openstack/rabbitmq-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.683590 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/474e078d-6891-4062-a084-5208c534b46a-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"474e078d-6891-4062-a084-5208c534b46a\") " pod="openstack/rabbitmq-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.683657 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/06d9046e-6151-41df-a973-abc9673c43e8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"06d9046e-6151-41df-a973-abc9673c43e8\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.683678 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/06d9046e-6151-41df-a973-abc9673c43e8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"06d9046e-6151-41df-a973-abc9673c43e8\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.683697 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/06d9046e-6151-41df-a973-abc9673c43e8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"06d9046e-6151-41df-a973-abc9673c43e8\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.683717 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/474e078d-6891-4062-a084-5208c534b46a-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"474e078d-6891-4062-a084-5208c534b46a\") " pod="openstack/rabbitmq-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.684312 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/474e078d-6891-4062-a084-5208c534b46a-server-conf\") pod \"rabbitmq-server-0\" (UID: \"474e078d-6891-4062-a084-5208c534b46a\") " pod="openstack/rabbitmq-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.684378 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c2466990-2f77-4f99-bf97-2478fc380d44\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2466990-2f77-4f99-bf97-2478fc380d44\") pod \"rabbitmq-cell1-server-0\" (UID: \"06d9046e-6151-41df-a973-abc9673c43e8\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.684408 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/474e078d-6891-4062-a084-5208c534b46a-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"474e078d-6891-4062-a084-5208c534b46a\") " pod="openstack/rabbitmq-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.684435 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/474e078d-6891-4062-a084-5208c534b46a-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"474e078d-6891-4062-a084-5208c534b46a\") " pod="openstack/rabbitmq-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.684474 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/06d9046e-6151-41df-a973-abc9673c43e8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"06d9046e-6151-41df-a973-abc9673c43e8\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.684492 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/06d9046e-6151-41df-a973-abc9673c43e8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"06d9046e-6151-41df-a973-abc9673c43e8\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.684510 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/474e078d-6891-4062-a084-5208c534b46a-pod-info\") pod \"rabbitmq-server-0\" (UID: \"474e078d-6891-4062-a084-5208c534b46a\") " pod="openstack/rabbitmq-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.684527 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j59mw\" (UniqueName: \"kubernetes.io/projected/474e078d-6891-4062-a084-5208c534b46a-kube-api-access-j59mw\") pod \"rabbitmq-server-0\" (UID: \"474e078d-6891-4062-a084-5208c534b46a\") " pod="openstack/rabbitmq-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.684582 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/06d9046e-6151-41df-a973-abc9673c43e8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"06d9046e-6151-41df-a973-abc9673c43e8\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.684601 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/06d9046e-6151-41df-a973-abc9673c43e8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"06d9046e-6151-41df-a973-abc9673c43e8\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.684627 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dz9dw\" (UniqueName: \"kubernetes.io/projected/06d9046e-6151-41df-a973-abc9673c43e8-kube-api-access-dz9dw\") pod \"rabbitmq-cell1-server-0\" (UID: \"06d9046e-6151-41df-a973-abc9673c43e8\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.785611 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/474e078d-6891-4062-a084-5208c534b46a-server-conf\") pod \"rabbitmq-server-0\" (UID: \"474e078d-6891-4062-a084-5208c534b46a\") " pod="openstack/rabbitmq-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.785664 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c2466990-2f77-4f99-bf97-2478fc380d44\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2466990-2f77-4f99-bf97-2478fc380d44\") pod \"rabbitmq-cell1-server-0\" (UID: \"06d9046e-6151-41df-a973-abc9673c43e8\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.785691 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/474e078d-6891-4062-a084-5208c534b46a-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"474e078d-6891-4062-a084-5208c534b46a\") " pod="openstack/rabbitmq-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.785708 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/474e078d-6891-4062-a084-5208c534b46a-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"474e078d-6891-4062-a084-5208c534b46a\") " pod="openstack/rabbitmq-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.785726 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/06d9046e-6151-41df-a973-abc9673c43e8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"06d9046e-6151-41df-a973-abc9673c43e8\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.785744 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/06d9046e-6151-41df-a973-abc9673c43e8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"06d9046e-6151-41df-a973-abc9673c43e8\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.785760 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/474e078d-6891-4062-a084-5208c534b46a-pod-info\") pod \"rabbitmq-server-0\" (UID: \"474e078d-6891-4062-a084-5208c534b46a\") " pod="openstack/rabbitmq-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.785776 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j59mw\" (UniqueName: \"kubernetes.io/projected/474e078d-6891-4062-a084-5208c534b46a-kube-api-access-j59mw\") pod \"rabbitmq-server-0\" (UID: \"474e078d-6891-4062-a084-5208c534b46a\") " pod="openstack/rabbitmq-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.785818 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/06d9046e-6151-41df-a973-abc9673c43e8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"06d9046e-6151-41df-a973-abc9673c43e8\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.785833 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/06d9046e-6151-41df-a973-abc9673c43e8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"06d9046e-6151-41df-a973-abc9673c43e8\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.785853 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dz9dw\" (UniqueName: \"kubernetes.io/projected/06d9046e-6151-41df-a973-abc9673c43e8-kube-api-access-dz9dw\") pod \"rabbitmq-cell1-server-0\" (UID: \"06d9046e-6151-41df-a973-abc9673c43e8\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.785917 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-67ebd6dd-048b-4d3c-a572-07cac8f8079c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-67ebd6dd-048b-4d3c-a572-07cac8f8079c\") pod \"rabbitmq-server-0\" (UID: \"474e078d-6891-4062-a084-5208c534b46a\") " pod="openstack/rabbitmq-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.785940 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/474e078d-6891-4062-a084-5208c534b46a-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"474e078d-6891-4062-a084-5208c534b46a\") " pod="openstack/rabbitmq-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.785962 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/474e078d-6891-4062-a084-5208c534b46a-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"474e078d-6891-4062-a084-5208c534b46a\") " pod="openstack/rabbitmq-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.786008 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/06d9046e-6151-41df-a973-abc9673c43e8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"06d9046e-6151-41df-a973-abc9673c43e8\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.786029 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/06d9046e-6151-41df-a973-abc9673c43e8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"06d9046e-6151-41df-a973-abc9673c43e8\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.786044 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/06d9046e-6151-41df-a973-abc9673c43e8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"06d9046e-6151-41df-a973-abc9673c43e8\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.786059 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/474e078d-6891-4062-a084-5208c534b46a-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"474e078d-6891-4062-a084-5208c534b46a\") " pod="openstack/rabbitmq-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.786975 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/474e078d-6891-4062-a084-5208c534b46a-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"474e078d-6891-4062-a084-5208c534b46a\") " pod="openstack/rabbitmq-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.787949 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/06d9046e-6151-41df-a973-abc9673c43e8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"06d9046e-6151-41df-a973-abc9673c43e8\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.788289 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/474e078d-6891-4062-a084-5208c534b46a-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"474e078d-6891-4062-a084-5208c534b46a\") " pod="openstack/rabbitmq-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.788560 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/06d9046e-6151-41df-a973-abc9673c43e8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"06d9046e-6151-41df-a973-abc9673c43e8\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.788636 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/06d9046e-6151-41df-a973-abc9673c43e8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"06d9046e-6151-41df-a973-abc9673c43e8\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.789048 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/474e078d-6891-4062-a084-5208c534b46a-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"474e078d-6891-4062-a084-5208c534b46a\") " pod="openstack/rabbitmq-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.791278 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/474e078d-6891-4062-a084-5208c534b46a-server-conf\") pod \"rabbitmq-server-0\" (UID: \"474e078d-6891-4062-a084-5208c534b46a\") " pod="openstack/rabbitmq-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.791298 4998 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.791357 4998 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c2466990-2f77-4f99-bf97-2478fc380d44\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2466990-2f77-4f99-bf97-2478fc380d44\") pod \"rabbitmq-cell1-server-0\" (UID: \"06d9046e-6151-41df-a973-abc9673c43e8\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/d7aead48fbc508e2050a43f49d3b585333dd1d952f921244a9e529be1ad839c5/globalmount\"" pod="openstack/rabbitmq-cell1-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.791487 4998 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.791520 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/06d9046e-6151-41df-a973-abc9673c43e8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"06d9046e-6151-41df-a973-abc9673c43e8\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.791486 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/06d9046e-6151-41df-a973-abc9673c43e8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"06d9046e-6151-41df-a973-abc9673c43e8\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.791522 4998 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-67ebd6dd-048b-4d3c-a572-07cac8f8079c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-67ebd6dd-048b-4d3c-a572-07cac8f8079c\") pod \"rabbitmq-server-0\" (UID: \"474e078d-6891-4062-a084-5208c534b46a\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/31889b4e09366a8fa506461cb9ec6a44eb08e879ca8b66dd392a980651249a27/globalmount\"" pod="openstack/rabbitmq-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.791555 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/474e078d-6891-4062-a084-5208c534b46a-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"474e078d-6891-4062-a084-5208c534b46a\") " pod="openstack/rabbitmq-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.791993 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/474e078d-6891-4062-a084-5208c534b46a-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"474e078d-6891-4062-a084-5208c534b46a\") " pod="openstack/rabbitmq-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.796520 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/06d9046e-6151-41df-a973-abc9673c43e8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"06d9046e-6151-41df-a973-abc9673c43e8\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.802670 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/06d9046e-6151-41df-a973-abc9673c43e8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"06d9046e-6151-41df-a973-abc9673c43e8\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.803155 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dz9dw\" (UniqueName: \"kubernetes.io/projected/06d9046e-6151-41df-a973-abc9673c43e8-kube-api-access-dz9dw\") pod \"rabbitmq-cell1-server-0\" (UID: \"06d9046e-6151-41df-a973-abc9673c43e8\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.804520 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j59mw\" (UniqueName: \"kubernetes.io/projected/474e078d-6891-4062-a084-5208c534b46a-kube-api-access-j59mw\") pod \"rabbitmq-server-0\" (UID: \"474e078d-6891-4062-a084-5208c534b46a\") " pod="openstack/rabbitmq-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.807561 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/474e078d-6891-4062-a084-5208c534b46a-pod-info\") pod \"rabbitmq-server-0\" (UID: \"474e078d-6891-4062-a084-5208c534b46a\") " pod="openstack/rabbitmq-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.831459 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c2466990-2f77-4f99-bf97-2478fc380d44\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2466990-2f77-4f99-bf97-2478fc380d44\") pod \"rabbitmq-cell1-server-0\" (UID: \"06d9046e-6151-41df-a973-abc9673c43e8\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.844531 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-67ebd6dd-048b-4d3c-a572-07cac8f8079c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-67ebd6dd-048b-4d3c-a572-07cac8f8079c\") pod \"rabbitmq-server-0\" (UID: \"474e078d-6891-4062-a084-5208c534b46a\") " pod="openstack/rabbitmq-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.918129 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Feb 03 08:45:18 crc kubenswrapper[4998]: I0203 08:45:18.925312 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Feb 03 08:45:19 crc kubenswrapper[4998]: I0203 08:45:19.096314 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-c657cd4d9-4bm4m"
Feb 03 08:45:19 crc kubenswrapper[4998]: I0203 08:45:19.195259 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ltls9\" (UniqueName: \"kubernetes.io/projected/db49e31a-f281-4aa3-9fd1-c6609141ed0e-kube-api-access-ltls9\") pod \"db49e31a-f281-4aa3-9fd1-c6609141ed0e\" (UID: \"db49e31a-f281-4aa3-9fd1-c6609141ed0e\") "
Feb 03 08:45:19 crc kubenswrapper[4998]: I0203 08:45:19.195334 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db49e31a-f281-4aa3-9fd1-c6609141ed0e-config\") pod \"db49e31a-f281-4aa3-9fd1-c6609141ed0e\" (UID: \"db49e31a-f281-4aa3-9fd1-c6609141ed0e\") "
Feb 03 08:45:19 crc kubenswrapper[4998]: I0203 08:45:19.195371 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/db49e31a-f281-4aa3-9fd1-c6609141ed0e-dns-svc\") pod \"db49e31a-f281-4aa3-9fd1-c6609141ed0e\" (UID: \"db49e31a-f281-4aa3-9fd1-c6609141ed0e\") "
Feb 03 08:45:19 crc kubenswrapper[4998]: I0203 08:45:19.201421 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db49e31a-f281-4aa3-9fd1-c6609141ed0e-kube-api-access-ltls9" (OuterVolumeSpecName: "kube-api-access-ltls9") pod "db49e31a-f281-4aa3-9fd1-c6609141ed0e" (UID: "db49e31a-f281-4aa3-9fd1-c6609141ed0e"). InnerVolumeSpecName "kube-api-access-ltls9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 08:45:19 crc kubenswrapper[4998]: I0203 08:45:19.231447 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db49e31a-f281-4aa3-9fd1-c6609141ed0e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "db49e31a-f281-4aa3-9fd1-c6609141ed0e" (UID: "db49e31a-f281-4aa3-9fd1-c6609141ed0e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 03 08:45:19 crc kubenswrapper[4998]: I0203 08:45:19.238834 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db49e31a-f281-4aa3-9fd1-c6609141ed0e-config" (OuterVolumeSpecName: "config") pod "db49e31a-f281-4aa3-9fd1-c6609141ed0e" (UID: "db49e31a-f281-4aa3-9fd1-c6609141ed0e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 03 08:45:19 crc kubenswrapper[4998]: I0203 08:45:19.288267 4998 generic.go:334] "Generic (PLEG): container finished" podID="db49e31a-f281-4aa3-9fd1-c6609141ed0e" containerID="d65dbd6a4b753403da9dde96f40e2e807c835c32a71e2d2731505135dcd2dc40" exitCode=0
Feb 03 08:45:19 crc kubenswrapper[4998]: I0203 08:45:19.288339 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-c657cd4d9-4bm4m"
Feb 03 08:45:19 crc kubenswrapper[4998]: I0203 08:45:19.288339 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c657cd4d9-4bm4m" event={"ID":"db49e31a-f281-4aa3-9fd1-c6609141ed0e","Type":"ContainerDied","Data":"d65dbd6a4b753403da9dde96f40e2e807c835c32a71e2d2731505135dcd2dc40"}
Feb 03 08:45:19 crc kubenswrapper[4998]: I0203 08:45:19.288384 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c657cd4d9-4bm4m" event={"ID":"db49e31a-f281-4aa3-9fd1-c6609141ed0e","Type":"ContainerDied","Data":"e2a72e09745e52e4be947c258331107eb394141f6584a150ee7bab06ef09b438"}
Feb 03 08:45:19 crc kubenswrapper[4998]: I0203 08:45:19.288402 4998 scope.go:117] "RemoveContainer" containerID="d65dbd6a4b753403da9dde96f40e2e807c835c32a71e2d2731505135dcd2dc40"
Feb 03 08:45:19 crc kubenswrapper[4998]: I0203 08:45:19.296972 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ltls9\" (UniqueName: \"kubernetes.io/projected/db49e31a-f281-4aa3-9fd1-c6609141ed0e-kube-api-access-ltls9\") on node \"crc\" DevicePath \"\""
Feb 03 08:45:19 crc kubenswrapper[4998]: I0203 08:45:19.297002 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db49e31a-f281-4aa3-9fd1-c6609141ed0e-config\") on node \"crc\" DevicePath \"\""
Feb 03 08:45:19 crc kubenswrapper[4998]: I0203 08:45:19.297013 4998 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/db49e31a-f281-4aa3-9fd1-c6609141ed0e-dns-svc\") on node \"crc\" DevicePath \"\""
Feb 03 08:45:19 crc kubenswrapper[4998]: I0203 08:45:19.306583 4998 scope.go:117] "RemoveContainer" containerID="8a4c2d6241e2040ad8d36785c7dd338f4cec7d25edfd993bb25e29b8f67a4901"
Feb 03 08:45:19 crc kubenswrapper[4998]: I0203 08:45:19.326592 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-c657cd4d9-4bm4m"]
Feb 03 08:45:19 crc kubenswrapper[4998]: I0203 08:45:19.330452 4998 scope.go:117] "RemoveContainer" containerID="d65dbd6a4b753403da9dde96f40e2e807c835c32a71e2d2731505135dcd2dc40"
Feb 03 08:45:19 crc kubenswrapper[4998]: E0203 08:45:19.331283 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d65dbd6a4b753403da9dde96f40e2e807c835c32a71e2d2731505135dcd2dc40\": container with ID starting with d65dbd6a4b753403da9dde96f40e2e807c835c32a71e2d2731505135dcd2dc40 not found: ID does not exist" containerID="d65dbd6a4b753403da9dde96f40e2e807c835c32a71e2d2731505135dcd2dc40"
Feb 03 08:45:19 crc kubenswrapper[4998]: I0203 08:45:19.331333 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d65dbd6a4b753403da9dde96f40e2e807c835c32a71e2d2731505135dcd2dc40"} err="failed to get container status \"d65dbd6a4b753403da9dde96f40e2e807c835c32a71e2d2731505135dcd2dc40\": rpc error: code = NotFound desc = could not find container \"d65dbd6a4b753403da9dde96f40e2e807c835c32a71e2d2731505135dcd2dc40\": container with ID starting with d65dbd6a4b753403da9dde96f40e2e807c835c32a71e2d2731505135dcd2dc40 not found: ID does not exist"
Feb 03 08:45:19 crc kubenswrapper[4998]: I0203 08:45:19.331366 4998 scope.go:117] "RemoveContainer" containerID="8a4c2d6241e2040ad8d36785c7dd338f4cec7d25edfd993bb25e29b8f67a4901"
Feb 03 08:45:19 crc kubenswrapper[4998]: E0203 08:45:19.331983 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a4c2d6241e2040ad8d36785c7dd338f4cec7d25edfd993bb25e29b8f67a4901\": container with ID starting with 8a4c2d6241e2040ad8d36785c7dd338f4cec7d25edfd993bb25e29b8f67a4901 not found: ID does not exist" containerID="8a4c2d6241e2040ad8d36785c7dd338f4cec7d25edfd993bb25e29b8f67a4901"
Feb 03 08:45:19 crc kubenswrapper[4998]: I0203 08:45:19.332027 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a4c2d6241e2040ad8d36785c7dd338f4cec7d25edfd993bb25e29b8f67a4901"} err="failed to get container status \"8a4c2d6241e2040ad8d36785c7dd338f4cec7d25edfd993bb25e29b8f67a4901\": rpc error: code = NotFound desc = could not find container \"8a4c2d6241e2040ad8d36785c7dd338f4cec7d25edfd993bb25e29b8f67a4901\": container with ID starting with 8a4c2d6241e2040ad8d36785c7dd338f4cec7d25edfd993bb25e29b8f67a4901 not found: ID does not exist"
Feb 03 08:45:19 crc kubenswrapper[4998]: I0203 08:45:19.332254 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-c657cd4d9-4bm4m"]
Feb 03 08:45:19 crc kubenswrapper[4998]: I0203 08:45:19.414827 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Feb 03 08:45:19 crc kubenswrapper[4998]: I0203 08:45:19.462595 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Feb 03 08:45:19 crc kubenswrapper[4998]: W0203 08:45:19.469012 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod06d9046e_6151_41df_a973_abc9673c43e8.slice/crio-9b830ca064c156ec09d7c27cc14a61a7bbc7c2da641bf9bd5f5ea5d79fc9dd57 WatchSource:0}: Error finding container 9b830ca064c156ec09d7c27cc14a61a7bbc7c2da641bf9bd5f5ea5d79fc9dd57: Status 404 returned error can't find the container with id 9b830ca064c156ec09d7c27cc14a61a7bbc7c2da641bf9bd5f5ea5d79fc9dd57
Feb 03 08:45:20 crc kubenswrapper[4998]: I0203 08:45:20.298805 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"06d9046e-6151-41df-a973-abc9673c43e8","Type":"ContainerStarted","Data":"9b830ca064c156ec09d7c27cc14a61a7bbc7c2da641bf9bd5f5ea5d79fc9dd57"}
Feb 03 08:45:20 crc kubenswrapper[4998]: I0203 08:45:20.310282 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"474e078d-6891-4062-a084-5208c534b46a","Type":"ContainerStarted","Data":"565e0539ca1a328d034c80b9829642a541df2d72f26c7fb6e158c5c1aae8f3f1"}
Feb 03 08:45:20 crc kubenswrapper[4998]: I0203 08:45:20.312212 4998 kuberuntime_container.go:808] "Killing container
with a grace period" pod="openshift-marketplace/community-operators-qlcmv" podUID="196e37a5-e55b-4d18-be6b-411f40bcae01" containerName="registry-server" containerID="cri-o://ae547929488fc882671fd5f346fa9dd9d124a2362da05c33d14760644d29e4f7" gracePeriod=2 Feb 03 08:45:20 crc kubenswrapper[4998]: I0203 08:45:20.441531 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c22f1671-0f4d-4269-a228-c3abf1e9218f" path="/var/lib/kubelet/pods/c22f1671-0f4d-4269-a228-c3abf1e9218f/volumes" Feb 03 08:45:20 crc kubenswrapper[4998]: I0203 08:45:20.442633 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db49e31a-f281-4aa3-9fd1-c6609141ed0e" path="/var/lib/kubelet/pods/db49e31a-f281-4aa3-9fd1-c6609141ed0e/volumes" Feb 03 08:45:20 crc kubenswrapper[4998]: I0203 08:45:20.828899 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qlcmv" Feb 03 08:45:20 crc kubenswrapper[4998]: I0203 08:45:20.926767 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qr9wx\" (UniqueName: \"kubernetes.io/projected/196e37a5-e55b-4d18-be6b-411f40bcae01-kube-api-access-qr9wx\") pod \"196e37a5-e55b-4d18-be6b-411f40bcae01\" (UID: \"196e37a5-e55b-4d18-be6b-411f40bcae01\") " Feb 03 08:45:20 crc kubenswrapper[4998]: I0203 08:45:20.926920 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/196e37a5-e55b-4d18-be6b-411f40bcae01-utilities\") pod \"196e37a5-e55b-4d18-be6b-411f40bcae01\" (UID: \"196e37a5-e55b-4d18-be6b-411f40bcae01\") " Feb 03 08:45:20 crc kubenswrapper[4998]: I0203 08:45:20.926948 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/196e37a5-e55b-4d18-be6b-411f40bcae01-catalog-content\") pod \"196e37a5-e55b-4d18-be6b-411f40bcae01\" (UID: \"196e37a5-e55b-4d18-be6b-411f40bcae01\") " Feb 03 08:45:20 crc kubenswrapper[4998]: I0203 08:45:20.927902 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/196e37a5-e55b-4d18-be6b-411f40bcae01-utilities" (OuterVolumeSpecName: "utilities") pod "196e37a5-e55b-4d18-be6b-411f40bcae01" (UID: "196e37a5-e55b-4d18-be6b-411f40bcae01"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:45:20 crc kubenswrapper[4998]: I0203 08:45:20.936950 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/196e37a5-e55b-4d18-be6b-411f40bcae01-kube-api-access-qr9wx" (OuterVolumeSpecName: "kube-api-access-qr9wx") pod "196e37a5-e55b-4d18-be6b-411f40bcae01" (UID: "196e37a5-e55b-4d18-be6b-411f40bcae01"). InnerVolumeSpecName "kube-api-access-qr9wx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:45:21 crc kubenswrapper[4998]: I0203 08:45:21.028963 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qr9wx\" (UniqueName: \"kubernetes.io/projected/196e37a5-e55b-4d18-be6b-411f40bcae01-kube-api-access-qr9wx\") on node \"crc\" DevicePath \"\"" Feb 03 08:45:21 crc kubenswrapper[4998]: I0203 08:45:21.028992 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/196e37a5-e55b-4d18-be6b-411f40bcae01-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 08:45:21 crc kubenswrapper[4998]: I0203 08:45:21.307079 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/196e37a5-e55b-4d18-be6b-411f40bcae01-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "196e37a5-e55b-4d18-be6b-411f40bcae01" (UID: "196e37a5-e55b-4d18-be6b-411f40bcae01"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:45:21 crc kubenswrapper[4998]: I0203 08:45:21.318483 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"474e078d-6891-4062-a084-5208c534b46a","Type":"ContainerStarted","Data":"3ad2df75ddc323a7d633e85fe1748e9fc855394e43076d9289da697b6f284ed8"} Feb 03 08:45:21 crc kubenswrapper[4998]: I0203 08:45:21.321013 4998 generic.go:334] "Generic (PLEG): container finished" podID="196e37a5-e55b-4d18-be6b-411f40bcae01" containerID="ae547929488fc882671fd5f346fa9dd9d124a2362da05c33d14760644d29e4f7" exitCode=0 Feb 03 08:45:21 crc kubenswrapper[4998]: I0203 08:45:21.321079 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qlcmv" event={"ID":"196e37a5-e55b-4d18-be6b-411f40bcae01","Type":"ContainerDied","Data":"ae547929488fc882671fd5f346fa9dd9d124a2362da05c33d14760644d29e4f7"} Feb 03 08:45:21 crc kubenswrapper[4998]: I0203 08:45:21.321098 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qlcmv" event={"ID":"196e37a5-e55b-4d18-be6b-411f40bcae01","Type":"ContainerDied","Data":"6f519b280675b2a4b8a63a669c66fd8ea822ecfe736ec760771e2ad267004251"} Feb 03 08:45:21 crc kubenswrapper[4998]: I0203 08:45:21.321181 4998 scope.go:117] "RemoveContainer" containerID="ae547929488fc882671fd5f346fa9dd9d124a2362da05c33d14760644d29e4f7" Feb 03 08:45:21 crc kubenswrapper[4998]: I0203 08:45:21.321320 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qlcmv" Feb 03 08:45:21 crc kubenswrapper[4998]: I0203 08:45:21.323658 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"06d9046e-6151-41df-a973-abc9673c43e8","Type":"ContainerStarted","Data":"1aabc3a0802a3ec48af7e2de2cafcada50acf1ed8e9bbdd1da8714241fbddf90"} Feb 03 08:45:21 crc kubenswrapper[4998]: I0203 08:45:21.332644 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/196e37a5-e55b-4d18-be6b-411f40bcae01-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 08:45:21 crc kubenswrapper[4998]: I0203 08:45:21.352158 4998 scope.go:117] "RemoveContainer" containerID="0aae98dcd4676b792ee52e481b924bc01078d937be0bb26d760d93420a7d6525" Feb 03 08:45:21 crc kubenswrapper[4998]: I0203 08:45:21.373222 4998 scope.go:117] "RemoveContainer" containerID="53971702250e15457dd5a601a3dfd2d1776ce84886fe6be5ff9d468c71d3bc7a" Feb 03 08:45:21 crc kubenswrapper[4998]: I0203 08:45:21.393221 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qlcmv"] Feb 03 08:45:21 crc kubenswrapper[4998]: I0203 08:45:21.399790 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-qlcmv"] Feb 03 08:45:21 crc kubenswrapper[4998]: I0203 08:45:21.421643 4998 scope.go:117] "RemoveContainer" containerID="ae547929488fc882671fd5f346fa9dd9d124a2362da05c33d14760644d29e4f7" Feb 03 08:45:21 crc kubenswrapper[4998]: E0203 08:45:21.422255 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae547929488fc882671fd5f346fa9dd9d124a2362da05c33d14760644d29e4f7\": container with ID starting with ae547929488fc882671fd5f346fa9dd9d124a2362da05c33d14760644d29e4f7 not found: ID does not exist" containerID="ae547929488fc882671fd5f346fa9dd9d124a2362da05c33d14760644d29e4f7" Feb 03 08:45:21 crc kubenswrapper[4998]: I0203 08:45:21.422285 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae547929488fc882671fd5f346fa9dd9d124a2362da05c33d14760644d29e4f7"} err="failed to get container status \"ae547929488fc882671fd5f346fa9dd9d124a2362da05c33d14760644d29e4f7\": rpc error: code = NotFound desc = could not find container \"ae547929488fc882671fd5f346fa9dd9d124a2362da05c33d14760644d29e4f7\": container with ID starting with ae547929488fc882671fd5f346fa9dd9d124a2362da05c33d14760644d29e4f7 not found: ID does not exist" Feb 03 08:45:21 crc kubenswrapper[4998]: I0203 08:45:21.422304 4998 scope.go:117] "RemoveContainer" containerID="0aae98dcd4676b792ee52e481b924bc01078d937be0bb26d760d93420a7d6525" Feb 03 08:45:21 crc kubenswrapper[4998]: E0203 08:45:21.422559 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0aae98dcd4676b792ee52e481b924bc01078d937be0bb26d760d93420a7d6525\": container with ID starting with 0aae98dcd4676b792ee52e481b924bc01078d937be0bb26d760d93420a7d6525 not found: ID does not exist" containerID="0aae98dcd4676b792ee52e481b924bc01078d937be0bb26d760d93420a7d6525" Feb 03 08:45:21 crc kubenswrapper[4998]: I0203 08:45:21.422581 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0aae98dcd4676b792ee52e481b924bc01078d937be0bb26d760d93420a7d6525"} err="failed to get container status 
\"0aae98dcd4676b792ee52e481b924bc01078d937be0bb26d760d93420a7d6525\": rpc error: code = NotFound desc = could not find container \"0aae98dcd4676b792ee52e481b924bc01078d937be0bb26d760d93420a7d6525\": container with ID starting with 0aae98dcd4676b792ee52e481b924bc01078d937be0bb26d760d93420a7d6525 not found: ID does not exist" Feb 03 08:45:21 crc kubenswrapper[4998]: I0203 08:45:21.422593 4998 scope.go:117] "RemoveContainer" containerID="53971702250e15457dd5a601a3dfd2d1776ce84886fe6be5ff9d468c71d3bc7a" Feb 03 08:45:21 crc kubenswrapper[4998]: E0203 08:45:21.423041 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"53971702250e15457dd5a601a3dfd2d1776ce84886fe6be5ff9d468c71d3bc7a\": container with ID starting with 53971702250e15457dd5a601a3dfd2d1776ce84886fe6be5ff9d468c71d3bc7a not found: ID does not exist" containerID="53971702250e15457dd5a601a3dfd2d1776ce84886fe6be5ff9d468c71d3bc7a" Feb 03 08:45:21 crc kubenswrapper[4998]: I0203 08:45:21.423064 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53971702250e15457dd5a601a3dfd2d1776ce84886fe6be5ff9d468c71d3bc7a"} err="failed to get container status \"53971702250e15457dd5a601a3dfd2d1776ce84886fe6be5ff9d468c71d3bc7a\": rpc error: code = NotFound desc = could not find container \"53971702250e15457dd5a601a3dfd2d1776ce84886fe6be5ff9d468c71d3bc7a\": container with ID starting with 53971702250e15457dd5a601a3dfd2d1776ce84886fe6be5ff9d468c71d3bc7a not found: ID does not exist" Feb 03 08:45:22 crc kubenswrapper[4998]: I0203 08:45:22.437869 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="196e37a5-e55b-4d18-be6b-411f40bcae01" path="/var/lib/kubelet/pods/196e37a5-e55b-4d18-be6b-411f40bcae01/volumes" Feb 03 08:45:42 crc kubenswrapper[4998]: I0203 08:45:42.754221 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 08:45:42 crc kubenswrapper[4998]: I0203 08:45:42.755017 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 08:45:42 crc kubenswrapper[4998]: I0203 08:45:42.755077 4998 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" Feb 03 08:45:42 crc kubenswrapper[4998]: I0203 08:45:42.755966 4998 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"49880a38ce9c7069d48217e07b6a35e3499473215322b29e349b99efe7f5f343"} pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 03 08:45:42 crc kubenswrapper[4998]: I0203 08:45:42.756216 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" 
containerID="cri-o://49880a38ce9c7069d48217e07b6a35e3499473215322b29e349b99efe7f5f343" gracePeriod=600 Feb 03 08:45:43 crc kubenswrapper[4998]: I0203 08:45:43.534622 4998 generic.go:334] "Generic (PLEG): container finished" podID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerID="49880a38ce9c7069d48217e07b6a35e3499473215322b29e349b99efe7f5f343" exitCode=0 Feb 03 08:45:43 crc kubenswrapper[4998]: I0203 08:45:43.534718 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerDied","Data":"49880a38ce9c7069d48217e07b6a35e3499473215322b29e349b99efe7f5f343"} Feb 03 08:45:43 crc kubenswrapper[4998]: I0203 08:45:43.534953 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerStarted","Data":"8b73a3adb01550fb0197fbf3c3543111a8f8997fd8a03538adf5471f08cef4d6"} Feb 03 08:45:43 crc kubenswrapper[4998]: I0203 08:45:43.534975 4998 scope.go:117] "RemoveContainer" containerID="68a2e0b59f4a15c3fb480672ad458f573853ef14695099ba937f9507072081ed" Feb 03 08:45:53 crc kubenswrapper[4998]: I0203 08:45:53.617761 4998 generic.go:334] "Generic (PLEG): container finished" podID="474e078d-6891-4062-a084-5208c534b46a" containerID="3ad2df75ddc323a7d633e85fe1748e9fc855394e43076d9289da697b6f284ed8" exitCode=0 Feb 03 08:45:53 crc kubenswrapper[4998]: I0203 08:45:53.617890 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"474e078d-6891-4062-a084-5208c534b46a","Type":"ContainerDied","Data":"3ad2df75ddc323a7d633e85fe1748e9fc855394e43076d9289da697b6f284ed8"} Feb 03 08:45:53 crc kubenswrapper[4998]: I0203 08:45:53.620607 4998 generic.go:334] "Generic (PLEG): container finished" podID="06d9046e-6151-41df-a973-abc9673c43e8" containerID="1aabc3a0802a3ec48af7e2de2cafcada50acf1ed8e9bbdd1da8714241fbddf90" exitCode=0 Feb 03 08:45:53 crc kubenswrapper[4998]: I0203 08:45:53.620642 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"06d9046e-6151-41df-a973-abc9673c43e8","Type":"ContainerDied","Data":"1aabc3a0802a3ec48af7e2de2cafcada50acf1ed8e9bbdd1da8714241fbddf90"} Feb 03 08:45:54 crc kubenswrapper[4998]: I0203 08:45:54.652867 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"06d9046e-6151-41df-a973-abc9673c43e8","Type":"ContainerStarted","Data":"7ab5141b1bff844c3cf6117e0b0f17a6bf54f6dd824b0019fd7b9c0ac4a776a5"} Feb 03 08:45:54 crc kubenswrapper[4998]: I0203 08:45:54.654407 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Feb 03 08:45:54 crc kubenswrapper[4998]: I0203 08:45:54.659666 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"474e078d-6891-4062-a084-5208c534b46a","Type":"ContainerStarted","Data":"d12d89e44a930b60e1681e177b10eb69a3228a2078465a8c635cae2343713713"} Feb 03 08:45:54 crc kubenswrapper[4998]: I0203 08:45:54.660941 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Feb 03 08:45:54 crc kubenswrapper[4998]: I0203 08:45:54.682586 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.68256279 podStartE2EDuration="36.68256279s" 
podCreationTimestamp="2026-02-03 08:45:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 08:45:54.678130224 +0000 UTC m=+7192.964824110" watchObservedRunningTime="2026-02-03 08:45:54.68256279 +0000 UTC m=+7192.969256616" Feb 03 08:45:54 crc kubenswrapper[4998]: I0203 08:45:54.710876 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=36.710854596 podStartE2EDuration="36.710854596s" podCreationTimestamp="2026-02-03 08:45:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 08:45:54.710574848 +0000 UTC m=+7192.997268664" watchObservedRunningTime="2026-02-03 08:45:54.710854596 +0000 UTC m=+7192.997548402" Feb 03 08:46:08 crc kubenswrapper[4998]: I0203 08:46:08.920877 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Feb 03 08:46:08 crc kubenswrapper[4998]: I0203 08:46:08.927895 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Feb 03 08:46:14 crc kubenswrapper[4998]: I0203 08:46:14.966799 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"] Feb 03 08:46:14 crc kubenswrapper[4998]: E0203 08:46:14.967716 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="196e37a5-e55b-4d18-be6b-411f40bcae01" containerName="registry-server" Feb 03 08:46:14 crc kubenswrapper[4998]: I0203 08:46:14.967734 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="196e37a5-e55b-4d18-be6b-411f40bcae01" containerName="registry-server" Feb 03 08:46:14 crc kubenswrapper[4998]: E0203 08:46:14.967747 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db49e31a-f281-4aa3-9fd1-c6609141ed0e" containerName="init" Feb 03 08:46:14 crc kubenswrapper[4998]: I0203 08:46:14.967755 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="db49e31a-f281-4aa3-9fd1-c6609141ed0e" containerName="init" Feb 03 08:46:14 crc kubenswrapper[4998]: E0203 08:46:14.967765 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="196e37a5-e55b-4d18-be6b-411f40bcae01" containerName="extract-utilities" Feb 03 08:46:14 crc kubenswrapper[4998]: I0203 08:46:14.967814 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="196e37a5-e55b-4d18-be6b-411f40bcae01" containerName="extract-utilities" Feb 03 08:46:14 crc kubenswrapper[4998]: E0203 08:46:14.967825 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="196e37a5-e55b-4d18-be6b-411f40bcae01" containerName="extract-content" Feb 03 08:46:14 crc kubenswrapper[4998]: I0203 08:46:14.967833 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="196e37a5-e55b-4d18-be6b-411f40bcae01" containerName="extract-content" Feb 03 08:46:14 crc kubenswrapper[4998]: E0203 08:46:14.967848 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db49e31a-f281-4aa3-9fd1-c6609141ed0e" containerName="dnsmasq-dns" Feb 03 08:46:14 crc kubenswrapper[4998]: I0203 08:46:14.967856 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="db49e31a-f281-4aa3-9fd1-c6609141ed0e" containerName="dnsmasq-dns" Feb 03 08:46:14 crc kubenswrapper[4998]: I0203 08:46:14.968034 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="196e37a5-e55b-4d18-be6b-411f40bcae01" containerName="registry-server" Feb 03 08:46:14 
crc kubenswrapper[4998]: I0203 08:46:14.968063 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="db49e31a-f281-4aa3-9fd1-c6609141ed0e" containerName="dnsmasq-dns" Feb 03 08:46:14 crc kubenswrapper[4998]: I0203 08:46:14.968728 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Feb 03 08:46:14 crc kubenswrapper[4998]: I0203 08:46:14.971903 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-dwbzj" Feb 03 08:46:14 crc kubenswrapper[4998]: I0203 08:46:14.975951 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Feb 03 08:46:15 crc kubenswrapper[4998]: I0203 08:46:15.103449 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmzpw\" (UniqueName: \"kubernetes.io/projected/6e6ecc01-2e99-4609-99cc-2425a8fca59c-kube-api-access-qmzpw\") pod \"mariadb-client\" (UID: \"6e6ecc01-2e99-4609-99cc-2425a8fca59c\") " pod="openstack/mariadb-client" Feb 03 08:46:15 crc kubenswrapper[4998]: I0203 08:46:15.205714 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmzpw\" (UniqueName: \"kubernetes.io/projected/6e6ecc01-2e99-4609-99cc-2425a8fca59c-kube-api-access-qmzpw\") pod \"mariadb-client\" (UID: \"6e6ecc01-2e99-4609-99cc-2425a8fca59c\") " pod="openstack/mariadb-client" Feb 03 08:46:15 crc kubenswrapper[4998]: I0203 08:46:15.229246 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmzpw\" (UniqueName: \"kubernetes.io/projected/6e6ecc01-2e99-4609-99cc-2425a8fca59c-kube-api-access-qmzpw\") pod \"mariadb-client\" (UID: \"6e6ecc01-2e99-4609-99cc-2425a8fca59c\") " pod="openstack/mariadb-client" Feb 03 08:46:15 crc kubenswrapper[4998]: I0203 08:46:15.310221 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Feb 03 08:46:15 crc kubenswrapper[4998]: I0203 08:46:15.826579 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Feb 03 08:46:16 crc kubenswrapper[4998]: I0203 08:46:16.840467 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"6e6ecc01-2e99-4609-99cc-2425a8fca59c","Type":"ContainerStarted","Data":"770b09a6d3a3801671049d2075780e2a2c8e77d3786122e14132fd01adfc63e4"} Feb 03 08:46:16 crc kubenswrapper[4998]: I0203 08:46:16.840851 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"6e6ecc01-2e99-4609-99cc-2425a8fca59c","Type":"ContainerStarted","Data":"966b732517f1c898fb400157945b8744c5b9b958ac8460483824ce26459d295a"} Feb 03 08:46:16 crc kubenswrapper[4998]: I0203 08:46:16.861838 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-client" podStartSLOduration=2.290112664 podStartE2EDuration="2.861813727s" podCreationTimestamp="2026-02-03 08:46:14 +0000 UTC" firstStartedPulling="2026-02-03 08:46:15.83768883 +0000 UTC m=+7214.124382636" lastFinishedPulling="2026-02-03 08:46:16.409389883 +0000 UTC m=+7214.696083699" observedRunningTime="2026-02-03 08:46:16.856337131 +0000 UTC m=+7215.143030947" watchObservedRunningTime="2026-02-03 08:46:16.861813727 +0000 UTC m=+7215.148507543" Feb 03 08:46:34 crc kubenswrapper[4998]: E0203 08:46:34.268385 4998 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.129:44068->38.102.83.129:45201: write tcp 38.102.83.129:44068->38.102.83.129:45201: write: broken pipe Feb 03 08:46:37 crc kubenswrapper[4998]: I0203 08:46:37.338211 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Feb 03 08:46:37 crc kubenswrapper[4998]: I0203 08:46:37.339379 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/mariadb-client" podUID="6e6ecc01-2e99-4609-99cc-2425a8fca59c" containerName="mariadb-client" containerID="cri-o://770b09a6d3a3801671049d2075780e2a2c8e77d3786122e14132fd01adfc63e4" gracePeriod=30 Feb 03 08:46:37 crc kubenswrapper[4998]: I0203 08:46:37.928529 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Feb 03 08:46:38 crc kubenswrapper[4998]: I0203 08:46:38.002314 4998 generic.go:334] "Generic (PLEG): container finished" podID="6e6ecc01-2e99-4609-99cc-2425a8fca59c" containerID="770b09a6d3a3801671049d2075780e2a2c8e77d3786122e14132fd01adfc63e4" exitCode=143 Feb 03 08:46:38 crc kubenswrapper[4998]: I0203 08:46:38.002368 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"6e6ecc01-2e99-4609-99cc-2425a8fca59c","Type":"ContainerDied","Data":"770b09a6d3a3801671049d2075780e2a2c8e77d3786122e14132fd01adfc63e4"} Feb 03 08:46:38 crc kubenswrapper[4998]: I0203 08:46:38.002429 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"6e6ecc01-2e99-4609-99cc-2425a8fca59c","Type":"ContainerDied","Data":"966b732517f1c898fb400157945b8744c5b9b958ac8460483824ce26459d295a"} Feb 03 08:46:38 crc kubenswrapper[4998]: I0203 08:46:38.002452 4998 scope.go:117] "RemoveContainer" containerID="770b09a6d3a3801671049d2075780e2a2c8e77d3786122e14132fd01adfc63e4" Feb 03 08:46:38 crc kubenswrapper[4998]: I0203 08:46:38.002667 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Feb 03 08:46:38 crc kubenswrapper[4998]: I0203 08:46:38.030188 4998 scope.go:117] "RemoveContainer" containerID="770b09a6d3a3801671049d2075780e2a2c8e77d3786122e14132fd01adfc63e4" Feb 03 08:46:38 crc kubenswrapper[4998]: E0203 08:46:38.031265 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"770b09a6d3a3801671049d2075780e2a2c8e77d3786122e14132fd01adfc63e4\": container with ID starting with 770b09a6d3a3801671049d2075780e2a2c8e77d3786122e14132fd01adfc63e4 not found: ID does not exist" containerID="770b09a6d3a3801671049d2075780e2a2c8e77d3786122e14132fd01adfc63e4" Feb 03 08:46:38 crc kubenswrapper[4998]: I0203 08:46:38.031395 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"770b09a6d3a3801671049d2075780e2a2c8e77d3786122e14132fd01adfc63e4"} err="failed to get container status \"770b09a6d3a3801671049d2075780e2a2c8e77d3786122e14132fd01adfc63e4\": rpc error: code = NotFound desc = could not find container \"770b09a6d3a3801671049d2075780e2a2c8e77d3786122e14132fd01adfc63e4\": container with ID starting with 770b09a6d3a3801671049d2075780e2a2c8e77d3786122e14132fd01adfc63e4 not found: ID does not exist" Feb 03 08:46:38 crc kubenswrapper[4998]: I0203 08:46:38.110027 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qmzpw\" (UniqueName: \"kubernetes.io/projected/6e6ecc01-2e99-4609-99cc-2425a8fca59c-kube-api-access-qmzpw\") pod \"6e6ecc01-2e99-4609-99cc-2425a8fca59c\" (UID: \"6e6ecc01-2e99-4609-99cc-2425a8fca59c\") " Feb 03 08:46:38 crc kubenswrapper[4998]: I0203 08:46:38.115346 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e6ecc01-2e99-4609-99cc-2425a8fca59c-kube-api-access-qmzpw" (OuterVolumeSpecName: "kube-api-access-qmzpw") pod "6e6ecc01-2e99-4609-99cc-2425a8fca59c" (UID: "6e6ecc01-2e99-4609-99cc-2425a8fca59c"). InnerVolumeSpecName "kube-api-access-qmzpw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:46:38 crc kubenswrapper[4998]: I0203 08:46:38.211945 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qmzpw\" (UniqueName: \"kubernetes.io/projected/6e6ecc01-2e99-4609-99cc-2425a8fca59c-kube-api-access-qmzpw\") on node \"crc\" DevicePath \"\"" Feb 03 08:46:38 crc kubenswrapper[4998]: I0203 08:46:38.363319 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Feb 03 08:46:38 crc kubenswrapper[4998]: I0203 08:46:38.375571 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"] Feb 03 08:46:38 crc kubenswrapper[4998]: I0203 08:46:38.445021 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e6ecc01-2e99-4609-99cc-2425a8fca59c" path="/var/lib/kubelet/pods/6e6ecc01-2e99-4609-99cc-2425a8fca59c/volumes" Feb 03 08:48:08 crc kubenswrapper[4998]: I0203 08:48:08.726109 4998 scope.go:117] "RemoveContainer" containerID="a4845f5f068c50e40c8d58b551b4fbb27bca9cc759dfa4396550c4071d02189d" Feb 03 08:48:12 crc kubenswrapper[4998]: I0203 08:48:12.754298 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 08:48:12 crc kubenswrapper[4998]: I0203 08:48:12.754873 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 08:48:42 crc kubenswrapper[4998]: I0203 08:48:42.754473 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 08:48:42 crc kubenswrapper[4998]: I0203 08:48:42.755141 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 08:49:12 crc kubenswrapper[4998]: I0203 08:49:12.755340 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 08:49:12 crc kubenswrapper[4998]: I0203 08:49:12.756114 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 08:49:12 crc kubenswrapper[4998]: I0203 08:49:12.756164 4998 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" Feb 03 08:49:12 crc 
kubenswrapper[4998]: I0203 08:49:12.756856 4998 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8b73a3adb01550fb0197fbf3c3543111a8f8997fd8a03538adf5471f08cef4d6"} pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 03 08:49:12 crc kubenswrapper[4998]: I0203 08:49:12.756910 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" containerID="cri-o://8b73a3adb01550fb0197fbf3c3543111a8f8997fd8a03538adf5471f08cef4d6" gracePeriod=600 Feb 03 08:49:12 crc kubenswrapper[4998]: E0203 08:49:12.880578 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:49:13 crc kubenswrapper[4998]: I0203 08:49:13.824979 4998 generic.go:334] "Generic (PLEG): container finished" podID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerID="8b73a3adb01550fb0197fbf3c3543111a8f8997fd8a03538adf5471f08cef4d6" exitCode=0 Feb 03 08:49:13 crc kubenswrapper[4998]: I0203 08:49:13.825064 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerDied","Data":"8b73a3adb01550fb0197fbf3c3543111a8f8997fd8a03538adf5471f08cef4d6"} Feb 03 08:49:13 crc kubenswrapper[4998]: I0203 08:49:13.825549 4998 scope.go:117] "RemoveContainer" containerID="49880a38ce9c7069d48217e07b6a35e3499473215322b29e349b99efe7f5f343" Feb 03 08:49:13 crc kubenswrapper[4998]: I0203 08:49:13.826425 4998 scope.go:117] "RemoveContainer" containerID="8b73a3adb01550fb0197fbf3c3543111a8f8997fd8a03538adf5471f08cef4d6" Feb 03 08:49:13 crc kubenswrapper[4998]: E0203 08:49:13.826690 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:49:27 crc kubenswrapper[4998]: I0203 08:49:27.428179 4998 scope.go:117] "RemoveContainer" containerID="8b73a3adb01550fb0197fbf3c3543111a8f8997fd8a03538adf5471f08cef4d6" Feb 03 08:49:27 crc kubenswrapper[4998]: E0203 08:49:27.429017 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:49:34 crc kubenswrapper[4998]: I0203 08:49:34.319441 4998 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/redhat-operators-rw8wv"] Feb 03 08:49:34 crc kubenswrapper[4998]: E0203 08:49:34.320476 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e6ecc01-2e99-4609-99cc-2425a8fca59c" containerName="mariadb-client" Feb 03 08:49:34 crc kubenswrapper[4998]: I0203 08:49:34.320490 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e6ecc01-2e99-4609-99cc-2425a8fca59c" containerName="mariadb-client" Feb 03 08:49:34 crc kubenswrapper[4998]: I0203 08:49:34.320655 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e6ecc01-2e99-4609-99cc-2425a8fca59c" containerName="mariadb-client" Feb 03 08:49:34 crc kubenswrapper[4998]: I0203 08:49:34.321951 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rw8wv" Feb 03 08:49:34 crc kubenswrapper[4998]: I0203 08:49:34.342759 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rw8wv"] Feb 03 08:49:34 crc kubenswrapper[4998]: I0203 08:49:34.515832 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d4efc70-5025-46e6-a29b-3446099ffa94-catalog-content\") pod \"redhat-operators-rw8wv\" (UID: \"1d4efc70-5025-46e6-a29b-3446099ffa94\") " pod="openshift-marketplace/redhat-operators-rw8wv" Feb 03 08:49:34 crc kubenswrapper[4998]: I0203 08:49:34.515917 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zqbr\" (UniqueName: \"kubernetes.io/projected/1d4efc70-5025-46e6-a29b-3446099ffa94-kube-api-access-2zqbr\") pod \"redhat-operators-rw8wv\" (UID: \"1d4efc70-5025-46e6-a29b-3446099ffa94\") " pod="openshift-marketplace/redhat-operators-rw8wv" Feb 03 08:49:34 crc kubenswrapper[4998]: I0203 08:49:34.515990 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d4efc70-5025-46e6-a29b-3446099ffa94-utilities\") pod \"redhat-operators-rw8wv\" (UID: \"1d4efc70-5025-46e6-a29b-3446099ffa94\") " pod="openshift-marketplace/redhat-operators-rw8wv" Feb 03 08:49:34 crc kubenswrapper[4998]: I0203 08:49:34.617918 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d4efc70-5025-46e6-a29b-3446099ffa94-catalog-content\") pod \"redhat-operators-rw8wv\" (UID: \"1d4efc70-5025-46e6-a29b-3446099ffa94\") " pod="openshift-marketplace/redhat-operators-rw8wv" Feb 03 08:49:34 crc kubenswrapper[4998]: I0203 08:49:34.618000 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2zqbr\" (UniqueName: \"kubernetes.io/projected/1d4efc70-5025-46e6-a29b-3446099ffa94-kube-api-access-2zqbr\") pod \"redhat-operators-rw8wv\" (UID: \"1d4efc70-5025-46e6-a29b-3446099ffa94\") " pod="openshift-marketplace/redhat-operators-rw8wv" Feb 03 08:49:34 crc kubenswrapper[4998]: I0203 08:49:34.618066 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d4efc70-5025-46e6-a29b-3446099ffa94-utilities\") pod \"redhat-operators-rw8wv\" (UID: \"1d4efc70-5025-46e6-a29b-3446099ffa94\") " pod="openshift-marketplace/redhat-operators-rw8wv" Feb 03 08:49:34 crc kubenswrapper[4998]: I0203 08:49:34.618626 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d4efc70-5025-46e6-a29b-3446099ffa94-utilities\") pod \"redhat-operators-rw8wv\" (UID: \"1d4efc70-5025-46e6-a29b-3446099ffa94\") " pod="openshift-marketplace/redhat-operators-rw8wv" Feb 03 08:49:34 crc kubenswrapper[4998]: I0203 08:49:34.618678 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d4efc70-5025-46e6-a29b-3446099ffa94-catalog-content\") pod \"redhat-operators-rw8wv\" (UID: \"1d4efc70-5025-46e6-a29b-3446099ffa94\") " pod="openshift-marketplace/redhat-operators-rw8wv" Feb 03 08:49:34 crc kubenswrapper[4998]: I0203 08:49:34.637747 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2zqbr\" (UniqueName: \"kubernetes.io/projected/1d4efc70-5025-46e6-a29b-3446099ffa94-kube-api-access-2zqbr\") pod \"redhat-operators-rw8wv\" (UID: \"1d4efc70-5025-46e6-a29b-3446099ffa94\") " pod="openshift-marketplace/redhat-operators-rw8wv" Feb 03 08:49:34 crc kubenswrapper[4998]: I0203 08:49:34.646009 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rw8wv" Feb 03 08:49:35 crc kubenswrapper[4998]: I0203 08:49:35.071380 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rw8wv"] Feb 03 08:49:36 crc kubenswrapper[4998]: I0203 08:49:36.024038 4998 generic.go:334] "Generic (PLEG): container finished" podID="1d4efc70-5025-46e6-a29b-3446099ffa94" containerID="7ed2b9f08b36244933b5ed241212e608ae0fddf744c05dcafa229f708bd7beb7" exitCode=0 Feb 03 08:49:36 crc kubenswrapper[4998]: I0203 08:49:36.024088 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rw8wv" event={"ID":"1d4efc70-5025-46e6-a29b-3446099ffa94","Type":"ContainerDied","Data":"7ed2b9f08b36244933b5ed241212e608ae0fddf744c05dcafa229f708bd7beb7"} Feb 03 08:49:36 crc kubenswrapper[4998]: I0203 08:49:36.024376 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rw8wv" event={"ID":"1d4efc70-5025-46e6-a29b-3446099ffa94","Type":"ContainerStarted","Data":"8692bec1f90eba75a370d14938bbc7044b8c410112203d410b01969571947a93"} Feb 03 08:49:36 crc kubenswrapper[4998]: I0203 08:49:36.026657 4998 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 03 08:49:37 crc kubenswrapper[4998]: I0203 08:49:37.034526 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rw8wv" event={"ID":"1d4efc70-5025-46e6-a29b-3446099ffa94","Type":"ContainerStarted","Data":"79cba22c6dd34ff421c299fc0d81326e79c95e83c7def895ed80f1e3ab12e5ae"} Feb 03 08:49:38 crc kubenswrapper[4998]: I0203 08:49:38.046077 4998 generic.go:334] "Generic (PLEG): container finished" podID="1d4efc70-5025-46e6-a29b-3446099ffa94" containerID="79cba22c6dd34ff421c299fc0d81326e79c95e83c7def895ed80f1e3ab12e5ae" exitCode=0 Feb 03 08:49:38 crc kubenswrapper[4998]: I0203 08:49:38.046161 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rw8wv" event={"ID":"1d4efc70-5025-46e6-a29b-3446099ffa94","Type":"ContainerDied","Data":"79cba22c6dd34ff421c299fc0d81326e79c95e83c7def895ed80f1e3ab12e5ae"} Feb 03 08:49:39 crc kubenswrapper[4998]: I0203 08:49:39.058340 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rw8wv" 
event={"ID":"1d4efc70-5025-46e6-a29b-3446099ffa94","Type":"ContainerStarted","Data":"2f9873a99bb04d1115747eef21775f3324d0e132330f38495d712eb9228d2c70"} Feb 03 08:49:39 crc kubenswrapper[4998]: I0203 08:49:39.089626 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rw8wv" podStartSLOduration=2.53256762 podStartE2EDuration="5.089604093s" podCreationTimestamp="2026-02-03 08:49:34 +0000 UTC" firstStartedPulling="2026-02-03 08:49:36.026252484 +0000 UTC m=+7414.312946290" lastFinishedPulling="2026-02-03 08:49:38.583288947 +0000 UTC m=+7416.869982763" observedRunningTime="2026-02-03 08:49:39.082233084 +0000 UTC m=+7417.368926910" watchObservedRunningTime="2026-02-03 08:49:39.089604093 +0000 UTC m=+7417.376297899" Feb 03 08:49:42 crc kubenswrapper[4998]: I0203 08:49:42.427597 4998 scope.go:117] "RemoveContainer" containerID="8b73a3adb01550fb0197fbf3c3543111a8f8997fd8a03538adf5471f08cef4d6" Feb 03 08:49:42 crc kubenswrapper[4998]: E0203 08:49:42.428314 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:49:44 crc kubenswrapper[4998]: I0203 08:49:44.646818 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rw8wv" Feb 03 08:49:44 crc kubenswrapper[4998]: I0203 08:49:44.646885 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rw8wv" Feb 03 08:49:44 crc kubenswrapper[4998]: I0203 08:49:44.699087 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rw8wv" Feb 03 08:49:45 crc kubenswrapper[4998]: I0203 08:49:45.144711 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rw8wv" Feb 03 08:49:45 crc kubenswrapper[4998]: I0203 08:49:45.194354 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rw8wv"] Feb 03 08:49:47 crc kubenswrapper[4998]: I0203 08:49:47.132496 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rw8wv" podUID="1d4efc70-5025-46e6-a29b-3446099ffa94" containerName="registry-server" containerID="cri-o://2f9873a99bb04d1115747eef21775f3324d0e132330f38495d712eb9228d2c70" gracePeriod=2 Feb 03 08:49:48 crc kubenswrapper[4998]: I0203 08:49:48.680187 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rw8wv"
Feb 03 08:49:48 crc kubenswrapper[4998]: I0203 08:49:48.862529 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d4efc70-5025-46e6-a29b-3446099ffa94-utilities\") pod \"1d4efc70-5025-46e6-a29b-3446099ffa94\" (UID: \"1d4efc70-5025-46e6-a29b-3446099ffa94\") "
Feb 03 08:49:48 crc kubenswrapper[4998]: I0203 08:49:48.863597 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d4efc70-5025-46e6-a29b-3446099ffa94-utilities" (OuterVolumeSpecName: "utilities") pod "1d4efc70-5025-46e6-a29b-3446099ffa94" (UID: "1d4efc70-5025-46e6-a29b-3446099ffa94"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 08:49:48 crc kubenswrapper[4998]: I0203 08:49:48.864569 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d4efc70-5025-46e6-a29b-3446099ffa94-catalog-content\") pod \"1d4efc70-5025-46e6-a29b-3446099ffa94\" (UID: \"1d4efc70-5025-46e6-a29b-3446099ffa94\") "
Feb 03 08:49:48 crc kubenswrapper[4998]: I0203 08:49:48.864761 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2zqbr\" (UniqueName: \"kubernetes.io/projected/1d4efc70-5025-46e6-a29b-3446099ffa94-kube-api-access-2zqbr\") pod \"1d4efc70-5025-46e6-a29b-3446099ffa94\" (UID: \"1d4efc70-5025-46e6-a29b-3446099ffa94\") "
Feb 03 08:49:48 crc kubenswrapper[4998]: I0203 08:49:48.865315 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d4efc70-5025-46e6-a29b-3446099ffa94-utilities\") on node \"crc\" DevicePath \"\""
Feb 03 08:49:48 crc kubenswrapper[4998]: I0203 08:49:48.870358 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d4efc70-5025-46e6-a29b-3446099ffa94-kube-api-access-2zqbr" (OuterVolumeSpecName: "kube-api-access-2zqbr") pod "1d4efc70-5025-46e6-a29b-3446099ffa94" (UID: "1d4efc70-5025-46e6-a29b-3446099ffa94"). InnerVolumeSpecName "kube-api-access-2zqbr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 08:49:48 crc kubenswrapper[4998]: I0203 08:49:48.967112 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2zqbr\" (UniqueName: \"kubernetes.io/projected/1d4efc70-5025-46e6-a29b-3446099ffa94-kube-api-access-2zqbr\") on node \"crc\" DevicePath \"\""
Feb 03 08:49:48 crc kubenswrapper[4998]: I0203 08:49:48.977118 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d4efc70-5025-46e6-a29b-3446099ffa94-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d4efc70-5025-46e6-a29b-3446099ffa94" (UID: "1d4efc70-5025-46e6-a29b-3446099ffa94"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 08:49:49 crc kubenswrapper[4998]: I0203 08:49:49.069729 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d4efc70-5025-46e6-a29b-3446099ffa94-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 03 08:49:49 crc kubenswrapper[4998]: I0203 08:49:49.151820 4998 generic.go:334] "Generic (PLEG): container finished" podID="1d4efc70-5025-46e6-a29b-3446099ffa94" containerID="2f9873a99bb04d1115747eef21775f3324d0e132330f38495d712eb9228d2c70" exitCode=0
Feb 03 08:49:49 crc kubenswrapper[4998]: I0203 08:49:49.151882 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rw8wv"
Feb 03 08:49:49 crc kubenswrapper[4998]: I0203 08:49:49.151882 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rw8wv" event={"ID":"1d4efc70-5025-46e6-a29b-3446099ffa94","Type":"ContainerDied","Data":"2f9873a99bb04d1115747eef21775f3324d0e132330f38495d712eb9228d2c70"}
Feb 03 08:49:49 crc kubenswrapper[4998]: I0203 08:49:49.151938 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rw8wv" event={"ID":"1d4efc70-5025-46e6-a29b-3446099ffa94","Type":"ContainerDied","Data":"8692bec1f90eba75a370d14938bbc7044b8c410112203d410b01969571947a93"}
Feb 03 08:49:49 crc kubenswrapper[4998]: I0203 08:49:49.151957 4998 scope.go:117] "RemoveContainer" containerID="2f9873a99bb04d1115747eef21775f3324d0e132330f38495d712eb9228d2c70"
Feb 03 08:49:49 crc kubenswrapper[4998]: I0203 08:49:49.180537 4998 scope.go:117] "RemoveContainer" containerID="79cba22c6dd34ff421c299fc0d81326e79c95e83c7def895ed80f1e3ab12e5ae"
Feb 03 08:49:49 crc kubenswrapper[4998]: I0203 08:49:49.184056 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rw8wv"]
Feb 03 08:49:49 crc kubenswrapper[4998]: I0203 08:49:49.191238 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rw8wv"]
Feb 03 08:49:49 crc kubenswrapper[4998]: I0203 08:49:49.213896 4998 scope.go:117] "RemoveContainer" containerID="7ed2b9f08b36244933b5ed241212e608ae0fddf744c05dcafa229f708bd7beb7"
Feb 03 08:49:49 crc kubenswrapper[4998]: I0203 08:49:49.234662 4998 scope.go:117] "RemoveContainer" containerID="2f9873a99bb04d1115747eef21775f3324d0e132330f38495d712eb9228d2c70"
Feb 03 08:49:49 crc kubenswrapper[4998]: E0203 08:49:49.235155 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f9873a99bb04d1115747eef21775f3324d0e132330f38495d712eb9228d2c70\": container with ID starting with 2f9873a99bb04d1115747eef21775f3324d0e132330f38495d712eb9228d2c70 not found: ID does not exist" containerID="2f9873a99bb04d1115747eef21775f3324d0e132330f38495d712eb9228d2c70"
Feb 03 08:49:49 crc kubenswrapper[4998]: I0203 08:49:49.235210 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f9873a99bb04d1115747eef21775f3324d0e132330f38495d712eb9228d2c70"} err="failed to get container status \"2f9873a99bb04d1115747eef21775f3324d0e132330f38495d712eb9228d2c70\": rpc error: code = NotFound desc = could not find container \"2f9873a99bb04d1115747eef21775f3324d0e132330f38495d712eb9228d2c70\": container with ID starting with 2f9873a99bb04d1115747eef21775f3324d0e132330f38495d712eb9228d2c70 not found: ID does not exist"
Feb 03 08:49:49 crc kubenswrapper[4998]: I0203 08:49:49.235242 4998 scope.go:117] "RemoveContainer" containerID="79cba22c6dd34ff421c299fc0d81326e79c95e83c7def895ed80f1e3ab12e5ae"
Feb 03 08:49:49 crc kubenswrapper[4998]: E0203 08:49:49.235706 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79cba22c6dd34ff421c299fc0d81326e79c95e83c7def895ed80f1e3ab12e5ae\": container with ID starting with 79cba22c6dd34ff421c299fc0d81326e79c95e83c7def895ed80f1e3ab12e5ae not found: ID does not exist" containerID="79cba22c6dd34ff421c299fc0d81326e79c95e83c7def895ed80f1e3ab12e5ae"
Feb 03 08:49:49 crc kubenswrapper[4998]: I0203 08:49:49.235757 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79cba22c6dd34ff421c299fc0d81326e79c95e83c7def895ed80f1e3ab12e5ae"} err="failed to get container status \"79cba22c6dd34ff421c299fc0d81326e79c95e83c7def895ed80f1e3ab12e5ae\": rpc error: code = NotFound desc = could not find container \"79cba22c6dd34ff421c299fc0d81326e79c95e83c7def895ed80f1e3ab12e5ae\": container with ID starting with 79cba22c6dd34ff421c299fc0d81326e79c95e83c7def895ed80f1e3ab12e5ae not found: ID does not exist"
Feb 03 08:49:49 crc kubenswrapper[4998]: I0203 08:49:49.235813 4998 scope.go:117] "RemoveContainer" containerID="7ed2b9f08b36244933b5ed241212e608ae0fddf744c05dcafa229f708bd7beb7"
Feb 03 08:49:49 crc kubenswrapper[4998]: E0203 08:49:49.236254 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ed2b9f08b36244933b5ed241212e608ae0fddf744c05dcafa229f708bd7beb7\": container with ID starting with 7ed2b9f08b36244933b5ed241212e608ae0fddf744c05dcafa229f708bd7beb7 not found: ID does not exist" containerID="7ed2b9f08b36244933b5ed241212e608ae0fddf744c05dcafa229f708bd7beb7"
Feb 03 08:49:49 crc kubenswrapper[4998]: I0203 08:49:49.236375 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ed2b9f08b36244933b5ed241212e608ae0fddf744c05dcafa229f708bd7beb7"} err="failed to get container status \"7ed2b9f08b36244933b5ed241212e608ae0fddf744c05dcafa229f708bd7beb7\": rpc error: code = NotFound desc = could not find container \"7ed2b9f08b36244933b5ed241212e608ae0fddf744c05dcafa229f708bd7beb7\": container with ID starting with 7ed2b9f08b36244933b5ed241212e608ae0fddf744c05dcafa229f708bd7beb7 not found: ID does not exist"
Feb 03 08:49:50 crc kubenswrapper[4998]: I0203 08:49:50.445645 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d4efc70-5025-46e6-a29b-3446099ffa94" path="/var/lib/kubelet/pods/1d4efc70-5025-46e6-a29b-3446099ffa94/volumes"
Feb 03 08:49:56 crc kubenswrapper[4998]: I0203 08:49:56.428896 4998 scope.go:117] "RemoveContainer" containerID="8b73a3adb01550fb0197fbf3c3543111a8f8997fd8a03538adf5471f08cef4d6"
Feb 03 08:49:56 crc kubenswrapper[4998]: E0203 08:49:56.429666 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:50:07 crc kubenswrapper[4998]: I0203 08:50:07.428151 4998 scope.go:117] "RemoveContainer" containerID="8b73a3adb01550fb0197fbf3c3543111a8f8997fd8a03538adf5471f08cef4d6"
Feb 03 08:50:07 crc kubenswrapper[4998]: E0203 08:50:07.428845 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:50:22 crc kubenswrapper[4998]: I0203 08:50:22.440502 4998 scope.go:117] "RemoveContainer" containerID="8b73a3adb01550fb0197fbf3c3543111a8f8997fd8a03538adf5471f08cef4d6"
Feb 03 08:50:22 crc kubenswrapper[4998]: E0203 08:50:22.441628 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:50:34 crc kubenswrapper[4998]: I0203 08:50:34.427982 4998 scope.go:117] "RemoveContainer" containerID="8b73a3adb01550fb0197fbf3c3543111a8f8997fd8a03538adf5471f08cef4d6"
Feb 03 08:50:34 crc kubenswrapper[4998]: E0203 08:50:34.428740 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:50:47 crc kubenswrapper[4998]: I0203 08:50:47.429161 4998 scope.go:117] "RemoveContainer" containerID="8b73a3adb01550fb0197fbf3c3543111a8f8997fd8a03538adf5471f08cef4d6"
Feb 03 08:50:47 crc kubenswrapper[4998]: E0203 08:50:47.430346 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:51:02 crc kubenswrapper[4998]: I0203 08:51:02.432388 4998 scope.go:117] "RemoveContainer" containerID="8b73a3adb01550fb0197fbf3c3543111a8f8997fd8a03538adf5471f08cef4d6"
Feb 03 08:51:02 crc kubenswrapper[4998]: E0203 08:51:02.433107 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:51:08 crc kubenswrapper[4998]: I0203 08:51:08.838545 4998 scope.go:117] "RemoveContainer" containerID="71ecd614068f85d80b96f1d6d71a1a899a38a4a74e8c581789e51e087a4060e0"
Feb 03 08:51:08 crc kubenswrapper[4998]: I0203 08:51:08.859405 4998 scope.go:117] "RemoveContainer" containerID="1a6866c24e417591f2f935121a21e233dfc87dc6acf0fbfef89df0a20ad6155c"
Feb 03 08:51:08 crc kubenswrapper[4998]: I0203 08:51:08.895945 4998 scope.go:117] "RemoveContainer" containerID="e67746ec78d30aa5d2611a55a6a41e973382a2c3f888ccd748e73cd861f760b6"
Feb 03 08:51:13 crc kubenswrapper[4998]: I0203 08:51:13.427704 4998 scope.go:117] "RemoveContainer" containerID="8b73a3adb01550fb0197fbf3c3543111a8f8997fd8a03538adf5471f08cef4d6"
Feb 03 08:51:13 crc kubenswrapper[4998]: E0203 08:51:13.428685 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:51:16 crc kubenswrapper[4998]: I0203 08:51:16.842211 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-copy-data"]
Feb 03 08:51:16 crc kubenswrapper[4998]: E0203 08:51:16.842855 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d4efc70-5025-46e6-a29b-3446099ffa94" containerName="registry-server"
Feb 03 08:51:16 crc kubenswrapper[4998]: I0203 08:51:16.842870 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d4efc70-5025-46e6-a29b-3446099ffa94" containerName="registry-server"
Feb 03 08:51:16 crc kubenswrapper[4998]: E0203 08:51:16.842893 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d4efc70-5025-46e6-a29b-3446099ffa94" containerName="extract-utilities"
Feb 03 08:51:16 crc kubenswrapper[4998]: I0203 08:51:16.842900 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d4efc70-5025-46e6-a29b-3446099ffa94" containerName="extract-utilities"
Feb 03 08:51:16 crc kubenswrapper[4998]: E0203 08:51:16.842915 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d4efc70-5025-46e6-a29b-3446099ffa94" containerName="extract-content"
Feb 03 08:51:16 crc kubenswrapper[4998]: I0203 08:51:16.842922 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d4efc70-5025-46e6-a29b-3446099ffa94" containerName="extract-content"
Feb 03 08:51:16 crc kubenswrapper[4998]: I0203 08:51:16.843050 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d4efc70-5025-46e6-a29b-3446099ffa94" containerName="registry-server"
Feb 03 08:51:16 crc kubenswrapper[4998]: I0203 08:51:16.843695 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data"
Feb 03 08:51:16 crc kubenswrapper[4998]: I0203 08:51:16.846385 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-dwbzj"
Feb 03 08:51:16 crc kubenswrapper[4998]: I0203 08:51:16.854526 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"]
Feb 03 08:51:16 crc kubenswrapper[4998]: I0203 08:51:16.939364 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkpvm\" (UniqueName: \"kubernetes.io/projected/e5ea5e31-cb66-4f96-a051-0730ce894ec2-kube-api-access-lkpvm\") pod \"mariadb-copy-data\" (UID: \"e5ea5e31-cb66-4f96-a051-0730ce894ec2\") " pod="openstack/mariadb-copy-data"
Feb 03 08:51:16 crc kubenswrapper[4998]: I0203 08:51:16.939641 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-58239723-da4f-403e-b096-d67c0d6effac\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-58239723-da4f-403e-b096-d67c0d6effac\") pod \"mariadb-copy-data\" (UID: \"e5ea5e31-cb66-4f96-a051-0730ce894ec2\") " pod="openstack/mariadb-copy-data"
Feb 03 08:51:17 crc kubenswrapper[4998]: I0203 08:51:17.041492 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkpvm\" (UniqueName: \"kubernetes.io/projected/e5ea5e31-cb66-4f96-a051-0730ce894ec2-kube-api-access-lkpvm\") pod \"mariadb-copy-data\" (UID: \"e5ea5e31-cb66-4f96-a051-0730ce894ec2\") " pod="openstack/mariadb-copy-data"
Feb 03 08:51:17 crc kubenswrapper[4998]: I0203 08:51:17.041591 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-58239723-da4f-403e-b096-d67c0d6effac\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-58239723-da4f-403e-b096-d67c0d6effac\") pod \"mariadb-copy-data\" (UID: \"e5ea5e31-cb66-4f96-a051-0730ce894ec2\") " pod="openstack/mariadb-copy-data"
Feb 03 08:51:17 crc kubenswrapper[4998]: I0203 08:51:17.045950 4998 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Feb 03 08:51:17 crc kubenswrapper[4998]: I0203 08:51:17.045990 4998 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-58239723-da4f-403e-b096-d67c0d6effac\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-58239723-da4f-403e-b096-d67c0d6effac\") pod \"mariadb-copy-data\" (UID: \"e5ea5e31-cb66-4f96-a051-0730ce894ec2\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/d98212bdd9753712f261fb10923badd60a984eb766bf37aef061da4f2d1d0038/globalmount\"" pod="openstack/mariadb-copy-data"
Feb 03 08:51:17 crc kubenswrapper[4998]: I0203 08:51:17.068192 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkpvm\" (UniqueName: \"kubernetes.io/projected/e5ea5e31-cb66-4f96-a051-0730ce894ec2-kube-api-access-lkpvm\") pod \"mariadb-copy-data\" (UID: \"e5ea5e31-cb66-4f96-a051-0730ce894ec2\") " pod="openstack/mariadb-copy-data"
Feb 03 08:51:17 crc kubenswrapper[4998]: I0203 08:51:17.071815 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-58239723-da4f-403e-b096-d67c0d6effac\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-58239723-da4f-403e-b096-d67c0d6effac\") pod \"mariadb-copy-data\" (UID: \"e5ea5e31-cb66-4f96-a051-0730ce894ec2\") " pod="openstack/mariadb-copy-data"
Feb 03 08:51:17 crc kubenswrapper[4998]: I0203 08:51:17.167720 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data"
Feb 03 08:51:17 crc kubenswrapper[4998]: I0203 08:51:17.654879 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"]
Feb 03 08:51:17 crc kubenswrapper[4998]: I0203 08:51:17.907720 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"e5ea5e31-cb66-4f96-a051-0730ce894ec2","Type":"ContainerStarted","Data":"dbd942ca08ac8ab1f9baf859388f468865bbe9550fe581f60e0f0b6802325d0f"}
Feb 03 08:51:17 crc kubenswrapper[4998]: I0203 08:51:17.907766 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"e5ea5e31-cb66-4f96-a051-0730ce894ec2","Type":"ContainerStarted","Data":"514ce383526866343eb7edc55ac8c261c9eb66e75d3536695e51c53b2056fd4d"}
Feb 03 08:51:17 crc kubenswrapper[4998]: I0203 08:51:17.942627 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-copy-data" podStartSLOduration=2.9425990459999998 podStartE2EDuration="2.942599046s" podCreationTimestamp="2026-02-03 08:51:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 08:51:17.927044935 +0000 UTC m=+7516.213738731" watchObservedRunningTime="2026-02-03 08:51:17.942599046 +0000 UTC m=+7516.229292882"
Feb 03 08:51:21 crc kubenswrapper[4998]: I0203 08:51:21.400719 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"]
Feb 03 08:51:21 crc kubenswrapper[4998]: I0203 08:51:21.402874 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Feb 03 08:51:21 crc kubenswrapper[4998]: I0203 08:51:21.410850 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"]
Feb 03 08:51:21 crc kubenswrapper[4998]: I0203 08:51:21.513201 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7nhpc\" (UniqueName: \"kubernetes.io/projected/f3c818ee-9300-4711-a712-6814509d18e4-kube-api-access-7nhpc\") pod \"mariadb-client\" (UID: \"f3c818ee-9300-4711-a712-6814509d18e4\") " pod="openstack/mariadb-client"
Feb 03 08:51:21 crc kubenswrapper[4998]: I0203 08:51:21.614878 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7nhpc\" (UniqueName: \"kubernetes.io/projected/f3c818ee-9300-4711-a712-6814509d18e4-kube-api-access-7nhpc\") pod \"mariadb-client\" (UID: \"f3c818ee-9300-4711-a712-6814509d18e4\") " pod="openstack/mariadb-client"
Feb 03 08:51:21 crc kubenswrapper[4998]: I0203 08:51:21.632361 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7nhpc\" (UniqueName: \"kubernetes.io/projected/f3c818ee-9300-4711-a712-6814509d18e4-kube-api-access-7nhpc\") pod \"mariadb-client\" (UID: \"f3c818ee-9300-4711-a712-6814509d18e4\") " pod="openstack/mariadb-client"
Feb 03 08:51:21 crc kubenswrapper[4998]: I0203 08:51:21.724878 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Feb 03 08:51:22 crc kubenswrapper[4998]: I0203 08:51:22.154184 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"]
Feb 03 08:51:22 crc kubenswrapper[4998]: I0203 08:51:22.945552 4998 generic.go:334] "Generic (PLEG): container finished" podID="f3c818ee-9300-4711-a712-6814509d18e4" containerID="929844c841b906adffb0a84e95a4a9137002e689aafe69e3acc2b5116f75ecbf" exitCode=0
Feb 03 08:51:22 crc kubenswrapper[4998]: I0203 08:51:22.945622 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"f3c818ee-9300-4711-a712-6814509d18e4","Type":"ContainerDied","Data":"929844c841b906adffb0a84e95a4a9137002e689aafe69e3acc2b5116f75ecbf"}
Feb 03 08:51:22 crc kubenswrapper[4998]: I0203 08:51:22.945968 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"f3c818ee-9300-4711-a712-6814509d18e4","Type":"ContainerStarted","Data":"bd5b0b3e92a70f34accd4404fdf41cc88f03c39ef3ad159dc3d20f3814c66043"}
Feb 03 08:51:24 crc kubenswrapper[4998]: I0203 08:51:24.290315 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Feb 03 08:51:24 crc kubenswrapper[4998]: I0203 08:51:24.316345 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_f3c818ee-9300-4711-a712-6814509d18e4/mariadb-client/0.log"
Feb 03 08:51:24 crc kubenswrapper[4998]: I0203 08:51:24.348997 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"]
Feb 03 08:51:24 crc kubenswrapper[4998]: I0203 08:51:24.360104 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7nhpc\" (UniqueName: \"kubernetes.io/projected/f3c818ee-9300-4711-a712-6814509d18e4-kube-api-access-7nhpc\") pod \"f3c818ee-9300-4711-a712-6814509d18e4\" (UID: \"f3c818ee-9300-4711-a712-6814509d18e4\") "
Feb 03 08:51:24 crc kubenswrapper[4998]: I0203 08:51:24.360733 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"]
Feb 03 08:51:24 crc kubenswrapper[4998]: I0203 08:51:24.367025 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3c818ee-9300-4711-a712-6814509d18e4-kube-api-access-7nhpc" (OuterVolumeSpecName: "kube-api-access-7nhpc") pod "f3c818ee-9300-4711-a712-6814509d18e4" (UID: "f3c818ee-9300-4711-a712-6814509d18e4"). InnerVolumeSpecName "kube-api-access-7nhpc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 08:51:24 crc kubenswrapper[4998]: I0203 08:51:24.436388 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3c818ee-9300-4711-a712-6814509d18e4" path="/var/lib/kubelet/pods/f3c818ee-9300-4711-a712-6814509d18e4/volumes"
Feb 03 08:51:24 crc kubenswrapper[4998]: I0203 08:51:24.459202 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"]
Feb 03 08:51:24 crc kubenswrapper[4998]: E0203 08:51:24.459541 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3c818ee-9300-4711-a712-6814509d18e4" containerName="mariadb-client"
Feb 03 08:51:24 crc kubenswrapper[4998]: I0203 08:51:24.459552 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3c818ee-9300-4711-a712-6814509d18e4" containerName="mariadb-client"
Feb 03 08:51:24 crc kubenswrapper[4998]: I0203 08:51:24.459702 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3c818ee-9300-4711-a712-6814509d18e4" containerName="mariadb-client"
Feb 03 08:51:24 crc kubenswrapper[4998]: I0203 08:51:24.460179 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Feb 03 08:51:24 crc kubenswrapper[4998]: I0203 08:51:24.465158 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7nhpc\" (UniqueName: \"kubernetes.io/projected/f3c818ee-9300-4711-a712-6814509d18e4-kube-api-access-7nhpc\") on node \"crc\" DevicePath \"\""
Feb 03 08:51:24 crc kubenswrapper[4998]: I0203 08:51:24.467500 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"]
Feb 03 08:51:24 crc kubenswrapper[4998]: I0203 08:51:24.566289 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mmw5\" (UniqueName: \"kubernetes.io/projected/1a720c97-e108-4910-96a2-464c029d5f02-kube-api-access-6mmw5\") pod \"mariadb-client\" (UID: \"1a720c97-e108-4910-96a2-464c029d5f02\") " pod="openstack/mariadb-client"
Feb 03 08:51:24 crc kubenswrapper[4998]: I0203 08:51:24.668581 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mmw5\" (UniqueName: \"kubernetes.io/projected/1a720c97-e108-4910-96a2-464c029d5f02-kube-api-access-6mmw5\") pod \"mariadb-client\" (UID: \"1a720c97-e108-4910-96a2-464c029d5f02\") " pod="openstack/mariadb-client"
Feb 03 08:51:24 crc kubenswrapper[4998]: I0203 08:51:24.693891 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mmw5\" (UniqueName: \"kubernetes.io/projected/1a720c97-e108-4910-96a2-464c029d5f02-kube-api-access-6mmw5\") pod \"mariadb-client\" (UID: \"1a720c97-e108-4910-96a2-464c029d5f02\") " pod="openstack/mariadb-client"
Feb 03 08:51:24 crc kubenswrapper[4998]: I0203 08:51:24.791378 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Feb 03 08:51:24 crc kubenswrapper[4998]: I0203 08:51:24.964212 4998 scope.go:117] "RemoveContainer" containerID="929844c841b906adffb0a84e95a4a9137002e689aafe69e3acc2b5116f75ecbf"
Feb 03 08:51:24 crc kubenswrapper[4998]: I0203 08:51:24.964336 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Feb 03 08:51:25 crc kubenswrapper[4998]: I0203 08:51:25.213031 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"]
Feb 03 08:51:25 crc kubenswrapper[4998]: W0203 08:51:25.214769 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1a720c97_e108_4910_96a2_464c029d5f02.slice/crio-1ce881c08e5ff0234811938ea0bb89faac5dd86bda6de347b3c543e17ddce69c WatchSource:0}: Error finding container 1ce881c08e5ff0234811938ea0bb89faac5dd86bda6de347b3c543e17ddce69c: Status 404 returned error can't find the container with id 1ce881c08e5ff0234811938ea0bb89faac5dd86bda6de347b3c543e17ddce69c
Feb 03 08:51:25 crc kubenswrapper[4998]: I0203 08:51:25.975116 4998 generic.go:334] "Generic (PLEG): container finished" podID="1a720c97-e108-4910-96a2-464c029d5f02" containerID="3279509fc81e6170b74d691581dbe0de32262389c25bd7a75fdde0600b4e6762" exitCode=0
Feb 03 08:51:25 crc kubenswrapper[4998]: I0203 08:51:25.975180 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"1a720c97-e108-4910-96a2-464c029d5f02","Type":"ContainerDied","Data":"3279509fc81e6170b74d691581dbe0de32262389c25bd7a75fdde0600b4e6762"}
Feb 03 08:51:25 crc kubenswrapper[4998]: I0203 08:51:25.975800 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"1a720c97-e108-4910-96a2-464c029d5f02","Type":"ContainerStarted","Data":"1ce881c08e5ff0234811938ea0bb89faac5dd86bda6de347b3c543e17ddce69c"}
Feb 03 08:51:27 crc kubenswrapper[4998]: I0203 08:51:27.448950 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Feb 03 08:51:27 crc kubenswrapper[4998]: I0203 08:51:27.468087 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_1a720c97-e108-4910-96a2-464c029d5f02/mariadb-client/0.log"
Feb 03 08:51:27 crc kubenswrapper[4998]: I0203 08:51:27.494180 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"]
Feb 03 08:51:27 crc kubenswrapper[4998]: I0203 08:51:27.502163 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"]
Feb 03 08:51:27 crc kubenswrapper[4998]: I0203 08:51:27.521463 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6mmw5\" (UniqueName: \"kubernetes.io/projected/1a720c97-e108-4910-96a2-464c029d5f02-kube-api-access-6mmw5\") pod \"1a720c97-e108-4910-96a2-464c029d5f02\" (UID: \"1a720c97-e108-4910-96a2-464c029d5f02\") "
Feb 03 08:51:27 crc kubenswrapper[4998]: I0203 08:51:27.528434 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a720c97-e108-4910-96a2-464c029d5f02-kube-api-access-6mmw5" (OuterVolumeSpecName: "kube-api-access-6mmw5") pod "1a720c97-e108-4910-96a2-464c029d5f02" (UID: "1a720c97-e108-4910-96a2-464c029d5f02"). InnerVolumeSpecName "kube-api-access-6mmw5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 08:51:27 crc kubenswrapper[4998]: I0203 08:51:27.607315 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"]
Feb 03 08:51:27 crc kubenswrapper[4998]: E0203 08:51:27.607684 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a720c97-e108-4910-96a2-464c029d5f02" containerName="mariadb-client"
Feb 03 08:51:27 crc kubenswrapper[4998]: I0203 08:51:27.607700 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a720c97-e108-4910-96a2-464c029d5f02" containerName="mariadb-client"
Feb 03 08:51:27 crc kubenswrapper[4998]: I0203 08:51:27.607849 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a720c97-e108-4910-96a2-464c029d5f02" containerName="mariadb-client"
Feb 03 08:51:27 crc kubenswrapper[4998]: I0203 08:51:27.608397 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Feb 03 08:51:27 crc kubenswrapper[4998]: I0203 08:51:27.614736 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"]
Feb 03 08:51:27 crc kubenswrapper[4998]: I0203 08:51:27.623695 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxw9b\" (UniqueName: \"kubernetes.io/projected/17a2884d-b3d7-42a1-b698-d8b1c272e6fb-kube-api-access-pxw9b\") pod \"mariadb-client\" (UID: \"17a2884d-b3d7-42a1-b698-d8b1c272e6fb\") " pod="openstack/mariadb-client"
Feb 03 08:51:27 crc kubenswrapper[4998]: I0203 08:51:27.623847 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6mmw5\" (UniqueName: \"kubernetes.io/projected/1a720c97-e108-4910-96a2-464c029d5f02-kube-api-access-6mmw5\") on node \"crc\" DevicePath \"\""
Feb 03 08:51:27 crc kubenswrapper[4998]: I0203 08:51:27.725958 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxw9b\" (UniqueName: \"kubernetes.io/projected/17a2884d-b3d7-42a1-b698-d8b1c272e6fb-kube-api-access-pxw9b\") pod \"mariadb-client\" (UID: \"17a2884d-b3d7-42a1-b698-d8b1c272e6fb\") " pod="openstack/mariadb-client"
Feb 03 08:51:27 crc kubenswrapper[4998]: I0203 08:51:27.742392 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxw9b\" (UniqueName: \"kubernetes.io/projected/17a2884d-b3d7-42a1-b698-d8b1c272e6fb-kube-api-access-pxw9b\") pod \"mariadb-client\" (UID: \"17a2884d-b3d7-42a1-b698-d8b1c272e6fb\") " pod="openstack/mariadb-client"
Feb 03 08:51:27 crc kubenswrapper[4998]: I0203 08:51:27.924813 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Feb 03 08:51:27 crc kubenswrapper[4998]: I0203 08:51:27.997497 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1ce881c08e5ff0234811938ea0bb89faac5dd86bda6de347b3c543e17ddce69c"
Feb 03 08:51:27 crc kubenswrapper[4998]: I0203 08:51:27.997742 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Feb 03 08:51:28 crc kubenswrapper[4998]: I0203 08:51:28.019167 4998 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/mariadb-client" oldPodUID="1a720c97-e108-4910-96a2-464c029d5f02" podUID="17a2884d-b3d7-42a1-b698-d8b1c272e6fb"
Feb 03 08:51:28 crc kubenswrapper[4998]: I0203 08:51:28.328851 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"]
Feb 03 08:51:28 crc kubenswrapper[4998]: W0203 08:51:28.332858 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod17a2884d_b3d7_42a1_b698_d8b1c272e6fb.slice/crio-87f0b0f33f09a6023913bd9ca47d6d1b7bb796351d58b6e09f188237650ae643 WatchSource:0}: Error finding container 87f0b0f33f09a6023913bd9ca47d6d1b7bb796351d58b6e09f188237650ae643: Status 404 returned error can't find the container with id 87f0b0f33f09a6023913bd9ca47d6d1b7bb796351d58b6e09f188237650ae643
Feb 03 08:51:28 crc kubenswrapper[4998]: I0203 08:51:28.428475 4998 scope.go:117] "RemoveContainer" containerID="8b73a3adb01550fb0197fbf3c3543111a8f8997fd8a03538adf5471f08cef4d6"
Feb 03 08:51:28 crc kubenswrapper[4998]: E0203 08:51:28.429082 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:51:28 crc kubenswrapper[4998]: I0203 08:51:28.439977 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a720c97-e108-4910-96a2-464c029d5f02" path="/var/lib/kubelet/pods/1a720c97-e108-4910-96a2-464c029d5f02/volumes"
Feb 03 08:51:29 crc kubenswrapper[4998]: I0203 08:51:29.005702 4998 generic.go:334] "Generic (PLEG): container finished" podID="17a2884d-b3d7-42a1-b698-d8b1c272e6fb" containerID="1dbf921ced13b071febeffeb017ecaec85acbe375cedcbeaed8420a49f8cd91a" exitCode=0
Feb 03 08:51:29 crc kubenswrapper[4998]: I0203 08:51:29.005755 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"17a2884d-b3d7-42a1-b698-d8b1c272e6fb","Type":"ContainerDied","Data":"1dbf921ced13b071febeffeb017ecaec85acbe375cedcbeaed8420a49f8cd91a"}
Feb 03 08:51:29 crc kubenswrapper[4998]: I0203 08:51:29.005800 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"17a2884d-b3d7-42a1-b698-d8b1c272e6fb","Type":"ContainerStarted","Data":"87f0b0f33f09a6023913bd9ca47d6d1b7bb796351d58b6e09f188237650ae643"}
Feb 03 08:51:30 crc kubenswrapper[4998]: I0203 08:51:30.427322 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Feb 03 08:51:30 crc kubenswrapper[4998]: I0203 08:51:30.453724 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_17a2884d-b3d7-42a1-b698-d8b1c272e6fb/mariadb-client/0.log"
Feb 03 08:51:30 crc kubenswrapper[4998]: I0203 08:51:30.471026 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pxw9b\" (UniqueName: \"kubernetes.io/projected/17a2884d-b3d7-42a1-b698-d8b1c272e6fb-kube-api-access-pxw9b\") pod \"17a2884d-b3d7-42a1-b698-d8b1c272e6fb\" (UID: \"17a2884d-b3d7-42a1-b698-d8b1c272e6fb\") "
Feb 03 08:51:30 crc kubenswrapper[4998]: I0203 08:51:30.481868 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"]
Feb 03 08:51:30 crc kubenswrapper[4998]: I0203 08:51:30.487547 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"]
Feb 03 08:51:30 crc kubenswrapper[4998]: I0203 08:51:30.491324 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17a2884d-b3d7-42a1-b698-d8b1c272e6fb-kube-api-access-pxw9b" (OuterVolumeSpecName: "kube-api-access-pxw9b") pod "17a2884d-b3d7-42a1-b698-d8b1c272e6fb" (UID: "17a2884d-b3d7-42a1-b698-d8b1c272e6fb"). InnerVolumeSpecName "kube-api-access-pxw9b". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 08:51:30 crc kubenswrapper[4998]: I0203 08:51:30.572677 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pxw9b\" (UniqueName: \"kubernetes.io/projected/17a2884d-b3d7-42a1-b698-d8b1c272e6fb-kube-api-access-pxw9b\") on node \"crc\" DevicePath \"\""
Feb 03 08:51:30 crc kubenswrapper[4998]: I0203 08:51:30.605758 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"]
Feb 03 08:51:30 crc kubenswrapper[4998]: E0203 08:51:30.606179 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17a2884d-b3d7-42a1-b698-d8b1c272e6fb" containerName="mariadb-client"
Feb 03 08:51:30 crc kubenswrapper[4998]: I0203 08:51:30.606198 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="17a2884d-b3d7-42a1-b698-d8b1c272e6fb" containerName="mariadb-client"
Feb 03 08:51:30 crc kubenswrapper[4998]: I0203 08:51:30.606392 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="17a2884d-b3d7-42a1-b698-d8b1c272e6fb" containerName="mariadb-client"
Feb 03 08:51:30 crc kubenswrapper[4998]: I0203 08:51:30.607119 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Feb 03 08:51:30 crc kubenswrapper[4998]: I0203 08:51:30.622895 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"]
Feb 03 08:51:30 crc kubenswrapper[4998]: I0203 08:51:30.673482 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m5hbf\" (UniqueName: \"kubernetes.io/projected/b6141d2a-422f-4cd6-88bb-5235dc648a00-kube-api-access-m5hbf\") pod \"mariadb-client\" (UID: \"b6141d2a-422f-4cd6-88bb-5235dc648a00\") " pod="openstack/mariadb-client"
Feb 03 08:51:30 crc kubenswrapper[4998]: I0203 08:51:30.775015 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m5hbf\" (UniqueName: \"kubernetes.io/projected/b6141d2a-422f-4cd6-88bb-5235dc648a00-kube-api-access-m5hbf\") pod \"mariadb-client\" (UID: \"b6141d2a-422f-4cd6-88bb-5235dc648a00\") " pod="openstack/mariadb-client"
Feb 03 08:51:30 crc kubenswrapper[4998]: I0203 08:51:30.804185 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m5hbf\" (UniqueName: \"kubernetes.io/projected/b6141d2a-422f-4cd6-88bb-5235dc648a00-kube-api-access-m5hbf\") pod \"mariadb-client\" (UID: \"b6141d2a-422f-4cd6-88bb-5235dc648a00\") " pod="openstack/mariadb-client"
Feb 03 08:51:30 crc kubenswrapper[4998]: I0203 08:51:30.925418 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Feb 03 08:51:31 crc kubenswrapper[4998]: I0203 08:51:31.025244 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="87f0b0f33f09a6023913bd9ca47d6d1b7bb796351d58b6e09f188237650ae643"
Feb 03 08:51:31 crc kubenswrapper[4998]: I0203 08:51:31.025306 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Feb 03 08:51:31 crc kubenswrapper[4998]: I0203 08:51:31.046985 4998 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/mariadb-client" oldPodUID="17a2884d-b3d7-42a1-b698-d8b1c272e6fb" podUID="b6141d2a-422f-4cd6-88bb-5235dc648a00"
Feb 03 08:51:31 crc kubenswrapper[4998]: I0203 08:51:31.386812 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"]
Feb 03 08:51:32 crc kubenswrapper[4998]: I0203 08:51:32.036389 4998 generic.go:334] "Generic (PLEG): container finished" podID="b6141d2a-422f-4cd6-88bb-5235dc648a00" containerID="bbd6a7a2a751e5524188ba26d5d24f456b6f1837cf7780ea64e8bd515a44e782" exitCode=0
Feb 03 08:51:32 crc kubenswrapper[4998]: I0203 08:51:32.036441 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"b6141d2a-422f-4cd6-88bb-5235dc648a00","Type":"ContainerDied","Data":"bbd6a7a2a751e5524188ba26d5d24f456b6f1837cf7780ea64e8bd515a44e782"}
Feb 03 08:51:32 crc kubenswrapper[4998]: I0203 08:51:32.036487 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"b6141d2a-422f-4cd6-88bb-5235dc648a00","Type":"ContainerStarted","Data":"b2caa0f8f9fec866610c54b308b333c062f54931eeca375fe9eea1068d7ba583"}
Feb 03 08:51:32 crc kubenswrapper[4998]: I0203 08:51:32.437067 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17a2884d-b3d7-42a1-b698-d8b1c272e6fb" path="/var/lib/kubelet/pods/17a2884d-b3d7-42a1-b698-d8b1c272e6fb/volumes"
Feb 03 08:51:33 crc kubenswrapper[4998]: I0203 08:51:33.485531 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Feb 03 08:51:33 crc kubenswrapper[4998]: I0203 08:51:33.504524 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_b6141d2a-422f-4cd6-88bb-5235dc648a00/mariadb-client/0.log"
Feb 03 08:51:33 crc kubenswrapper[4998]: I0203 08:51:33.527490 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m5hbf\" (UniqueName: \"kubernetes.io/projected/b6141d2a-422f-4cd6-88bb-5235dc648a00-kube-api-access-m5hbf\") pod \"b6141d2a-422f-4cd6-88bb-5235dc648a00\" (UID: \"b6141d2a-422f-4cd6-88bb-5235dc648a00\") "
Feb 03 08:51:33 crc kubenswrapper[4998]: I0203 08:51:33.530749 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"]
Feb 03 08:51:33 crc kubenswrapper[4998]: I0203 08:51:33.532304 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6141d2a-422f-4cd6-88bb-5235dc648a00-kube-api-access-m5hbf" (OuterVolumeSpecName: "kube-api-access-m5hbf") pod "b6141d2a-422f-4cd6-88bb-5235dc648a00" (UID: "b6141d2a-422f-4cd6-88bb-5235dc648a00"). InnerVolumeSpecName "kube-api-access-m5hbf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 08:51:33 crc kubenswrapper[4998]: I0203 08:51:33.539923 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"]
Feb 03 08:51:33 crc kubenswrapper[4998]: I0203 08:51:33.629257 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m5hbf\" (UniqueName: \"kubernetes.io/projected/b6141d2a-422f-4cd6-88bb-5235dc648a00-kube-api-access-m5hbf\") on node \"crc\" DevicePath \"\""
Feb 03 08:51:34 crc kubenswrapper[4998]: I0203 08:51:34.052206 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b2caa0f8f9fec866610c54b308b333c062f54931eeca375fe9eea1068d7ba583"
Feb 03 08:51:34 crc kubenswrapper[4998]: I0203 08:51:34.052252 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Feb 03 08:51:34 crc kubenswrapper[4998]: I0203 08:51:34.435150 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6141d2a-422f-4cd6-88bb-5235dc648a00" path="/var/lib/kubelet/pods/b6141d2a-422f-4cd6-88bb-5235dc648a00/volumes"
Feb 03 08:51:43 crc kubenswrapper[4998]: I0203 08:51:43.427714 4998 scope.go:117] "RemoveContainer" containerID="8b73a3adb01550fb0197fbf3c3543111a8f8997fd8a03538adf5471f08cef4d6"
Feb 03 08:51:43 crc kubenswrapper[4998]: E0203 08:51:43.428757 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:51:56 crc kubenswrapper[4998]: I0203 08:51:56.427984 4998 scope.go:117] "RemoveContainer" containerID="8b73a3adb01550fb0197fbf3c3543111a8f8997fd8a03538adf5471f08cef4d6"
Feb 03 08:51:56 crc kubenswrapper[4998]: E0203 08:51:56.428859 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:52:10 crc kubenswrapper[4998]: I0203 08:52:10.398955 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-fstlf"]
Feb 03 08:52:10 crc kubenswrapper[4998]: E0203 08:52:10.399740 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6141d2a-422f-4cd6-88bb-5235dc648a00" containerName="mariadb-client"
Feb 03 08:52:10 crc kubenswrapper[4998]: I0203 08:52:10.399752 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6141d2a-422f-4cd6-88bb-5235dc648a00" containerName="mariadb-client"
Feb 03 08:52:10 crc kubenswrapper[4998]: I0203 08:52:10.399934 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6141d2a-422f-4cd6-88bb-5235dc648a00" containerName="mariadb-client"
Feb 03 08:52:10 crc kubenswrapper[4998]: I0203 08:52:10.401135 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fstlf"
Feb 03 08:52:10 crc kubenswrapper[4998]: I0203 08:52:10.445731 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fstlf"]
Feb 03 08:52:10 crc kubenswrapper[4998]: I0203 08:52:10.545155 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7be99686-9ff6-4b5e-80b8-f54060e31c55-catalog-content\") pod \"redhat-marketplace-fstlf\" (UID: \"7be99686-9ff6-4b5e-80b8-f54060e31c55\") " pod="openshift-marketplace/redhat-marketplace-fstlf"
Feb 03 08:52:10 crc kubenswrapper[4998]: I0203 08:52:10.545220 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zzcws\" (UniqueName: \"kubernetes.io/projected/7be99686-9ff6-4b5e-80b8-f54060e31c55-kube-api-access-zzcws\") pod \"redhat-marketplace-fstlf\" (UID: \"7be99686-9ff6-4b5e-80b8-f54060e31c55\") " pod="openshift-marketplace/redhat-marketplace-fstlf"
Feb 03 08:52:10 crc kubenswrapper[4998]: I0203 08:52:10.545485 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7be99686-9ff6-4b5e-80b8-f54060e31c55-utilities\") pod \"redhat-marketplace-fstlf\" (UID: \"7be99686-9ff6-4b5e-80b8-f54060e31c55\") " pod="openshift-marketplace/redhat-marketplace-fstlf"
Feb 03 08:52:10 crc kubenswrapper[4998]: I0203 08:52:10.646764 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7be99686-9ff6-4b5e-80b8-f54060e31c55-utilities\") pod \"redhat-marketplace-fstlf\" (UID: \"7be99686-9ff6-4b5e-80b8-f54060e31c55\") " pod="openshift-marketplace/redhat-marketplace-fstlf"
Feb 03 08:52:10 crc kubenswrapper[4998]: I0203 08:52:10.646861 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7be99686-9ff6-4b5e-80b8-f54060e31c55-catalog-content\") pod \"redhat-marketplace-fstlf\" (UID: \"7be99686-9ff6-4b5e-80b8-f54060e31c55\") " pod="openshift-marketplace/redhat-marketplace-fstlf"
Feb 03 08:52:10 crc kubenswrapper[4998]: I0203 08:52:10.646881 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zzcws\" (UniqueName: \"kubernetes.io/projected/7be99686-9ff6-4b5e-80b8-f54060e31c55-kube-api-access-zzcws\") pod \"redhat-marketplace-fstlf\" (UID: \"7be99686-9ff6-4b5e-80b8-f54060e31c55\") " pod="openshift-marketplace/redhat-marketplace-fstlf"
Feb 03 08:52:10 crc kubenswrapper[4998]: I0203 08:52:10.647282 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7be99686-9ff6-4b5e-80b8-f54060e31c55-utilities\") pod \"redhat-marketplace-fstlf\" (UID: \"7be99686-9ff6-4b5e-80b8-f54060e31c55\") " pod="openshift-marketplace/redhat-marketplace-fstlf"
Feb 03 08:52:10 crc kubenswrapper[4998]: I0203 08:52:10.647584 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7be99686-9ff6-4b5e-80b8-f54060e31c55-catalog-content\") pod \"redhat-marketplace-fstlf\" (UID: \"7be99686-9ff6-4b5e-80b8-f54060e31c55\") " pod="openshift-marketplace/redhat-marketplace-fstlf"
Feb 03 08:52:10 crc kubenswrapper[4998]: I0203 08:52:10.675466 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zzcws\" (UniqueName: \"kubernetes.io/projected/7be99686-9ff6-4b5e-80b8-f54060e31c55-kube-api-access-zzcws\") pod \"redhat-marketplace-fstlf\" (UID: \"7be99686-9ff6-4b5e-80b8-f54060e31c55\") " pod="openshift-marketplace/redhat-marketplace-fstlf"
Feb 03 08:52:10 crc kubenswrapper[4998]: I0203 08:52:10.726956 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fstlf"
Feb 03 08:52:11 crc kubenswrapper[4998]: I0203 08:52:11.151536 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fstlf"]
Feb 03 08:52:11 crc kubenswrapper[4998]: I0203 08:52:11.333546 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fstlf" event={"ID":"7be99686-9ff6-4b5e-80b8-f54060e31c55","Type":"ContainerStarted","Data":"3c7e3b0d4024066f3f40e9aa686166fb2b34b6042ec575b234ebb2397f9d9c22"}
Feb 03 08:52:11 crc kubenswrapper[4998]: I0203 08:52:11.333598 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fstlf" event={"ID":"7be99686-9ff6-4b5e-80b8-f54060e31c55","Type":"ContainerStarted","Data":"4b713e8b3cc8b90ce2728ef3d48fe0d42a70c69b67c739becf6a50c9b22e29a6"}
Feb 03 08:52:11 crc kubenswrapper[4998]: I0203 08:52:11.427376 4998 scope.go:117] "RemoveContainer" containerID="8b73a3adb01550fb0197fbf3c3543111a8f8997fd8a03538adf5471f08cef4d6"
Feb 03 08:52:11 crc kubenswrapper[4998]: E0203 08:52:11.427676 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:52:12 crc kubenswrapper[4998]: I0203 08:52:12.343026 4998 generic.go:334] "Generic (PLEG): container finished" podID="7be99686-9ff6-4b5e-80b8-f54060e31c55" containerID="3c7e3b0d4024066f3f40e9aa686166fb2b34b6042ec575b234ebb2397f9d9c22" exitCode=0
Feb 03 08:52:12 crc kubenswrapper[4998]: I0203 08:52:12.343315 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fstlf" event={"ID":"7be99686-9ff6-4b5e-80b8-f54060e31c55","Type":"ContainerDied","Data":"3c7e3b0d4024066f3f40e9aa686166fb2b34b6042ec575b234ebb2397f9d9c22"}
Feb 03 08:52:13 crc kubenswrapper[4998]: I0203 08:52:13.354552 4998 generic.go:334] "Generic (PLEG): container finished" podID="7be99686-9ff6-4b5e-80b8-f54060e31c55" containerID="dbcae7113940971df380f5fc6db0ea7b8d5b51c5f18d2398eecd184f3dd0feed" exitCode=0
Feb 03 08:52:13 crc kubenswrapper[4998]: I0203 08:52:13.354685 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fstlf" event={"ID":"7be99686-9ff6-4b5e-80b8-f54060e31c55","Type":"ContainerDied","Data":"dbcae7113940971df380f5fc6db0ea7b8d5b51c5f18d2398eecd184f3dd0feed"}
Feb 03 08:52:14 crc kubenswrapper[4998]: I0203 08:52:14.363545 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fstlf" event={"ID":"7be99686-9ff6-4b5e-80b8-f54060e31c55","Type":"ContainerStarted","Data":"a2331f5954042bb24a21f8006f2eced4d8fb2b11976c0048dd06284f129d8c6b"}
Feb 03 08:52:14 crc kubenswrapper[4998]: I0203 08:52:14.389398 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-fstlf" podStartSLOduration=2.898702698 podStartE2EDuration="4.389376471s" podCreationTimestamp="2026-02-03 08:52:10 +0000 UTC" firstStartedPulling="2026-02-03 08:52:12.344993705 +0000 UTC m=+7570.631687531" lastFinishedPulling="2026-02-03 08:52:13.835667498 +0000 UTC m=+7572.122361304" observedRunningTime="2026-02-03 08:52:14.388874177 +0000 UTC m=+7572.675568003" watchObservedRunningTime="2026-02-03 08:52:14.389376471 +0000 UTC m=+7572.676070287"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.726834 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"]
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.728212 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.731582 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.731620 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-lp2rg"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.734134 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.741315 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.771506 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-1"]
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.773062 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-1"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.787298 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-2"]
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.788809 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-2"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.812052 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"]
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.819166 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"]
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.833466 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/251347b2-bf73-4795-93f6-2f4813fab858-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"251347b2-bf73-4795-93f6-2f4813fab858\") " pod="openstack/ovsdbserver-nb-0"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.833559 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6x55z\" (UniqueName: \"kubernetes.io/projected/251347b2-bf73-4795-93f6-2f4813fab858-kube-api-access-6x55z\") pod \"ovsdbserver-nb-0\" (UID: \"251347b2-bf73-4795-93f6-2f4813fab858\") " pod="openstack/ovsdbserver-nb-0"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.833746 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/251347b2-bf73-4795-93f6-2f4813fab858-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"251347b2-bf73-4795-93f6-2f4813fab858\") " pod="openstack/ovsdbserver-nb-0"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.833879 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/251347b2-bf73-4795-93f6-2f4813fab858-config\") pod \"ovsdbserver-nb-0\" (UID: \"251347b2-bf73-4795-93f6-2f4813fab858\") " pod="openstack/ovsdbserver-nb-0"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.834004 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-60b9ab9f-05ec-4739-95d5-7973f4d4bdc3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-60b9ab9f-05ec-4739-95d5-7973f4d4bdc3\") pod \"ovsdbserver-nb-0\" (UID: \"251347b2-bf73-4795-93f6-2f4813fab858\") " pod="openstack/ovsdbserver-nb-0"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.834089 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/251347b2-bf73-4795-93f6-2f4813fab858-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"251347b2-bf73-4795-93f6-2f4813fab858\") " pod="openstack/ovsdbserver-nb-0"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.911936 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"]
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.914768 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.919394 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.919484 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-sh9v2"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.920631 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.935886 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/251347b2-bf73-4795-93f6-2f4813fab858-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"251347b2-bf73-4795-93f6-2f4813fab858\") " pod="openstack/ovsdbserver-nb-0"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.935932 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6x55z\" (UniqueName: \"kubernetes.io/projected/251347b2-bf73-4795-93f6-2f4813fab858-kube-api-access-6x55z\") pod \"ovsdbserver-nb-0\" (UID: \"251347b2-bf73-4795-93f6-2f4813fab858\") " pod="openstack/ovsdbserver-nb-0"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.935967 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjp7z\" (UniqueName: \"kubernetes.io/projected/e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f-kube-api-access-fjp7z\") pod \"ovsdbserver-nb-2\" (UID: \"e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f\") " pod="openstack/ovsdbserver-nb-2"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.935995 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/01a6a829-ee2b-4806-87d0-ec9b6c1e8211-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"01a6a829-ee2b-4806-87d0-ec9b6c1e8211\") " pod="openstack/ovsdbserver-nb-1"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.936025 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f\") " pod="openstack/ovsdbserver-nb-2"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.936065 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-2b7a28cc-1c90-4df9-b896-28548791d59d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2b7a28cc-1c90-4df9-b896-28548791d59d\") pod \"ovsdbserver-nb-2\" (UID: \"e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f\") " pod="openstack/ovsdbserver-nb-2"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.936093 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c7a5336d-c350-4ccf-b70e-9a293a56a7b8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c7a5336d-c350-4ccf-b70e-9a293a56a7b8\") pod \"ovsdbserver-nb-1\" (UID: \"01a6a829-ee2b-4806-87d0-ec9b6c1e8211\") " pod="openstack/ovsdbserver-nb-1"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.936395 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/251347b2-bf73-4795-93f6-2f4813fab858-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"251347b2-bf73-4795-93f6-2f4813fab858\") " pod="openstack/ovsdbserver-nb-0"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.936448 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01a6a829-ee2b-4806-87d0-ec9b6c1e8211-config\") pod \"ovsdbserver-nb-1\" (UID: \"01a6a829-ee2b-4806-87d0-ec9b6c1e8211\") " pod="openstack/ovsdbserver-nb-1"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.936501 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/251347b2-bf73-4795-93f6-2f4813fab858-config\") pod \"ovsdbserver-nb-0\" (UID: \"251347b2-bf73-4795-93f6-2f4813fab858\") " pod="openstack/ovsdbserver-nb-0"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.936534 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f\") " pod="openstack/ovsdbserver-nb-2"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.936618 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01a6a829-ee2b-4806-87d0-ec9b6c1e8211-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"01a6a829-ee2b-4806-87d0-ec9b6c1e8211\") " pod="openstack/ovsdbserver-nb-1"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.936688 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f\") " pod="openstack/ovsdbserver-nb-2"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.936724 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6995p\" (UniqueName: \"kubernetes.io/projected/01a6a829-ee2b-4806-87d0-ec9b6c1e8211-kube-api-access-6995p\") pod \"ovsdbserver-nb-1\" (UID: \"01a6a829-ee2b-4806-87d0-ec9b6c1e8211\") " pod="openstack/ovsdbserver-nb-1"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.937707 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/251347b2-bf73-4795-93f6-2f4813fab858-config\") pod \"ovsdbserver-nb-0\" (UID: \"251347b2-bf73-4795-93f6-2f4813fab858\") " pod="openstack/ovsdbserver-nb-0"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.937827 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/251347b2-bf73-4795-93f6-2f4813fab858-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"251347b2-bf73-4795-93f6-2f4813fab858\") " pod="openstack/ovsdbserver-nb-0"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.944837 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-60b9ab9f-05ec-4739-95d5-7973f4d4bdc3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-60b9ab9f-05ec-4739-95d5-7973f4d4bdc3\") pod \"ovsdbserver-nb-0\" (UID: \"251347b2-bf73-4795-93f6-2f4813fab858\") " pod="openstack/ovsdbserver-nb-0"
Feb 03 08:52:15 crc
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.944937 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/01a6a829-ee2b-4806-87d0-ec9b6c1e8211-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"01a6a829-ee2b-4806-87d0-ec9b6c1e8211\") " pod="openstack/ovsdbserver-nb-1"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.945106 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/251347b2-bf73-4795-93f6-2f4813fab858-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"251347b2-bf73-4795-93f6-2f4813fab858\") " pod="openstack/ovsdbserver-nb-0"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.945216 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f-config\") pod \"ovsdbserver-nb-2\" (UID: \"e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f\") " pod="openstack/ovsdbserver-nb-2"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.945962 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/251347b2-bf73-4795-93f6-2f4813fab858-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"251347b2-bf73-4795-93f6-2f4813fab858\") " pod="openstack/ovsdbserver-nb-0"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.948866 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"]
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.951263 4998 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.951312 4998 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-60b9ab9f-05ec-4739-95d5-7973f4d4bdc3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-60b9ab9f-05ec-4739-95d5-7973f4d4bdc3\") pod \"ovsdbserver-nb-0\" (UID: \"251347b2-bf73-4795-93f6-2f4813fab858\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/e16ad9baa79449b222069f5a86125bad9e1ecb693a6c7228c8d1e8295ddadd05/globalmount\"" pod="openstack/ovsdbserver-nb-0"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.955100 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/251347b2-bf73-4795-93f6-2f4813fab858-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"251347b2-bf73-4795-93f6-2f4813fab858\") " pod="openstack/ovsdbserver-nb-0"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.968995 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-1"]
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.970849 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-1"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.976536 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6x55z\" (UniqueName: \"kubernetes.io/projected/251347b2-bf73-4795-93f6-2f4813fab858-kube-api-access-6x55z\") pod \"ovsdbserver-nb-0\" (UID: \"251347b2-bf73-4795-93f6-2f4813fab858\") " pod="openstack/ovsdbserver-nb-0"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.994850 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-2"]
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.996522 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-2"
Feb 03 08:52:15 crc kubenswrapper[4998]: I0203 08:52:15.997126 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-60b9ab9f-05ec-4739-95d5-7973f4d4bdc3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-60b9ab9f-05ec-4739-95d5-7973f4d4bdc3\") pod \"ovsdbserver-nb-0\" (UID: \"251347b2-bf73-4795-93f6-2f4813fab858\") " pod="openstack/ovsdbserver-nb-0"
Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.005881 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"]
Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.025515 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"]
Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.046636 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-2b7a28cc-1c90-4df9-b896-28548791d59d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2b7a28cc-1c90-4df9-b896-28548791d59d\") pod \"ovsdbserver-nb-2\" (UID: \"e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f\") " pod="openstack/ovsdbserver-nb-2"
Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.047736 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c7a5336d-c350-4ccf-b70e-9a293a56a7b8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c7a5336d-c350-4ccf-b70e-9a293a56a7b8\") pod \"ovsdbserver-nb-1\" (UID: \"01a6a829-ee2b-4806-87d0-ec9b6c1e8211\") " pod="openstack/ovsdbserver-nb-1"
Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.048322 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9hpcd\" (UniqueName: \"kubernetes.io/projected/c2d19551-4618-40f2-8404-e4cbf850995b-kube-api-access-9hpcd\") pod \"ovsdbserver-sb-0\" (UID: \"c2d19551-4618-40f2-8404-e4cbf850995b\") " pod="openstack/ovsdbserver-sb-0"
Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.048484 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01a6a829-ee2b-4806-87d0-ec9b6c1e8211-config\") pod \"ovsdbserver-nb-1\" (UID: \"01a6a829-ee2b-4806-87d0-ec9b6c1e8211\") " pod="openstack/ovsdbserver-nb-1"
Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.048553 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2d19551-4618-40f2-8404-e4cbf850995b-config\") pod \"ovsdbserver-sb-0\" (UID: \"c2d19551-4618-40f2-8404-e4cbf850995b\") " pod="openstack/ovsdbserver-sb-0"
Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.048610 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f\") " pod="openstack/ovsdbserver-nb-2"
Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.048646 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01a6a829-ee2b-4806-87d0-ec9b6c1e8211-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"01a6a829-ee2b-4806-87d0-ec9b6c1e8211\") " pod="openstack/ovsdbserver-nb-1"
Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.048968 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f\") " pod="openstack/ovsdbserver-nb-2"
Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.049027 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6995p\" (UniqueName: \"kubernetes.io/projected/01a6a829-ee2b-4806-87d0-ec9b6c1e8211-kube-api-access-6995p\") pod \"ovsdbserver-nb-1\" (UID: \"01a6a829-ee2b-4806-87d0-ec9b6c1e8211\") " pod="openstack/ovsdbserver-nb-1"
Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.049294 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f\") " pod="openstack/ovsdbserver-nb-2"
Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.049053 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/01a6a829-ee2b-4806-87d0-ec9b6c1e8211-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"01a6a829-ee2b-4806-87d0-ec9b6c1e8211\") " pod="openstack/ovsdbserver-nb-1"
Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.049531 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2d19551-4618-40f2-8404-e4cbf850995b-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"c2d19551-4618-40f2-8404-e4cbf850995b\") " pod="openstack/ovsdbserver-sb-0"
Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.049690 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f-config\") pod \"ovsdbserver-nb-2\" (UID: \"e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f\") " pod="openstack/ovsdbserver-nb-2"
Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.049872 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01a6a829-ee2b-4806-87d0-ec9b6c1e8211-config\") pod \"ovsdbserver-nb-1\" (UID: \"01a6a829-ee2b-4806-87d0-ec9b6c1e8211\") " pod="openstack/ovsdbserver-nb-1"
Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.050739 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f\") " pod="openstack/ovsdbserver-nb-2"
volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f-config\") pod \"ovsdbserver-nb-2\" (UID: \"e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f\") " pod="openstack/ovsdbserver-nb-2" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.050853 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c2d19551-4618-40f2-8404-e4cbf850995b-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"c2d19551-4618-40f2-8404-e4cbf850995b\") " pod="openstack/ovsdbserver-sb-0" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.050995 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c2d19551-4618-40f2-8404-e4cbf850995b-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"c2d19551-4618-40f2-8404-e4cbf850995b\") " pod="openstack/ovsdbserver-sb-0" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.051136 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/01a6a829-ee2b-4806-87d0-ec9b6c1e8211-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"01a6a829-ee2b-4806-87d0-ec9b6c1e8211\") " pod="openstack/ovsdbserver-nb-1" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.051480 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.052109 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-e71cf37c-0a72-4909-8320-5a09311b0144\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e71cf37c-0a72-4909-8320-5a09311b0144\") pod \"ovsdbserver-sb-0\" (UID: \"c2d19551-4618-40f2-8404-e4cbf850995b\") " pod="openstack/ovsdbserver-sb-0" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.052177 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjp7z\" (UniqueName: \"kubernetes.io/projected/e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f-kube-api-access-fjp7z\") pod \"ovsdbserver-nb-2\" (UID: \"e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f\") " pod="openstack/ovsdbserver-nb-2" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.052209 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/01a6a829-ee2b-4806-87d0-ec9b6c1e8211-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"01a6a829-ee2b-4806-87d0-ec9b6c1e8211\") " pod="openstack/ovsdbserver-nb-1" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.052269 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f\") " pod="openstack/ovsdbserver-nb-2" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.052899 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/01a6a829-ee2b-4806-87d0-ec9b6c1e8211-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"01a6a829-ee2b-4806-87d0-ec9b6c1e8211\") " pod="openstack/ovsdbserver-nb-1" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.057133 4998 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. 
Skipping MountDevice... Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.057210 4998 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c7a5336d-c350-4ccf-b70e-9a293a56a7b8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c7a5336d-c350-4ccf-b70e-9a293a56a7b8\") pod \"ovsdbserver-nb-1\" (UID: \"01a6a829-ee2b-4806-87d0-ec9b6c1e8211\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/53d40cfe1ad1730876cc78c3c2ed436fcd9001eabab5279a4185bffda2c62786/globalmount\"" pod="openstack/ovsdbserver-nb-1" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.057137 4998 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.057325 4998 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-2b7a28cc-1c90-4df9-b896-28548791d59d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2b7a28cc-1c90-4df9-b896-28548791d59d\") pod \"ovsdbserver-nb-2\" (UID: \"e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/eabf2ae0b2186308ef9848413f9f432579f543dd444420d4f0beec0433baf8b7/globalmount\"" pod="openstack/ovsdbserver-nb-2" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.060240 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f\") " pod="openstack/ovsdbserver-nb-2" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.065565 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01a6a829-ee2b-4806-87d0-ec9b6c1e8211-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"01a6a829-ee2b-4806-87d0-ec9b6c1e8211\") " pod="openstack/ovsdbserver-nb-1" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.066380 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6995p\" (UniqueName: \"kubernetes.io/projected/01a6a829-ee2b-4806-87d0-ec9b6c1e8211-kube-api-access-6995p\") pod \"ovsdbserver-nb-1\" (UID: \"01a6a829-ee2b-4806-87d0-ec9b6c1e8211\") " pod="openstack/ovsdbserver-nb-1" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.068977 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjp7z\" (UniqueName: \"kubernetes.io/projected/e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f-kube-api-access-fjp7z\") pod \"ovsdbserver-nb-2\" (UID: \"e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f\") " pod="openstack/ovsdbserver-nb-2" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.084257 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-2b7a28cc-1c90-4df9-b896-28548791d59d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2b7a28cc-1c90-4df9-b896-28548791d59d\") pod \"ovsdbserver-nb-2\" (UID: \"e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f\") " pod="openstack/ovsdbserver-nb-2" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.086812 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c7a5336d-c350-4ccf-b70e-9a293a56a7b8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c7a5336d-c350-4ccf-b70e-9a293a56a7b8\") pod 
\"ovsdbserver-nb-1\" (UID: \"01a6a829-ee2b-4806-87d0-ec9b6c1e8211\") " pod="openstack/ovsdbserver-nb-1" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.112810 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-2" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.154271 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2d19551-4618-40f2-8404-e4cbf850995b-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"c2d19551-4618-40f2-8404-e4cbf850995b\") " pod="openstack/ovsdbserver-sb-0" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.154318 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96054ea3-a586-4fae-b48a-e3a439c1944e-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"96054ea3-a586-4fae-b48a-e3a439c1944e\") " pod="openstack/ovsdbserver-sb-1" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.154360 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/09f3d0a5-438b-44d8-8e87-ff59e4cdecde-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"09f3d0a5-438b-44d8-8e87-ff59e4cdecde\") " pod="openstack/ovsdbserver-sb-2" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.154381 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c2d19551-4618-40f2-8404-e4cbf850995b-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"c2d19551-4618-40f2-8404-e4cbf850995b\") " pod="openstack/ovsdbserver-sb-0" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.154400 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-ee3575b4-de7e-453a-bdbe-bb103017ddb0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ee3575b4-de7e-453a-bdbe-bb103017ddb0\") pod \"ovsdbserver-sb-2\" (UID: \"09f3d0a5-438b-44d8-8e87-ff59e4cdecde\") " pod="openstack/ovsdbserver-sb-2" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.154430 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96054ea3-a586-4fae-b48a-e3a439c1944e-config\") pod \"ovsdbserver-sb-1\" (UID: \"96054ea3-a586-4fae-b48a-e3a439c1944e\") " pod="openstack/ovsdbserver-sb-1" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.154452 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/96054ea3-a586-4fae-b48a-e3a439c1944e-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"96054ea3-a586-4fae-b48a-e3a439c1944e\") " pod="openstack/ovsdbserver-sb-1" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.154470 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c2d19551-4618-40f2-8404-e4cbf850995b-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"c2d19551-4618-40f2-8404-e4cbf850995b\") " pod="openstack/ovsdbserver-sb-0" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.154508 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/09f3d0a5-438b-44d8-8e87-ff59e4cdecde-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"09f3d0a5-438b-44d8-8e87-ff59e4cdecde\") " pod="openstack/ovsdbserver-sb-2" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.154531 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-e71cf37c-0a72-4909-8320-5a09311b0144\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e71cf37c-0a72-4909-8320-5a09311b0144\") pod \"ovsdbserver-sb-0\" (UID: \"c2d19551-4618-40f2-8404-e4cbf850995b\") " pod="openstack/ovsdbserver-sb-0" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.154550 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/96054ea3-a586-4fae-b48a-e3a439c1944e-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"96054ea3-a586-4fae-b48a-e3a439c1944e\") " pod="openstack/ovsdbserver-sb-1" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.154590 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09f3d0a5-438b-44d8-8e87-ff59e4cdecde-config\") pod \"ovsdbserver-sb-2\" (UID: \"09f3d0a5-438b-44d8-8e87-ff59e4cdecde\") " pod="openstack/ovsdbserver-sb-2" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.154617 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-96b6dad9-a18e-4e97-9ffe-72818b9c4c02\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-96b6dad9-a18e-4e97-9ffe-72818b9c4c02\") pod \"ovsdbserver-sb-1\" (UID: \"96054ea3-a586-4fae-b48a-e3a439c1944e\") " pod="openstack/ovsdbserver-sb-1" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.154658 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9hpcd\" (UniqueName: \"kubernetes.io/projected/c2d19551-4618-40f2-8404-e4cbf850995b-kube-api-access-9hpcd\") pod \"ovsdbserver-sb-0\" (UID: \"c2d19551-4618-40f2-8404-e4cbf850995b\") " pod="openstack/ovsdbserver-sb-0" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.154682 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mxhfz\" (UniqueName: \"kubernetes.io/projected/09f3d0a5-438b-44d8-8e87-ff59e4cdecde-kube-api-access-mxhfz\") pod \"ovsdbserver-sb-2\" (UID: \"09f3d0a5-438b-44d8-8e87-ff59e4cdecde\") " pod="openstack/ovsdbserver-sb-2" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.154699 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2d19551-4618-40f2-8404-e4cbf850995b-config\") pod \"ovsdbserver-sb-0\" (UID: \"c2d19551-4618-40f2-8404-e4cbf850995b\") " pod="openstack/ovsdbserver-sb-0" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.154717 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/09f3d0a5-438b-44d8-8e87-ff59e4cdecde-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"09f3d0a5-438b-44d8-8e87-ff59e4cdecde\") " pod="openstack/ovsdbserver-sb-2" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.154756 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8b5kh\" (UniqueName: \"kubernetes.io/projected/96054ea3-a586-4fae-b48a-e3a439c1944e-kube-api-access-8b5kh\") pod 
\"ovsdbserver-sb-1\" (UID: \"96054ea3-a586-4fae-b48a-e3a439c1944e\") " pod="openstack/ovsdbserver-sb-1" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.156283 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c2d19551-4618-40f2-8404-e4cbf850995b-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"c2d19551-4618-40f2-8404-e4cbf850995b\") " pod="openstack/ovsdbserver-sb-0" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.157261 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c2d19551-4618-40f2-8404-e4cbf850995b-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"c2d19551-4618-40f2-8404-e4cbf850995b\") " pod="openstack/ovsdbserver-sb-0" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.157688 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2d19551-4618-40f2-8404-e4cbf850995b-config\") pod \"ovsdbserver-sb-0\" (UID: \"c2d19551-4618-40f2-8404-e4cbf850995b\") " pod="openstack/ovsdbserver-sb-0" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.163160 4998 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.163197 4998 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-e71cf37c-0a72-4909-8320-5a09311b0144\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e71cf37c-0a72-4909-8320-5a09311b0144\") pod \"ovsdbserver-sb-0\" (UID: \"c2d19551-4618-40f2-8404-e4cbf850995b\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/829940e5bf788578b7e61df6b180bb15c825a8a27ab641a5019a66bb34c2add3/globalmount\"" pod="openstack/ovsdbserver-sb-0" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.164114 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2d19551-4618-40f2-8404-e4cbf850995b-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"c2d19551-4618-40f2-8404-e4cbf850995b\") " pod="openstack/ovsdbserver-sb-0" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.173273 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9hpcd\" (UniqueName: \"kubernetes.io/projected/c2d19551-4618-40f2-8404-e4cbf850995b-kube-api-access-9hpcd\") pod \"ovsdbserver-sb-0\" (UID: \"c2d19551-4618-40f2-8404-e4cbf850995b\") " pod="openstack/ovsdbserver-sb-0" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.192831 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-e71cf37c-0a72-4909-8320-5a09311b0144\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e71cf37c-0a72-4909-8320-5a09311b0144\") pod \"ovsdbserver-sb-0\" (UID: \"c2d19551-4618-40f2-8404-e4cbf850995b\") " pod="openstack/ovsdbserver-sb-0" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.238160 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.278126 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96054ea3-a586-4fae-b48a-e3a439c1944e-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"96054ea3-a586-4fae-b48a-e3a439c1944e\") " pod="openstack/ovsdbserver-sb-1" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.278187 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/09f3d0a5-438b-44d8-8e87-ff59e4cdecde-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"09f3d0a5-438b-44d8-8e87-ff59e4cdecde\") " pod="openstack/ovsdbserver-sb-2" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.278238 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-ee3575b4-de7e-453a-bdbe-bb103017ddb0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ee3575b4-de7e-453a-bdbe-bb103017ddb0\") pod \"ovsdbserver-sb-2\" (UID: \"09f3d0a5-438b-44d8-8e87-ff59e4cdecde\") " pod="openstack/ovsdbserver-sb-2" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.278260 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96054ea3-a586-4fae-b48a-e3a439c1944e-config\") pod \"ovsdbserver-sb-1\" (UID: \"96054ea3-a586-4fae-b48a-e3a439c1944e\") " pod="openstack/ovsdbserver-sb-1" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.278287 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/96054ea3-a586-4fae-b48a-e3a439c1944e-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"96054ea3-a586-4fae-b48a-e3a439c1944e\") " pod="openstack/ovsdbserver-sb-1" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.278318 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09f3d0a5-438b-44d8-8e87-ff59e4cdecde-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"09f3d0a5-438b-44d8-8e87-ff59e4cdecde\") " pod="openstack/ovsdbserver-sb-2" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.278354 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/96054ea3-a586-4fae-b48a-e3a439c1944e-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"96054ea3-a586-4fae-b48a-e3a439c1944e\") " pod="openstack/ovsdbserver-sb-1" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.278394 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09f3d0a5-438b-44d8-8e87-ff59e4cdecde-config\") pod \"ovsdbserver-sb-2\" (UID: \"09f3d0a5-438b-44d8-8e87-ff59e4cdecde\") " pod="openstack/ovsdbserver-sb-2" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.278431 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-96b6dad9-a18e-4e97-9ffe-72818b9c4c02\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-96b6dad9-a18e-4e97-9ffe-72818b9c4c02\") pod \"ovsdbserver-sb-1\" (UID: \"96054ea3-a586-4fae-b48a-e3a439c1944e\") " pod="openstack/ovsdbserver-sb-1" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.278513 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mxhfz\" 
(UniqueName: \"kubernetes.io/projected/09f3d0a5-438b-44d8-8e87-ff59e4cdecde-kube-api-access-mxhfz\") pod \"ovsdbserver-sb-2\" (UID: \"09f3d0a5-438b-44d8-8e87-ff59e4cdecde\") " pod="openstack/ovsdbserver-sb-2" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.278544 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/09f3d0a5-438b-44d8-8e87-ff59e4cdecde-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"09f3d0a5-438b-44d8-8e87-ff59e4cdecde\") " pod="openstack/ovsdbserver-sb-2" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.278580 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8b5kh\" (UniqueName: \"kubernetes.io/projected/96054ea3-a586-4fae-b48a-e3a439c1944e-kube-api-access-8b5kh\") pod \"ovsdbserver-sb-1\" (UID: \"96054ea3-a586-4fae-b48a-e3a439c1944e\") " pod="openstack/ovsdbserver-sb-1" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.280303 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/96054ea3-a586-4fae-b48a-e3a439c1944e-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"96054ea3-a586-4fae-b48a-e3a439c1944e\") " pod="openstack/ovsdbserver-sb-1" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.281048 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09f3d0a5-438b-44d8-8e87-ff59e4cdecde-config\") pod \"ovsdbserver-sb-2\" (UID: \"09f3d0a5-438b-44d8-8e87-ff59e4cdecde\") " pod="openstack/ovsdbserver-sb-2" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.282349 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09f3d0a5-438b-44d8-8e87-ff59e4cdecde-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"09f3d0a5-438b-44d8-8e87-ff59e4cdecde\") " pod="openstack/ovsdbserver-sb-2" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.282749 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/09f3d0a5-438b-44d8-8e87-ff59e4cdecde-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"09f3d0a5-438b-44d8-8e87-ff59e4cdecde\") " pod="openstack/ovsdbserver-sb-2" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.285152 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/09f3d0a5-438b-44d8-8e87-ff59e4cdecde-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"09f3d0a5-438b-44d8-8e87-ff59e4cdecde\") " pod="openstack/ovsdbserver-sb-2" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.286322 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/96054ea3-a586-4fae-b48a-e3a439c1944e-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"96054ea3-a586-4fae-b48a-e3a439c1944e\") " pod="openstack/ovsdbserver-sb-1" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.286442 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96054ea3-a586-4fae-b48a-e3a439c1944e-config\") pod \"ovsdbserver-sb-1\" (UID: \"96054ea3-a586-4fae-b48a-e3a439c1944e\") " pod="openstack/ovsdbserver-sb-1" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.286956 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/96054ea3-a586-4fae-b48a-e3a439c1944e-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"96054ea3-a586-4fae-b48a-e3a439c1944e\") " pod="openstack/ovsdbserver-sb-1" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.294234 4998 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.294284 4998 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-96b6dad9-a18e-4e97-9ffe-72818b9c4c02\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-96b6dad9-a18e-4e97-9ffe-72818b9c4c02\") pod \"ovsdbserver-sb-1\" (UID: \"96054ea3-a586-4fae-b48a-e3a439c1944e\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/f66fb744d1f33bb1f26acc5dea072fd30b20d3bc677718654dd87a08308d3663/globalmount\"" pod="openstack/ovsdbserver-sb-1" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.302600 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mxhfz\" (UniqueName: \"kubernetes.io/projected/09f3d0a5-438b-44d8-8e87-ff59e4cdecde-kube-api-access-mxhfz\") pod \"ovsdbserver-sb-2\" (UID: \"09f3d0a5-438b-44d8-8e87-ff59e4cdecde\") " pod="openstack/ovsdbserver-sb-2" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.306920 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8b5kh\" (UniqueName: \"kubernetes.io/projected/96054ea3-a586-4fae-b48a-e3a439c1944e-kube-api-access-8b5kh\") pod \"ovsdbserver-sb-1\" (UID: \"96054ea3-a586-4fae-b48a-e3a439c1944e\") " pod="openstack/ovsdbserver-sb-1" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.307735 4998 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.307766 4998 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-ee3575b4-de7e-453a-bdbe-bb103017ddb0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ee3575b4-de7e-453a-bdbe-bb103017ddb0\") pod \"ovsdbserver-sb-2\" (UID: \"09f3d0a5-438b-44d8-8e87-ff59e4cdecde\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/ae7ce9940634bf6312b4c734011e66a9589fb8a4752005b78467a3d64674e97f/globalmount\"" pod="openstack/ovsdbserver-sb-2" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.352552 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-96b6dad9-a18e-4e97-9ffe-72818b9c4c02\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-96b6dad9-a18e-4e97-9ffe-72818b9c4c02\") pod \"ovsdbserver-sb-1\" (UID: \"96054ea3-a586-4fae-b48a-e3a439c1944e\") " pod="openstack/ovsdbserver-sb-1" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.357448 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-1" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.361648 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-ee3575b4-de7e-453a-bdbe-bb103017ddb0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ee3575b4-de7e-453a-bdbe-bb103017ddb0\") pod \"ovsdbserver-sb-2\" (UID: \"09f3d0a5-438b-44d8-8e87-ff59e4cdecde\") " pod="openstack/ovsdbserver-sb-2" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.391354 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-1" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.444831 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-2" Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.458136 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.656098 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"] Feb 03 08:52:16 crc kubenswrapper[4998]: W0203 08:52:16.661328 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode6f35c23_9d37_42e1_a6ce_d2b3c2bd304f.slice/crio-b1c4276d8fa6bb1623e27f8ee202ccfaf2183599b0aa9eed55b512a965358fe9 WatchSource:0}: Error finding container b1c4276d8fa6bb1623e27f8ee202ccfaf2183599b0aa9eed55b512a965358fe9: Status 404 returned error can't find the container with id b1c4276d8fa6bb1623e27f8ee202ccfaf2183599b0aa9eed55b512a965358fe9 Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.769851 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Feb 03 08:52:16 crc kubenswrapper[4998]: I0203 08:52:16.906433 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"] Feb 03 08:52:16 crc kubenswrapper[4998]: W0203 08:52:16.930921 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod96054ea3_a586_4fae_b48a_e3a439c1944e.slice/crio-f8e8b6796863cfafe030e5281296a9dc7dc27ecf42bb4404a289199087260b7e WatchSource:0}: Error finding container f8e8b6796863cfafe030e5281296a9dc7dc27ecf42bb4404a289199087260b7e: Status 404 returned error can't find the container with id f8e8b6796863cfafe030e5281296a9dc7dc27ecf42bb4404a289199087260b7e Feb 03 08:52:17 crc kubenswrapper[4998]: I0203 08:52:17.004160 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"] Feb 03 08:52:17 crc kubenswrapper[4998]: W0203 08:52:17.016575 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod01a6a829_ee2b_4806_87d0_ec9b6c1e8211.slice/crio-6d6a8f76e59b16bd3cb572c2fca294e9ae151fc3e3caa038daae1839c50351aa WatchSource:0}: Error finding container 6d6a8f76e59b16bd3cb572c2fca294e9ae151fc3e3caa038daae1839c50351aa: Status 404 returned error can't find the container with id 6d6a8f76e59b16bd3cb572c2fca294e9ae151fc3e3caa038daae1839c50351aa Feb 03 08:52:17 crc kubenswrapper[4998]: I0203 08:52:17.415282 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"251347b2-bf73-4795-93f6-2f4813fab858","Type":"ContainerStarted","Data":"983879d1333f30982fb017d6b2e784b32a48c6233ec04ed867e133d689d0599d"} Feb 03 08:52:17 crc kubenswrapper[4998]: I0203 08:52:17.416759 4998 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"96054ea3-a586-4fae-b48a-e3a439c1944e","Type":"ContainerStarted","Data":"f8e8b6796863cfafe030e5281296a9dc7dc27ecf42bb4404a289199087260b7e"} Feb 03 08:52:17 crc kubenswrapper[4998]: I0203 08:52:17.418020 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f","Type":"ContainerStarted","Data":"b1c4276d8fa6bb1623e27f8ee202ccfaf2183599b0aa9eed55b512a965358fe9"} Feb 03 08:52:17 crc kubenswrapper[4998]: I0203 08:52:17.419283 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"c2d19551-4618-40f2-8404-e4cbf850995b","Type":"ContainerStarted","Data":"e6d7b8129d7427f600dc94fc43d108aeb1f203684e190d46a4172e9c9ae7ab8e"} Feb 03 08:52:17 crc kubenswrapper[4998]: I0203 08:52:17.422305 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"01a6a829-ee2b-4806-87d0-ec9b6c1e8211","Type":"ContainerStarted","Data":"6d6a8f76e59b16bd3cb572c2fca294e9ae151fc3e3caa038daae1839c50351aa"} Feb 03 08:52:17 crc kubenswrapper[4998]: I0203 08:52:17.618222 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"] Feb 03 08:52:17 crc kubenswrapper[4998]: W0203 08:52:17.638470 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod09f3d0a5_438b_44d8_8e87_ff59e4cdecde.slice/crio-26ec14835643d8695b781607790188b6f648f8a39d0885296da8ef2348a98515 WatchSource:0}: Error finding container 26ec14835643d8695b781607790188b6f648f8a39d0885296da8ef2348a98515: Status 404 returned error can't find the container with id 26ec14835643d8695b781607790188b6f648f8a39d0885296da8ef2348a98515 Feb 03 08:52:18 crc kubenswrapper[4998]: I0203 08:52:18.453187 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"09f3d0a5-438b-44d8-8e87-ff59e4cdecde","Type":"ContainerStarted","Data":"26ec14835643d8695b781607790188b6f648f8a39d0885296da8ef2348a98515"} Feb 03 08:52:20 crc kubenswrapper[4998]: I0203 08:52:20.728465 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-fstlf" Feb 03 08:52:20 crc kubenswrapper[4998]: I0203 08:52:20.728761 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-fstlf" Feb 03 08:52:20 crc kubenswrapper[4998]: I0203 08:52:20.786119 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-fstlf" Feb 03 08:52:21 crc kubenswrapper[4998]: I0203 08:52:21.468298 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"96054ea3-a586-4fae-b48a-e3a439c1944e","Type":"ContainerStarted","Data":"bc978bb52b787e103c7f25f958b21bd1e730ef8bf016f7dd49b480b7dceaea85"} Feb 03 08:52:21 crc kubenswrapper[4998]: I0203 08:52:21.472053 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f","Type":"ContainerStarted","Data":"e3613c93bfd362ffb8bc913348fdb8f2d6db6451c96e3db500a81d9cf388faff"} Feb 03 08:52:21 crc kubenswrapper[4998]: I0203 08:52:21.476394 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" 
event={"ID":"c2d19551-4618-40f2-8404-e4cbf850995b","Type":"ContainerStarted","Data":"853b6a6eff7e4eba97ab7c36b41671ed084df4bcf3d66bb613c51eda85ac4c4a"} Feb 03 08:52:21 crc kubenswrapper[4998]: I0203 08:52:21.476445 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"c2d19551-4618-40f2-8404-e4cbf850995b","Type":"ContainerStarted","Data":"0168d4d8e613c4bc3841b2a02d91fddf19aae4b3d7c8ee4f83e537f0610d1702"} Feb 03 08:52:21 crc kubenswrapper[4998]: I0203 08:52:21.483195 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"01a6a829-ee2b-4806-87d0-ec9b6c1e8211","Type":"ContainerStarted","Data":"b9151d0053dc50d1289d54bb095d01419e148aca48b0882af66f84db73456ecc"} Feb 03 08:52:21 crc kubenswrapper[4998]: I0203 08:52:21.491428 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"251347b2-bf73-4795-93f6-2f4813fab858","Type":"ContainerStarted","Data":"57fa7d3d4b633e94616f9f0972618c9f50b2803ac6a58b6b1dca764cebb9940a"} Feb 03 08:52:21 crc kubenswrapper[4998]: I0203 08:52:21.507055 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=3.361920536 podStartE2EDuration="7.507002746s" podCreationTimestamp="2026-02-03 08:52:14 +0000 UTC" firstStartedPulling="2026-02-03 08:52:16.777420839 +0000 UTC m=+7575.064114645" lastFinishedPulling="2026-02-03 08:52:20.922503049 +0000 UTC m=+7579.209196855" observedRunningTime="2026-02-03 08:52:21.499713679 +0000 UTC m=+7579.786407485" watchObservedRunningTime="2026-02-03 08:52:21.507002746 +0000 UTC m=+7579.793696552" Feb 03 08:52:21 crc kubenswrapper[4998]: I0203 08:52:21.563111 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-fstlf" Feb 03 08:52:21 crc kubenswrapper[4998]: I0203 08:52:21.621910 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fstlf"] Feb 03 08:52:22 crc kubenswrapper[4998]: I0203 08:52:22.239694 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Feb 03 08:52:22 crc kubenswrapper[4998]: I0203 08:52:22.503760 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"01a6a829-ee2b-4806-87d0-ec9b6c1e8211","Type":"ContainerStarted","Data":"c3ca32a6f5168d1f9df90ef72b4cb7addd5392aaa65585f674061be0d8bb6d37"} Feb 03 08:52:22 crc kubenswrapper[4998]: I0203 08:52:22.506799 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"251347b2-bf73-4795-93f6-2f4813fab858","Type":"ContainerStarted","Data":"601c192b5d8cb0cee36d572a28c5e461dd86def5e94bc4f39d8c7882f08be76d"} Feb 03 08:52:22 crc kubenswrapper[4998]: I0203 08:52:22.509345 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"96054ea3-a586-4fae-b48a-e3a439c1944e","Type":"ContainerStarted","Data":"989f14da27328be6cb389ab6e109b077315286132d0aa1b5f58947cf4b5ca7b8"} Feb 03 08:52:22 crc kubenswrapper[4998]: I0203 08:52:22.511626 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"09f3d0a5-438b-44d8-8e87-ff59e4cdecde","Type":"ContainerStarted","Data":"eba46ce2b4a8927816f28d318610845a128d36d7ef194ec586e2975e51ae29ae"} Feb 03 08:52:22 crc kubenswrapper[4998]: I0203 08:52:22.511692 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ovsdbserver-sb-2" event={"ID":"09f3d0a5-438b-44d8-8e87-ff59e4cdecde","Type":"ContainerStarted","Data":"93c646af3134672c50f54888daf0f8f9d2ea30f16e3711d1a25d72ee98c10f98"} Feb 03 08:52:22 crc kubenswrapper[4998]: I0203 08:52:22.513930 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f","Type":"ContainerStarted","Data":"110e82806574a1c1dd3be1ce850d99ddbfd4374902ada45b8e54c4181f8b1b6d"} Feb 03 08:52:22 crc kubenswrapper[4998]: I0203 08:52:22.567129 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-2" podStartSLOduration=4.857087036 podStartE2EDuration="8.567102769s" podCreationTimestamp="2026-02-03 08:52:14 +0000 UTC" firstStartedPulling="2026-02-03 08:52:17.649012943 +0000 UTC m=+7575.935706749" lastFinishedPulling="2026-02-03 08:52:21.359028676 +0000 UTC m=+7579.645722482" observedRunningTime="2026-02-03 08:52:22.563129276 +0000 UTC m=+7580.849823082" watchObservedRunningTime="2026-02-03 08:52:22.567102769 +0000 UTC m=+7580.853796595" Feb 03 08:52:22 crc kubenswrapper[4998]: I0203 08:52:22.593365 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-1" podStartSLOduration=4.618377091 podStartE2EDuration="8.593343853s" podCreationTimestamp="2026-02-03 08:52:14 +0000 UTC" firstStartedPulling="2026-02-03 08:52:17.024007787 +0000 UTC m=+7575.310701593" lastFinishedPulling="2026-02-03 08:52:20.998974549 +0000 UTC m=+7579.285668355" observedRunningTime="2026-02-03 08:52:22.589237437 +0000 UTC m=+7580.875931253" watchObservedRunningTime="2026-02-03 08:52:22.593343853 +0000 UTC m=+7580.880037659" Feb 03 08:52:22 crc kubenswrapper[4998]: I0203 08:52:22.609191 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-1" podStartSLOduration=4.604031425 podStartE2EDuration="8.609169453s" podCreationTimestamp="2026-02-03 08:52:14 +0000 UTC" firstStartedPulling="2026-02-03 08:52:16.936421221 +0000 UTC m=+7575.223115027" lastFinishedPulling="2026-02-03 08:52:20.941559249 +0000 UTC m=+7579.228253055" observedRunningTime="2026-02-03 08:52:22.608135523 +0000 UTC m=+7580.894829339" watchObservedRunningTime="2026-02-03 08:52:22.609169453 +0000 UTC m=+7580.895863269" Feb 03 08:52:22 crc kubenswrapper[4998]: I0203 08:52:22.627675 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=4.139156371 podStartE2EDuration="8.627655447s" podCreationTimestamp="2026-02-03 08:52:14 +0000 UTC" firstStartedPulling="2026-02-03 08:52:16.488594022 +0000 UTC m=+7574.775287828" lastFinishedPulling="2026-02-03 08:52:20.977093068 +0000 UTC m=+7579.263786904" observedRunningTime="2026-02-03 08:52:22.625162966 +0000 UTC m=+7580.911856782" watchObservedRunningTime="2026-02-03 08:52:22.627655447 +0000 UTC m=+7580.914349253" Feb 03 08:52:22 crc kubenswrapper[4998]: I0203 08:52:22.647690 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-2" podStartSLOduration=4.387123708 podStartE2EDuration="8.647671925s" podCreationTimestamp="2026-02-03 08:52:14 +0000 UTC" firstStartedPulling="2026-02-03 08:52:16.663764693 +0000 UTC m=+7574.950458509" lastFinishedPulling="2026-02-03 08:52:20.92431292 +0000 UTC m=+7579.211006726" observedRunningTime="2026-02-03 08:52:22.643045294 +0000 UTC m=+7580.929739110" watchObservedRunningTime="2026-02-03 08:52:22.647671925 +0000 UTC 
m=+7580.934365731" Feb 03 08:52:23 crc kubenswrapper[4998]: I0203 08:52:23.520929 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-fstlf" podUID="7be99686-9ff6-4b5e-80b8-f54060e31c55" containerName="registry-server" containerID="cri-o://a2331f5954042bb24a21f8006f2eced4d8fb2b11976c0048dd06284f129d8c6b" gracePeriod=2 Feb 03 08:52:24 crc kubenswrapper[4998]: I0203 08:52:24.093803 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fstlf" Feb 03 08:52:24 crc kubenswrapper[4998]: I0203 08:52:24.269771 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zzcws\" (UniqueName: \"kubernetes.io/projected/7be99686-9ff6-4b5e-80b8-f54060e31c55-kube-api-access-zzcws\") pod \"7be99686-9ff6-4b5e-80b8-f54060e31c55\" (UID: \"7be99686-9ff6-4b5e-80b8-f54060e31c55\") " Feb 03 08:52:24 crc kubenswrapper[4998]: I0203 08:52:24.269896 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7be99686-9ff6-4b5e-80b8-f54060e31c55-catalog-content\") pod \"7be99686-9ff6-4b5e-80b8-f54060e31c55\" (UID: \"7be99686-9ff6-4b5e-80b8-f54060e31c55\") " Feb 03 08:52:24 crc kubenswrapper[4998]: I0203 08:52:24.269946 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7be99686-9ff6-4b5e-80b8-f54060e31c55-utilities\") pod \"7be99686-9ff6-4b5e-80b8-f54060e31c55\" (UID: \"7be99686-9ff6-4b5e-80b8-f54060e31c55\") " Feb 03 08:52:24 crc kubenswrapper[4998]: I0203 08:52:24.270857 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7be99686-9ff6-4b5e-80b8-f54060e31c55-utilities" (OuterVolumeSpecName: "utilities") pod "7be99686-9ff6-4b5e-80b8-f54060e31c55" (UID: "7be99686-9ff6-4b5e-80b8-f54060e31c55"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:52:24 crc kubenswrapper[4998]: I0203 08:52:24.286749 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7be99686-9ff6-4b5e-80b8-f54060e31c55-kube-api-access-zzcws" (OuterVolumeSpecName: "kube-api-access-zzcws") pod "7be99686-9ff6-4b5e-80b8-f54060e31c55" (UID: "7be99686-9ff6-4b5e-80b8-f54060e31c55"). InnerVolumeSpecName "kube-api-access-zzcws". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:52:24 crc kubenswrapper[4998]: I0203 08:52:24.292804 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7be99686-9ff6-4b5e-80b8-f54060e31c55-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7be99686-9ff6-4b5e-80b8-f54060e31c55" (UID: "7be99686-9ff6-4b5e-80b8-f54060e31c55"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:52:24 crc kubenswrapper[4998]: I0203 08:52:24.371535 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zzcws\" (UniqueName: \"kubernetes.io/projected/7be99686-9ff6-4b5e-80b8-f54060e31c55-kube-api-access-zzcws\") on node \"crc\" DevicePath \"\"" Feb 03 08:52:24 crc kubenswrapper[4998]: I0203 08:52:24.371571 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7be99686-9ff6-4b5e-80b8-f54060e31c55-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 08:52:24 crc kubenswrapper[4998]: I0203 08:52:24.371582 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7be99686-9ff6-4b5e-80b8-f54060e31c55-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 08:52:24 crc kubenswrapper[4998]: I0203 08:52:24.531995 4998 generic.go:334] "Generic (PLEG): container finished" podID="7be99686-9ff6-4b5e-80b8-f54060e31c55" containerID="a2331f5954042bb24a21f8006f2eced4d8fb2b11976c0048dd06284f129d8c6b" exitCode=0 Feb 03 08:52:24 crc kubenswrapper[4998]: I0203 08:52:24.532061 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fstlf" event={"ID":"7be99686-9ff6-4b5e-80b8-f54060e31c55","Type":"ContainerDied","Data":"a2331f5954042bb24a21f8006f2eced4d8fb2b11976c0048dd06284f129d8c6b"} Feb 03 08:52:24 crc kubenswrapper[4998]: I0203 08:52:24.532103 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fstlf" event={"ID":"7be99686-9ff6-4b5e-80b8-f54060e31c55","Type":"ContainerDied","Data":"4b713e8b3cc8b90ce2728ef3d48fe0d42a70c69b67c739becf6a50c9b22e29a6"} Feb 03 08:52:24 crc kubenswrapper[4998]: I0203 08:52:24.532123 4998 util.go:48] "No ready sandbox for pod can be found. 
Feb 03 08:52:24 crc kubenswrapper[4998]: I0203 08:52:24.532134 4998 scope.go:117] "RemoveContainer" containerID="a2331f5954042bb24a21f8006f2eced4d8fb2b11976c0048dd06284f129d8c6b"
Feb 03 08:52:24 crc kubenswrapper[4998]: I0203 08:52:24.562878 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fstlf"]
Feb 03 08:52:24 crc kubenswrapper[4998]: I0203 08:52:24.568670 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-fstlf"]
Feb 03 08:52:24 crc kubenswrapper[4998]: I0203 08:52:24.569266 4998 scope.go:117] "RemoveContainer" containerID="dbcae7113940971df380f5fc6db0ea7b8d5b51c5f18d2398eecd184f3dd0feed"
Feb 03 08:52:24 crc kubenswrapper[4998]: I0203 08:52:24.595107 4998 scope.go:117] "RemoveContainer" containerID="3c7e3b0d4024066f3f40e9aa686166fb2b34b6042ec575b234ebb2397f9d9c22"
Feb 03 08:52:24 crc kubenswrapper[4998]: I0203 08:52:24.641135 4998 scope.go:117] "RemoveContainer" containerID="a2331f5954042bb24a21f8006f2eced4d8fb2b11976c0048dd06284f129d8c6b"
Feb 03 08:52:24 crc kubenswrapper[4998]: E0203 08:52:24.641769 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2331f5954042bb24a21f8006f2eced4d8fb2b11976c0048dd06284f129d8c6b\": container with ID starting with a2331f5954042bb24a21f8006f2eced4d8fb2b11976c0048dd06284f129d8c6b not found: ID does not exist" containerID="a2331f5954042bb24a21f8006f2eced4d8fb2b11976c0048dd06284f129d8c6b"
Feb 03 08:52:24 crc kubenswrapper[4998]: I0203 08:52:24.641886 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2331f5954042bb24a21f8006f2eced4d8fb2b11976c0048dd06284f129d8c6b"} err="failed to get container status \"a2331f5954042bb24a21f8006f2eced4d8fb2b11976c0048dd06284f129d8c6b\": rpc error: code = NotFound desc = could not find container \"a2331f5954042bb24a21f8006f2eced4d8fb2b11976c0048dd06284f129d8c6b\": container with ID starting with a2331f5954042bb24a21f8006f2eced4d8fb2b11976c0048dd06284f129d8c6b not found: ID does not exist"
Feb 03 08:52:24 crc kubenswrapper[4998]: I0203 08:52:24.641921 4998 scope.go:117] "RemoveContainer" containerID="dbcae7113940971df380f5fc6db0ea7b8d5b51c5f18d2398eecd184f3dd0feed"
Feb 03 08:52:24 crc kubenswrapper[4998]: E0203 08:52:24.642420 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dbcae7113940971df380f5fc6db0ea7b8d5b51c5f18d2398eecd184f3dd0feed\": container with ID starting with dbcae7113940971df380f5fc6db0ea7b8d5b51c5f18d2398eecd184f3dd0feed not found: ID does not exist" containerID="dbcae7113940971df380f5fc6db0ea7b8d5b51c5f18d2398eecd184f3dd0feed"
Feb 03 08:52:24 crc kubenswrapper[4998]: I0203 08:52:24.642459 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dbcae7113940971df380f5fc6db0ea7b8d5b51c5f18d2398eecd184f3dd0feed"} err="failed to get container status \"dbcae7113940971df380f5fc6db0ea7b8d5b51c5f18d2398eecd184f3dd0feed\": rpc error: code = NotFound desc = could not find container \"dbcae7113940971df380f5fc6db0ea7b8d5b51c5f18d2398eecd184f3dd0feed\": container with ID starting with dbcae7113940971df380f5fc6db0ea7b8d5b51c5f18d2398eecd184f3dd0feed not found: ID does not exist"
Feb 03 08:52:24 crc kubenswrapper[4998]: I0203 08:52:24.642483 4998 scope.go:117] "RemoveContainer" containerID="3c7e3b0d4024066f3f40e9aa686166fb2b34b6042ec575b234ebb2397f9d9c22"
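The paired E/I entries here and just below ("ContainerStatus from runtime service failed" followed by "DeleteContainer returned error") are the kubelet asking CRI-O to delete containers the runtime has already garbage-collected: the runtime answers with gRPC NotFound, and the kubelet logs the error and carries on. Cleanup like this is conventionally written to treat NotFound as success so retries stay idempotent; a small sketch of that pattern (removeIfPresent is an illustrative helper, not kubelet source):

    package main

    import (
        "google.golang.org/grpc/codes"
        "google.golang.org/grpc/status"
    )

    // removeIfPresent treats a gRPC NotFound from the runtime as "already
    // deleted" so that repeated cleanup passes are idempotent.
    func removeIfPresent(remove func(id string) error, id string) error {
        if err := remove(id); err != nil && status.Code(err) != codes.NotFound {
            return err
        }
        return nil
    }

    func main() {}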
containerID="3c7e3b0d4024066f3f40e9aa686166fb2b34b6042ec575b234ebb2397f9d9c22" Feb 03 08:52:24 crc kubenswrapper[4998]: E0203 08:52:24.642992 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c7e3b0d4024066f3f40e9aa686166fb2b34b6042ec575b234ebb2397f9d9c22\": container with ID starting with 3c7e3b0d4024066f3f40e9aa686166fb2b34b6042ec575b234ebb2397f9d9c22 not found: ID does not exist" containerID="3c7e3b0d4024066f3f40e9aa686166fb2b34b6042ec575b234ebb2397f9d9c22" Feb 03 08:52:24 crc kubenswrapper[4998]: I0203 08:52:24.643031 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c7e3b0d4024066f3f40e9aa686166fb2b34b6042ec575b234ebb2397f9d9c22"} err="failed to get container status \"3c7e3b0d4024066f3f40e9aa686166fb2b34b6042ec575b234ebb2397f9d9c22\": rpc error: code = NotFound desc = could not find container \"3c7e3b0d4024066f3f40e9aa686166fb2b34b6042ec575b234ebb2397f9d9c22\": container with ID starting with 3c7e3b0d4024066f3f40e9aa686166fb2b34b6042ec575b234ebb2397f9d9c22 not found: ID does not exist" Feb 03 08:52:25 crc kubenswrapper[4998]: I0203 08:52:25.052271 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Feb 03 08:52:25 crc kubenswrapper[4998]: I0203 08:52:25.094771 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Feb 03 08:52:25 crc kubenswrapper[4998]: I0203 08:52:25.113768 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-2" Feb 03 08:52:25 crc kubenswrapper[4998]: I0203 08:52:25.176057 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-2" Feb 03 08:52:25 crc kubenswrapper[4998]: I0203 08:52:25.280655 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Feb 03 08:52:25 crc kubenswrapper[4998]: I0203 08:52:25.281083 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Feb 03 08:52:25 crc kubenswrapper[4998]: I0203 08:52:25.358146 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-1" Feb 03 08:52:25 crc kubenswrapper[4998]: I0203 08:52:25.393192 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-1" Feb 03 08:52:25 crc kubenswrapper[4998]: I0203 08:52:25.397474 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-1" Feb 03 08:52:25 crc kubenswrapper[4998]: I0203 08:52:25.445959 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-2" Feb 03 08:52:25 crc kubenswrapper[4998]: I0203 08:52:25.449746 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-1" Feb 03 08:52:25 crc kubenswrapper[4998]: I0203 08:52:25.504601 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-2" Feb 03 08:52:25 crc kubenswrapper[4998]: I0203 08:52:25.560744 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-1" Feb 03 08:52:25 crc kubenswrapper[4998]: I0203 08:52:25.560800 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-2" Feb 03 08:52:25 crc kubenswrapper[4998]: 
Feb 03 08:52:25 crc kubenswrapper[4998]: I0203 08:52:25.560822 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0"
Feb 03 08:52:25 crc kubenswrapper[4998]: I0203 08:52:25.560834 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-1"
Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.099736 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0"
Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.156926 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-2"
Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.298657 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0"
Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.431798 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7d9d759b4f-rk6jh"]
Feb 03 08:52:26 crc kubenswrapper[4998]: E0203 08:52:26.432267 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7be99686-9ff6-4b5e-80b8-f54060e31c55" containerName="extract-utilities"
Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.432292 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="7be99686-9ff6-4b5e-80b8-f54060e31c55" containerName="extract-utilities"
Feb 03 08:52:26 crc kubenswrapper[4998]: E0203 08:52:26.432306 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7be99686-9ff6-4b5e-80b8-f54060e31c55" containerName="extract-content"
Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.432313 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="7be99686-9ff6-4b5e-80b8-f54060e31c55" containerName="extract-content"
Feb 03 08:52:26 crc kubenswrapper[4998]: E0203 08:52:26.432340 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7be99686-9ff6-4b5e-80b8-f54060e31c55" containerName="registry-server"
Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.432348 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="7be99686-9ff6-4b5e-80b8-f54060e31c55" containerName="registry-server"
Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.432558 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="7be99686-9ff6-4b5e-80b8-f54060e31c55" containerName="registry-server"
Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.437618 4998 scope.go:117] "RemoveContainer" containerID="8b73a3adb01550fb0197fbf3c3543111a8f8997fd8a03538adf5471f08cef4d6"
Feb 03 08:52:26 crc kubenswrapper[4998]: E0203 08:52:26.437858 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.438703 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7d9d759b4f-rk6jh"
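The machine-config-daemon error above is restart throttling rather than a new failure: a container in CrashLoopBackOff waits an exponentially growing interval between restart attempts, and "back-off 5m0s" means the cap has been reached. The customary kubelet behaviour is a 10s initial delay doubling up to a 5m ceiling; treat those constants as an assumption here, since the log only shows the cap. A sketch of the capped doubling:

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        // Assumed kubelet-style restart backoff: 10s initial delay doubling
        // to a 5m cap; the constants are conventional defaults, not values
        // stated by this log.
        const initial, ceiling = 10 * time.Second, 5 * time.Minute
        delay := initial
        for attempt := 1; attempt <= 8; attempt++ {
            fmt.Printf("restart %d: wait %v\n", attempt, delay)
            if delay *= 2; delay > ceiling {
                delay = ceiling // stays at "back-off 5m0s" from here on
            }
        }
    }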
Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.446596 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb"
Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.535004 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/64ed3603-f2b2-4b8d-ac69-b70df1f604b3-ovsdbserver-nb\") pod \"dnsmasq-dns-7d9d759b4f-rk6jh\" (UID: \"64ed3603-f2b2-4b8d-ac69-b70df1f604b3\") " pod="openstack/dnsmasq-dns-7d9d759b4f-rk6jh"
Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.535300 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64ed3603-f2b2-4b8d-ac69-b70df1f604b3-config\") pod \"dnsmasq-dns-7d9d759b4f-rk6jh\" (UID: \"64ed3603-f2b2-4b8d-ac69-b70df1f604b3\") " pod="openstack/dnsmasq-dns-7d9d759b4f-rk6jh"
Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.535388 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mttnk\" (UniqueName: \"kubernetes.io/projected/64ed3603-f2b2-4b8d-ac69-b70df1f604b3-kube-api-access-mttnk\") pod \"dnsmasq-dns-7d9d759b4f-rk6jh\" (UID: \"64ed3603-f2b2-4b8d-ac69-b70df1f604b3\") " pod="openstack/dnsmasq-dns-7d9d759b4f-rk6jh"
Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.535549 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64ed3603-f2b2-4b8d-ac69-b70df1f604b3-dns-svc\") pod \"dnsmasq-dns-7d9d759b4f-rk6jh\" (UID: \"64ed3603-f2b2-4b8d-ac69-b70df1f604b3\") " pod="openstack/dnsmasq-dns-7d9d759b4f-rk6jh"
Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.535903 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7be99686-9ff6-4b5e-80b8-f54060e31c55" path="/var/lib/kubelet/pods/7be99686-9ff6-4b5e-80b8-f54060e31c55/volumes"
Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.536772 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7d9d759b4f-rk6jh"]
Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.571671 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-2"
Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.639774 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/64ed3603-f2b2-4b8d-ac69-b70df1f604b3-ovsdbserver-nb\") pod \"dnsmasq-dns-7d9d759b4f-rk6jh\" (UID: \"64ed3603-f2b2-4b8d-ac69-b70df1f604b3\") " pod="openstack/dnsmasq-dns-7d9d759b4f-rk6jh"
Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.639874 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64ed3603-f2b2-4b8d-ac69-b70df1f604b3-config\") pod \"dnsmasq-dns-7d9d759b4f-rk6jh\" (UID: \"64ed3603-f2b2-4b8d-ac69-b70df1f604b3\") " pod="openstack/dnsmasq-dns-7d9d759b4f-rk6jh"
Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.639914 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mttnk\" (UniqueName: \"kubernetes.io/projected/64ed3603-f2b2-4b8d-ac69-b70df1f604b3-kube-api-access-mttnk\") pod \"dnsmasq-dns-7d9d759b4f-rk6jh\" (UID: \"64ed3603-f2b2-4b8d-ac69-b70df1f604b3\") " pod="openstack/dnsmasq-dns-7d9d759b4f-rk6jh"
pod="openstack/dnsmasq-dns-7d9d759b4f-rk6jh" Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.639987 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64ed3603-f2b2-4b8d-ac69-b70df1f604b3-dns-svc\") pod \"dnsmasq-dns-7d9d759b4f-rk6jh\" (UID: \"64ed3603-f2b2-4b8d-ac69-b70df1f604b3\") " pod="openstack/dnsmasq-dns-7d9d759b4f-rk6jh" Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.641086 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64ed3603-f2b2-4b8d-ac69-b70df1f604b3-dns-svc\") pod \"dnsmasq-dns-7d9d759b4f-rk6jh\" (UID: \"64ed3603-f2b2-4b8d-ac69-b70df1f604b3\") " pod="openstack/dnsmasq-dns-7d9d759b4f-rk6jh" Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.641499 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64ed3603-f2b2-4b8d-ac69-b70df1f604b3-config\") pod \"dnsmasq-dns-7d9d759b4f-rk6jh\" (UID: \"64ed3603-f2b2-4b8d-ac69-b70df1f604b3\") " pod="openstack/dnsmasq-dns-7d9d759b4f-rk6jh" Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.642530 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/64ed3603-f2b2-4b8d-ac69-b70df1f604b3-ovsdbserver-nb\") pod \"dnsmasq-dns-7d9d759b4f-rk6jh\" (UID: \"64ed3603-f2b2-4b8d-ac69-b70df1f604b3\") " pod="openstack/dnsmasq-dns-7d9d759b4f-rk6jh" Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.665599 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-1" Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.694101 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-1" Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.697572 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mttnk\" (UniqueName: \"kubernetes.io/projected/64ed3603-f2b2-4b8d-ac69-b70df1f604b3-kube-api-access-mttnk\") pod \"dnsmasq-dns-7d9d759b4f-rk6jh\" (UID: \"64ed3603-f2b2-4b8d-ac69-b70df1f604b3\") " pod="openstack/dnsmasq-dns-7d9d759b4f-rk6jh" Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.775770 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7d9d759b4f-rk6jh" Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.837933 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7d9d759b4f-rk6jh"] Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.878235 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7b779fdfb7-g89gs"] Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.891156 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7b779fdfb7-g89gs"] Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.891281 4998 util.go:30] "No sandbox for pod can be found. 
Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.893702 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb"
Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.947751 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6de37a47-b912-4648-afd8-43e6a6f8187f-ovsdbserver-nb\") pod \"dnsmasq-dns-7b779fdfb7-g89gs\" (UID: \"6de37a47-b912-4648-afd8-43e6a6f8187f\") " pod="openstack/dnsmasq-dns-7b779fdfb7-g89gs"
Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.948071 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tp6ww\" (UniqueName: \"kubernetes.io/projected/6de37a47-b912-4648-afd8-43e6a6f8187f-kube-api-access-tp6ww\") pod \"dnsmasq-dns-7b779fdfb7-g89gs\" (UID: \"6de37a47-b912-4648-afd8-43e6a6f8187f\") " pod="openstack/dnsmasq-dns-7b779fdfb7-g89gs"
Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.948095 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6de37a47-b912-4648-afd8-43e6a6f8187f-config\") pod \"dnsmasq-dns-7b779fdfb7-g89gs\" (UID: \"6de37a47-b912-4648-afd8-43e6a6f8187f\") " pod="openstack/dnsmasq-dns-7b779fdfb7-g89gs"
Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.948111 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6de37a47-b912-4648-afd8-43e6a6f8187f-ovsdbserver-sb\") pod \"dnsmasq-dns-7b779fdfb7-g89gs\" (UID: \"6de37a47-b912-4648-afd8-43e6a6f8187f\") " pod="openstack/dnsmasq-dns-7b779fdfb7-g89gs"
Feb 03 08:52:26 crc kubenswrapper[4998]: I0203 08:52:26.948152 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6de37a47-b912-4648-afd8-43e6a6f8187f-dns-svc\") pod \"dnsmasq-dns-7b779fdfb7-g89gs\" (UID: \"6de37a47-b912-4648-afd8-43e6a6f8187f\") " pod="openstack/dnsmasq-dns-7b779fdfb7-g89gs"
Feb 03 08:52:27 crc kubenswrapper[4998]: I0203 08:52:27.049066 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tp6ww\" (UniqueName: \"kubernetes.io/projected/6de37a47-b912-4648-afd8-43e6a6f8187f-kube-api-access-tp6ww\") pod \"dnsmasq-dns-7b779fdfb7-g89gs\" (UID: \"6de37a47-b912-4648-afd8-43e6a6f8187f\") " pod="openstack/dnsmasq-dns-7b779fdfb7-g89gs"
Feb 03 08:52:27 crc kubenswrapper[4998]: I0203 08:52:27.049113 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6de37a47-b912-4648-afd8-43e6a6f8187f-config\") pod \"dnsmasq-dns-7b779fdfb7-g89gs\" (UID: \"6de37a47-b912-4648-afd8-43e6a6f8187f\") " pod="openstack/dnsmasq-dns-7b779fdfb7-g89gs"
Feb 03 08:52:27 crc kubenswrapper[4998]: I0203 08:52:27.049133 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6de37a47-b912-4648-afd8-43e6a6f8187f-ovsdbserver-sb\") pod \"dnsmasq-dns-7b779fdfb7-g89gs\" (UID: \"6de37a47-b912-4648-afd8-43e6a6f8187f\") " pod="openstack/dnsmasq-dns-7b779fdfb7-g89gs"
Feb 03 08:52:27 crc kubenswrapper[4998]: I0203 08:52:27.049183 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6de37a47-b912-4648-afd8-43e6a6f8187f-dns-svc\") pod \"dnsmasq-dns-7b779fdfb7-g89gs\" (UID: \"6de37a47-b912-4648-afd8-43e6a6f8187f\") " pod="openstack/dnsmasq-dns-7b779fdfb7-g89gs"
for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6de37a47-b912-4648-afd8-43e6a6f8187f-dns-svc\") pod \"dnsmasq-dns-7b779fdfb7-g89gs\" (UID: \"6de37a47-b912-4648-afd8-43e6a6f8187f\") " pod="openstack/dnsmasq-dns-7b779fdfb7-g89gs" Feb 03 08:52:27 crc kubenswrapper[4998]: I0203 08:52:27.049227 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6de37a47-b912-4648-afd8-43e6a6f8187f-ovsdbserver-nb\") pod \"dnsmasq-dns-7b779fdfb7-g89gs\" (UID: \"6de37a47-b912-4648-afd8-43e6a6f8187f\") " pod="openstack/dnsmasq-dns-7b779fdfb7-g89gs" Feb 03 08:52:27 crc kubenswrapper[4998]: I0203 08:52:27.050143 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6de37a47-b912-4648-afd8-43e6a6f8187f-ovsdbserver-nb\") pod \"dnsmasq-dns-7b779fdfb7-g89gs\" (UID: \"6de37a47-b912-4648-afd8-43e6a6f8187f\") " pod="openstack/dnsmasq-dns-7b779fdfb7-g89gs" Feb 03 08:52:27 crc kubenswrapper[4998]: I0203 08:52:27.050233 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6de37a47-b912-4648-afd8-43e6a6f8187f-ovsdbserver-sb\") pod \"dnsmasq-dns-7b779fdfb7-g89gs\" (UID: \"6de37a47-b912-4648-afd8-43e6a6f8187f\") " pod="openstack/dnsmasq-dns-7b779fdfb7-g89gs" Feb 03 08:52:27 crc kubenswrapper[4998]: I0203 08:52:27.050671 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6de37a47-b912-4648-afd8-43e6a6f8187f-dns-svc\") pod \"dnsmasq-dns-7b779fdfb7-g89gs\" (UID: \"6de37a47-b912-4648-afd8-43e6a6f8187f\") " pod="openstack/dnsmasq-dns-7b779fdfb7-g89gs" Feb 03 08:52:27 crc kubenswrapper[4998]: I0203 08:52:27.050878 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6de37a47-b912-4648-afd8-43e6a6f8187f-config\") pod \"dnsmasq-dns-7b779fdfb7-g89gs\" (UID: \"6de37a47-b912-4648-afd8-43e6a6f8187f\") " pod="openstack/dnsmasq-dns-7b779fdfb7-g89gs" Feb 03 08:52:27 crc kubenswrapper[4998]: I0203 08:52:27.069515 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tp6ww\" (UniqueName: \"kubernetes.io/projected/6de37a47-b912-4648-afd8-43e6a6f8187f-kube-api-access-tp6ww\") pod \"dnsmasq-dns-7b779fdfb7-g89gs\" (UID: \"6de37a47-b912-4648-afd8-43e6a6f8187f\") " pod="openstack/dnsmasq-dns-7b779fdfb7-g89gs" Feb 03 08:52:27 crc kubenswrapper[4998]: I0203 08:52:27.223561 4998 util.go:30] "No sandbox for pod can be found. 
Feb 03 08:52:27 crc kubenswrapper[4998]: I0203 08:52:27.335323 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7d9d759b4f-rk6jh"]
Feb 03 08:52:27 crc kubenswrapper[4998]: I0203 08:52:27.584597 4998 generic.go:334] "Generic (PLEG): container finished" podID="64ed3603-f2b2-4b8d-ac69-b70df1f604b3" containerID="dba173a9d44d0c602aa582a3ba22557cd802a0cac3ce9a3fac05a16a173a1335" exitCode=0
Feb 03 08:52:27 crc kubenswrapper[4998]: I0203 08:52:27.584969 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d9d759b4f-rk6jh" event={"ID":"64ed3603-f2b2-4b8d-ac69-b70df1f604b3","Type":"ContainerDied","Data":"dba173a9d44d0c602aa582a3ba22557cd802a0cac3ce9a3fac05a16a173a1335"}
Feb 03 08:52:27 crc kubenswrapper[4998]: I0203 08:52:27.585006 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d9d759b4f-rk6jh" event={"ID":"64ed3603-f2b2-4b8d-ac69-b70df1f604b3","Type":"ContainerStarted","Data":"89e73979d688b7a1f47dd5effb7f5abb4e0775c8e866e974596fc10c8eeb74f4"}
Feb 03 08:52:27 crc kubenswrapper[4998]: I0203 08:52:27.662802 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7b779fdfb7-g89gs"]
Feb 03 08:52:27 crc kubenswrapper[4998]: W0203 08:52:27.669126 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6de37a47_b912_4648_afd8_43e6a6f8187f.slice/crio-fa60074854880cd8d8127fc43d3ae05ad5073ec7a64998a2a7eed382c1ebfb06 WatchSource:0}: Error finding container fa60074854880cd8d8127fc43d3ae05ad5073ec7a64998a2a7eed382c1ebfb06: Status 404 returned error can't find the container with id fa60074854880cd8d8127fc43d3ae05ad5073ec7a64998a2a7eed382c1ebfb06
Feb 03 08:52:27 crc kubenswrapper[4998]: I0203 08:52:27.920415 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7d9d759b4f-rk6jh"
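The W-level manager.go:1169 entry above is a benign race: the cgroup watcher saw the new crio-fa6007... cgroup before CRI-O finished registering the container, so the lookup returned 404. The same container ID appears in a ContainerStarted event at 08:52:28.592867 below, so the failure was transient. Consumers of such watch events usually tolerate a not-yet-existing path and retry; a small sketch of that tolerance (illustrative helper, not cadvisor source):

    package main

    import (
        "errors"
        "io/fs"
        "os"
        "time"
    )

    // waitForCgroup polls a cgroup path reported by a watcher that may fire
    // before the runtime finishes creating it, as in the 404 warning above.
    func waitForCgroup(path string, attempts int, delay time.Duration) error {
        var err error
        for i := 0; i < attempts; i++ {
            if _, err = os.Stat(path); err == nil || !errors.Is(err, fs.ErrNotExist) {
                return err
            }
            time.Sleep(delay)
        }
        return err
    }

    func main() {}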
Feb 03 08:52:27 crc kubenswrapper[4998]: I0203 08:52:27.967381 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/64ed3603-f2b2-4b8d-ac69-b70df1f604b3-ovsdbserver-nb\") pod \"64ed3603-f2b2-4b8d-ac69-b70df1f604b3\" (UID: \"64ed3603-f2b2-4b8d-ac69-b70df1f604b3\") "
Feb 03 08:52:27 crc kubenswrapper[4998]: I0203 08:52:27.967486 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64ed3603-f2b2-4b8d-ac69-b70df1f604b3-config\") pod \"64ed3603-f2b2-4b8d-ac69-b70df1f604b3\" (UID: \"64ed3603-f2b2-4b8d-ac69-b70df1f604b3\") "
Feb 03 08:52:27 crc kubenswrapper[4998]: I0203 08:52:27.967587 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64ed3603-f2b2-4b8d-ac69-b70df1f604b3-dns-svc\") pod \"64ed3603-f2b2-4b8d-ac69-b70df1f604b3\" (UID: \"64ed3603-f2b2-4b8d-ac69-b70df1f604b3\") "
Feb 03 08:52:27 crc kubenswrapper[4998]: I0203 08:52:27.967630 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mttnk\" (UniqueName: \"kubernetes.io/projected/64ed3603-f2b2-4b8d-ac69-b70df1f604b3-kube-api-access-mttnk\") pod \"64ed3603-f2b2-4b8d-ac69-b70df1f604b3\" (UID: \"64ed3603-f2b2-4b8d-ac69-b70df1f604b3\") "
Feb 03 08:52:27 crc kubenswrapper[4998]: I0203 08:52:27.973582 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64ed3603-f2b2-4b8d-ac69-b70df1f604b3-kube-api-access-mttnk" (OuterVolumeSpecName: "kube-api-access-mttnk") pod "64ed3603-f2b2-4b8d-ac69-b70df1f604b3" (UID: "64ed3603-f2b2-4b8d-ac69-b70df1f604b3"). InnerVolumeSpecName "kube-api-access-mttnk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 08:52:27 crc kubenswrapper[4998]: I0203 08:52:27.989315 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64ed3603-f2b2-4b8d-ac69-b70df1f604b3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "64ed3603-f2b2-4b8d-ac69-b70df1f604b3" (UID: "64ed3603-f2b2-4b8d-ac69-b70df1f604b3"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 03 08:52:27 crc kubenswrapper[4998]: I0203 08:52:27.989498 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64ed3603-f2b2-4b8d-ac69-b70df1f604b3-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "64ed3603-f2b2-4b8d-ac69-b70df1f604b3" (UID: "64ed3603-f2b2-4b8d-ac69-b70df1f604b3"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 03 08:52:27 crc kubenswrapper[4998]: I0203 08:52:27.990265 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64ed3603-f2b2-4b8d-ac69-b70df1f604b3-config" (OuterVolumeSpecName: "config") pod "64ed3603-f2b2-4b8d-ac69-b70df1f604b3" (UID: "64ed3603-f2b2-4b8d-ac69-b70df1f604b3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:52:28 crc kubenswrapper[4998]: I0203 08:52:28.069632 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/64ed3603-f2b2-4b8d-ac69-b70df1f604b3-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 03 08:52:28 crc kubenswrapper[4998]: I0203 08:52:28.069662 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64ed3603-f2b2-4b8d-ac69-b70df1f604b3-config\") on node \"crc\" DevicePath \"\"" Feb 03 08:52:28 crc kubenswrapper[4998]: I0203 08:52:28.069671 4998 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64ed3603-f2b2-4b8d-ac69-b70df1f604b3-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 08:52:28 crc kubenswrapper[4998]: I0203 08:52:28.069680 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mttnk\" (UniqueName: \"kubernetes.io/projected/64ed3603-f2b2-4b8d-ac69-b70df1f604b3-kube-api-access-mttnk\") on node \"crc\" DevicePath \"\"" Feb 03 08:52:28 crc kubenswrapper[4998]: I0203 08:52:28.592566 4998 generic.go:334] "Generic (PLEG): container finished" podID="6de37a47-b912-4648-afd8-43e6a6f8187f" containerID="0493901e9625570c53993f8d996b130851a818437c3b33d07aedf741ebf45dcd" exitCode=0 Feb 03 08:52:28 crc kubenswrapper[4998]: I0203 08:52:28.592628 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b779fdfb7-g89gs" event={"ID":"6de37a47-b912-4648-afd8-43e6a6f8187f","Type":"ContainerDied","Data":"0493901e9625570c53993f8d996b130851a818437c3b33d07aedf741ebf45dcd"} Feb 03 08:52:28 crc kubenswrapper[4998]: I0203 08:52:28.592867 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b779fdfb7-g89gs" event={"ID":"6de37a47-b912-4648-afd8-43e6a6f8187f","Type":"ContainerStarted","Data":"fa60074854880cd8d8127fc43d3ae05ad5073ec7a64998a2a7eed382c1ebfb06"} Feb 03 08:52:28 crc kubenswrapper[4998]: I0203 08:52:28.594385 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d9d759b4f-rk6jh" event={"ID":"64ed3603-f2b2-4b8d-ac69-b70df1f604b3","Type":"ContainerDied","Data":"89e73979d688b7a1f47dd5effb7f5abb4e0775c8e866e974596fc10c8eeb74f4"} Feb 03 08:52:28 crc kubenswrapper[4998]: I0203 08:52:28.594418 4998 scope.go:117] "RemoveContainer" containerID="dba173a9d44d0c602aa582a3ba22557cd802a0cac3ce9a3fac05a16a173a1335" Feb 03 08:52:28 crc kubenswrapper[4998]: I0203 08:52:28.594492 4998 util.go:48] "No ready sandbox for pod can be found. 
Feb 03 08:52:28 crc kubenswrapper[4998]: I0203 08:52:28.672641 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7d9d759b4f-rk6jh"]
Feb 03 08:52:28 crc kubenswrapper[4998]: I0203 08:52:28.684411 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7d9d759b4f-rk6jh"]
Feb 03 08:52:29 crc kubenswrapper[4998]: I0203 08:52:29.327230 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-copy-data"]
Feb 03 08:52:29 crc kubenswrapper[4998]: E0203 08:52:29.328208 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64ed3603-f2b2-4b8d-ac69-b70df1f604b3" containerName="init"
Feb 03 08:52:29 crc kubenswrapper[4998]: I0203 08:52:29.328242 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="64ed3603-f2b2-4b8d-ac69-b70df1f604b3" containerName="init"
Feb 03 08:52:29 crc kubenswrapper[4998]: I0203 08:52:29.328527 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="64ed3603-f2b2-4b8d-ac69-b70df1f604b3" containerName="init"
Feb 03 08:52:29 crc kubenswrapper[4998]: I0203 08:52:29.329440 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-copy-data"
Feb 03 08:52:29 crc kubenswrapper[4998]: I0203 08:52:29.332066 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovn-data-cert"
Feb 03 08:52:29 crc kubenswrapper[4998]: I0203 08:52:29.338400 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"]
Feb 03 08:52:29 crc kubenswrapper[4998]: I0203 08:52:29.391934 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wnqwz\" (UniqueName: \"kubernetes.io/projected/20db43e7-c88c-4206-9909-c428d30d722c-kube-api-access-wnqwz\") pod \"ovn-copy-data\" (UID: \"20db43e7-c88c-4206-9909-c428d30d722c\") " pod="openstack/ovn-copy-data"
Feb 03 08:52:29 crc kubenswrapper[4998]: I0203 08:52:29.392004 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/20db43e7-c88c-4206-9909-c428d30d722c-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"20db43e7-c88c-4206-9909-c428d30d722c\") " pod="openstack/ovn-copy-data"
Feb 03 08:52:29 crc kubenswrapper[4998]: I0203 08:52:29.392173 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-e0074cf0-37e9-49d5-b3e2-038a1ade318f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0074cf0-37e9-49d5-b3e2-038a1ade318f\") pod \"ovn-copy-data\" (UID: \"20db43e7-c88c-4206-9909-c428d30d722c\") " pod="openstack/ovn-copy-data"
Feb 03 08:52:29 crc kubenswrapper[4998]: I0203 08:52:29.493818 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-e0074cf0-37e9-49d5-b3e2-038a1ade318f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0074cf0-37e9-49d5-b3e2-038a1ade318f\") pod \"ovn-copy-data\" (UID: \"20db43e7-c88c-4206-9909-c428d30d722c\") " pod="openstack/ovn-copy-data"
Feb 03 08:52:29 crc kubenswrapper[4998]: I0203 08:52:29.494034 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wnqwz\" (UniqueName: \"kubernetes.io/projected/20db43e7-c88c-4206-9909-c428d30d722c-kube-api-access-wnqwz\") pod \"ovn-copy-data\" (UID: \"20db43e7-c88c-4206-9909-c428d30d722c\") " pod="openstack/ovn-copy-data"
Feb 03 08:52:29 crc kubenswrapper[4998]: I0203 08:52:29.494061 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/20db43e7-c88c-4206-9909-c428d30d722c-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"20db43e7-c88c-4206-9909-c428d30d722c\") " pod="openstack/ovn-copy-data"
Feb 03 08:52:29 crc kubenswrapper[4998]: I0203 08:52:29.497867 4998 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Feb 03 08:52:29 crc kubenswrapper[4998]: I0203 08:52:29.497900 4998 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-e0074cf0-37e9-49d5-b3e2-038a1ade318f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0074cf0-37e9-49d5-b3e2-038a1ade318f\") pod \"ovn-copy-data\" (UID: \"20db43e7-c88c-4206-9909-c428d30d722c\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/01517eb16d9680beb6d4a380d560a4777a55f14af99df2c3e37e9aab84233330/globalmount\"" pod="openstack/ovn-copy-data"
Feb 03 08:52:29 crc kubenswrapper[4998]: I0203 08:52:29.500165 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/20db43e7-c88c-4206-9909-c428d30d722c-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"20db43e7-c88c-4206-9909-c428d30d722c\") " pod="openstack/ovn-copy-data"
Feb 03 08:52:29 crc kubenswrapper[4998]: I0203 08:52:29.512032 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wnqwz\" (UniqueName: \"kubernetes.io/projected/20db43e7-c88c-4206-9909-c428d30d722c-kube-api-access-wnqwz\") pod \"ovn-copy-data\" (UID: \"20db43e7-c88c-4206-9909-c428d30d722c\") " pod="openstack/ovn-copy-data"
Feb 03 08:52:29 crc kubenswrapper[4998]: I0203 08:52:29.523600 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-e0074cf0-37e9-49d5-b3e2-038a1ade318f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0074cf0-37e9-49d5-b3e2-038a1ade318f\") pod \"ovn-copy-data\" (UID: \"20db43e7-c88c-4206-9909-c428d30d722c\") " pod="openstack/ovn-copy-data"
Feb 03 08:52:29 crc kubenswrapper[4998]: I0203 08:52:29.604516 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b779fdfb7-g89gs" event={"ID":"6de37a47-b912-4648-afd8-43e6a6f8187f","Type":"ContainerStarted","Data":"672e42169181d9c572a9d1705e1416737b3d219e238f70254cb287b1a16225f0"}
Feb 03 08:52:29 crc kubenswrapper[4998]: I0203 08:52:29.604660 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7b779fdfb7-g89gs"
Feb 03 08:52:29 crc kubenswrapper[4998]: I0203 08:52:29.647394 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-copy-data"
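The csi_attacher.go:380 line above documents the optional CSI staging step: the kubevirt.io.hostpath-provisioner driver does not advertise the STAGE_UNSTAGE_VOLUME node capability, so the kubelet skips NodeStageVolume, immediately records "MountVolume.MountDevice succeeded" with the computed globalmount path, and only the per-pod publish (the SetUp at 08:52:29.523600) actually runs. The capability check against the CSI spec's generated Go types looks roughly like this (needsStage is an illustrative wrapper, not kubelet source):

    package main

    import (
        "context"

        "github.com/container-storage-interface/spec/lib/go/csi"
    )

    // needsStage reports whether a CSI node plugin advertises the optional
    // STAGE_UNSTAGE_VOLUME capability; when it does not (as with the hostpath
    // provisioner above), the MountDevice/NodeStageVolume step is skipped.
    func needsStage(ctx context.Context, node csi.NodeClient) (bool, error) {
        resp, err := node.NodeGetCapabilities(ctx, &csi.NodeGetCapabilitiesRequest{})
        if err != nil {
            return false, err
        }
        for _, c := range resp.GetCapabilities() {
            if c.GetRpc().GetType() == csi.NodeServiceCapability_RPC_STAGE_UNSTAGE_VOLUME {
                return true, nil
            }
        }
        return false, nil
    }

    func main() {}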
Feb 03 08:52:29 crc kubenswrapper[4998]: I0203 08:52:29.652042 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7b779fdfb7-g89gs" podStartSLOduration=3.652024195 podStartE2EDuration="3.652024195s" podCreationTimestamp="2026-02-03 08:52:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 08:52:29.647226689 +0000 UTC m=+7587.933920515" watchObservedRunningTime="2026-02-03 08:52:29.652024195 +0000 UTC m=+7587.938718001"
Feb 03 08:52:30 crc kubenswrapper[4998]: I0203 08:52:30.315631 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"]
Feb 03 08:52:30 crc kubenswrapper[4998]: I0203 08:52:30.437275 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64ed3603-f2b2-4b8d-ac69-b70df1f604b3" path="/var/lib/kubelet/pods/64ed3603-f2b2-4b8d-ac69-b70df1f604b3/volumes"
Feb 03 08:52:30 crc kubenswrapper[4998]: I0203 08:52:30.613450 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"20db43e7-c88c-4206-9909-c428d30d722c","Type":"ContainerStarted","Data":"51b3bf3731fd5da2c28c2be0a2d12f1e613cc583a0219bb027ca2d1f545ed05a"}
Feb 03 08:52:31 crc kubenswrapper[4998]: I0203 08:52:31.625068 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"20db43e7-c88c-4206-9909-c428d30d722c","Type":"ContainerStarted","Data":"3be0a24358fd8116af5fe9f1df3a54555b6a87175a9e8a195fc3d9a15951cc66"}
Feb 03 08:52:31 crc kubenswrapper[4998]: I0203 08:52:31.645106 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-copy-data" podStartSLOduration=3.264278378 podStartE2EDuration="3.645085235s" podCreationTimestamp="2026-02-03 08:52:28 +0000 UTC" firstStartedPulling="2026-02-03 08:52:30.310585364 +0000 UTC m=+7588.597279170" lastFinishedPulling="2026-02-03 08:52:30.691392221 +0000 UTC m=+7588.978086027" observedRunningTime="2026-02-03 08:52:31.644303853 +0000 UTC m=+7589.930997679" watchObservedRunningTime="2026-02-03 08:52:31.645085235 +0000 UTC m=+7589.931779041"
Feb 03 08:52:37 crc kubenswrapper[4998]: I0203 08:52:37.225091 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7b779fdfb7-g89gs"
Feb 03 08:52:37 crc kubenswrapper[4998]: I0203 08:52:37.322496 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cbff95ccf-lcrch"]
Feb 03 08:52:37 crc kubenswrapper[4998]: I0203 08:52:37.323073 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7cbff95ccf-lcrch" podUID="6857c913-b07d-495c-a08f-66b849ce9d2d" containerName="dnsmasq-dns" containerID="cri-o://416d0a895e0d7618c5bc0c009f606ed5836c0ba55ac8f2b0221a2c80b6b4dc83" gracePeriod=10
Feb 03 08:52:37 crc kubenswrapper[4998]: I0203 08:52:37.674674 4998 generic.go:334] "Generic (PLEG): container finished" podID="6857c913-b07d-495c-a08f-66b849ce9d2d" containerID="416d0a895e0d7618c5bc0c009f606ed5836c0ba55ac8f2b0221a2c80b6b4dc83" exitCode=0
Feb 03 08:52:37 crc kubenswrapper[4998]: I0203 08:52:37.674770 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cbff95ccf-lcrch" event={"ID":"6857c913-b07d-495c-a08f-66b849ce9d2d","Type":"ContainerDied","Data":"416d0a895e0d7618c5bc0c009f606ed5836c0ba55ac8f2b0221a2c80b6b4dc83"}
Feb 03 08:52:37 crc kubenswrapper[4998]: I0203 08:52:37.938073 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cbff95ccf-lcrch"
Feb 03 08:52:38 crc kubenswrapper[4998]: I0203 08:52:38.041871 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6857c913-b07d-495c-a08f-66b849ce9d2d-config\") pod \"6857c913-b07d-495c-a08f-66b849ce9d2d\" (UID: \"6857c913-b07d-495c-a08f-66b849ce9d2d\") "
Feb 03 08:52:38 crc kubenswrapper[4998]: I0203 08:52:38.041976 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6857c913-b07d-495c-a08f-66b849ce9d2d-dns-svc\") pod \"6857c913-b07d-495c-a08f-66b849ce9d2d\" (UID: \"6857c913-b07d-495c-a08f-66b849ce9d2d\") "
Feb 03 08:52:38 crc kubenswrapper[4998]: I0203 08:52:38.042136 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q955x\" (UniqueName: \"kubernetes.io/projected/6857c913-b07d-495c-a08f-66b849ce9d2d-kube-api-access-q955x\") pod \"6857c913-b07d-495c-a08f-66b849ce9d2d\" (UID: \"6857c913-b07d-495c-a08f-66b849ce9d2d\") "
Feb 03 08:52:38 crc kubenswrapper[4998]: I0203 08:52:38.052204 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6857c913-b07d-495c-a08f-66b849ce9d2d-kube-api-access-q955x" (OuterVolumeSpecName: "kube-api-access-q955x") pod "6857c913-b07d-495c-a08f-66b849ce9d2d" (UID: "6857c913-b07d-495c-a08f-66b849ce9d2d"). InnerVolumeSpecName "kube-api-access-q955x". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 08:52:38 crc kubenswrapper[4998]: I0203 08:52:38.099864 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6857c913-b07d-495c-a08f-66b849ce9d2d-config" (OuterVolumeSpecName: "config") pod "6857c913-b07d-495c-a08f-66b849ce9d2d" (UID: "6857c913-b07d-495c-a08f-66b849ce9d2d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 03 08:52:38 crc kubenswrapper[4998]: I0203 08:52:38.105440 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6857c913-b07d-495c-a08f-66b849ce9d2d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6857c913-b07d-495c-a08f-66b849ce9d2d" (UID: "6857c913-b07d-495c-a08f-66b849ce9d2d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:52:38 crc kubenswrapper[4998]: I0203 08:52:38.144204 4998 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6857c913-b07d-495c-a08f-66b849ce9d2d-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 08:52:38 crc kubenswrapper[4998]: I0203 08:52:38.144479 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q955x\" (UniqueName: \"kubernetes.io/projected/6857c913-b07d-495c-a08f-66b849ce9d2d-kube-api-access-q955x\") on node \"crc\" DevicePath \"\"" Feb 03 08:52:38 crc kubenswrapper[4998]: I0203 08:52:38.144492 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6857c913-b07d-495c-a08f-66b849ce9d2d-config\") on node \"crc\" DevicePath \"\"" Feb 03 08:52:38 crc kubenswrapper[4998]: I0203 08:52:38.427460 4998 scope.go:117] "RemoveContainer" containerID="8b73a3adb01550fb0197fbf3c3543111a8f8997fd8a03538adf5471f08cef4d6" Feb 03 08:52:38 crc kubenswrapper[4998]: E0203 08:52:38.429059 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:52:38 crc kubenswrapper[4998]: I0203 08:52:38.684385 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cbff95ccf-lcrch" event={"ID":"6857c913-b07d-495c-a08f-66b849ce9d2d","Type":"ContainerDied","Data":"787380ee41b3ebf79a4f419ae87260e69b4553c047e80adf8effdfee3059c892"} Feb 03 08:52:38 crc kubenswrapper[4998]: I0203 08:52:38.684442 4998 scope.go:117] "RemoveContainer" containerID="416d0a895e0d7618c5bc0c009f606ed5836c0ba55ac8f2b0221a2c80b6b4dc83" Feb 03 08:52:38 crc kubenswrapper[4998]: I0203 08:52:38.684858 4998 util.go:48] "No ready sandbox for pod can be found. 
Feb 03 08:52:38 crc kubenswrapper[4998]: I0203 08:52:38.705754 4998 scope.go:117] "RemoveContainer" containerID="212ff08b5c037b58b878e2b9ae1bcf51aaa8891373180be2eea9a4c805d46374"
Feb 03 08:52:38 crc kubenswrapper[4998]: I0203 08:52:38.710545 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cbff95ccf-lcrch"]
Feb 03 08:52:38 crc kubenswrapper[4998]: I0203 08:52:38.718329 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7cbff95ccf-lcrch"]
Feb 03 08:52:38 crc kubenswrapper[4998]: I0203 08:52:38.852573 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"]
Feb 03 08:52:38 crc kubenswrapper[4998]: E0203 08:52:38.852932 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6857c913-b07d-495c-a08f-66b849ce9d2d" containerName="dnsmasq-dns"
Feb 03 08:52:38 crc kubenswrapper[4998]: I0203 08:52:38.852949 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="6857c913-b07d-495c-a08f-66b849ce9d2d" containerName="dnsmasq-dns"
Feb 03 08:52:38 crc kubenswrapper[4998]: E0203 08:52:38.852977 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6857c913-b07d-495c-a08f-66b849ce9d2d" containerName="init"
Feb 03 08:52:38 crc kubenswrapper[4998]: I0203 08:52:38.852985 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="6857c913-b07d-495c-a08f-66b849ce9d2d" containerName="init"
Feb 03 08:52:38 crc kubenswrapper[4998]: I0203 08:52:38.853162 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="6857c913-b07d-495c-a08f-66b849ce9d2d" containerName="dnsmasq-dns"
Feb 03 08:52:38 crc kubenswrapper[4998]: I0203 08:52:38.854211 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Feb 03 08:52:38 crc kubenswrapper[4998]: I0203 08:52:38.856449 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-6qfm4"
Feb 03 08:52:38 crc kubenswrapper[4998]: I0203 08:52:38.857805 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config"
Feb 03 08:52:38 crc kubenswrapper[4998]: I0203 08:52:38.857954 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts"
Feb 03 08:52:38 crc kubenswrapper[4998]: I0203 08:52:38.880715 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Feb 03 08:52:38 crc kubenswrapper[4998]: I0203 08:52:38.957892 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/44c85897-bda5-4810-b7a8-35b57d1d7fc6-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"44c85897-bda5-4810-b7a8-35b57d1d7fc6\") " pod="openstack/ovn-northd-0"
Feb 03 08:52:38 crc kubenswrapper[4998]: I0203 08:52:38.958059 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44c85897-bda5-4810-b7a8-35b57d1d7fc6-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"44c85897-bda5-4810-b7a8-35b57d1d7fc6\") " pod="openstack/ovn-northd-0"
Feb 03 08:52:38 crc kubenswrapper[4998]: I0203 08:52:38.958128 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b9l79\" (UniqueName: \"kubernetes.io/projected/44c85897-bda5-4810-b7a8-35b57d1d7fc6-kube-api-access-b9l79\") pod \"ovn-northd-0\" (UID: \"44c85897-bda5-4810-b7a8-35b57d1d7fc6\") " pod="openstack/ovn-northd-0"
Feb 03 08:52:38 crc kubenswrapper[4998]: I0203 08:52:38.958153 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/44c85897-bda5-4810-b7a8-35b57d1d7fc6-config\") pod \"ovn-northd-0\" (UID: \"44c85897-bda5-4810-b7a8-35b57d1d7fc6\") " pod="openstack/ovn-northd-0"
Feb 03 08:52:38 crc kubenswrapper[4998]: I0203 08:52:38.958208 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/44c85897-bda5-4810-b7a8-35b57d1d7fc6-scripts\") pod \"ovn-northd-0\" (UID: \"44c85897-bda5-4810-b7a8-35b57d1d7fc6\") " pod="openstack/ovn-northd-0"
Feb 03 08:52:39 crc kubenswrapper[4998]: I0203 08:52:39.059203 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/44c85897-bda5-4810-b7a8-35b57d1d7fc6-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"44c85897-bda5-4810-b7a8-35b57d1d7fc6\") " pod="openstack/ovn-northd-0"
Feb 03 08:52:39 crc kubenswrapper[4998]: I0203 08:52:39.059262 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44c85897-bda5-4810-b7a8-35b57d1d7fc6-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"44c85897-bda5-4810-b7a8-35b57d1d7fc6\") " pod="openstack/ovn-northd-0"
Feb 03 08:52:39 crc kubenswrapper[4998]: I0203 08:52:39.059298 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b9l79\" (UniqueName: \"kubernetes.io/projected/44c85897-bda5-4810-b7a8-35b57d1d7fc6-kube-api-access-b9l79\") pod \"ovn-northd-0\" (UID: \"44c85897-bda5-4810-b7a8-35b57d1d7fc6\") " pod="openstack/ovn-northd-0"
pod \"ovn-northd-0\" (UID: \"44c85897-bda5-4810-b7a8-35b57d1d7fc6\") " pod="openstack/ovn-northd-0" Feb 03 08:52:39 crc kubenswrapper[4998]: I0203 08:52:39.059317 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/44c85897-bda5-4810-b7a8-35b57d1d7fc6-config\") pod \"ovn-northd-0\" (UID: \"44c85897-bda5-4810-b7a8-35b57d1d7fc6\") " pod="openstack/ovn-northd-0" Feb 03 08:52:39 crc kubenswrapper[4998]: I0203 08:52:39.059352 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/44c85897-bda5-4810-b7a8-35b57d1d7fc6-scripts\") pod \"ovn-northd-0\" (UID: \"44c85897-bda5-4810-b7a8-35b57d1d7fc6\") " pod="openstack/ovn-northd-0" Feb 03 08:52:39 crc kubenswrapper[4998]: I0203 08:52:39.059670 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/44c85897-bda5-4810-b7a8-35b57d1d7fc6-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"44c85897-bda5-4810-b7a8-35b57d1d7fc6\") " pod="openstack/ovn-northd-0" Feb 03 08:52:39 crc kubenswrapper[4998]: I0203 08:52:39.060038 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/44c85897-bda5-4810-b7a8-35b57d1d7fc6-scripts\") pod \"ovn-northd-0\" (UID: \"44c85897-bda5-4810-b7a8-35b57d1d7fc6\") " pod="openstack/ovn-northd-0" Feb 03 08:52:39 crc kubenswrapper[4998]: I0203 08:52:39.060371 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/44c85897-bda5-4810-b7a8-35b57d1d7fc6-config\") pod \"ovn-northd-0\" (UID: \"44c85897-bda5-4810-b7a8-35b57d1d7fc6\") " pod="openstack/ovn-northd-0" Feb 03 08:52:39 crc kubenswrapper[4998]: I0203 08:52:39.065743 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44c85897-bda5-4810-b7a8-35b57d1d7fc6-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"44c85897-bda5-4810-b7a8-35b57d1d7fc6\") " pod="openstack/ovn-northd-0" Feb 03 08:52:39 crc kubenswrapper[4998]: I0203 08:52:39.077712 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b9l79\" (UniqueName: \"kubernetes.io/projected/44c85897-bda5-4810-b7a8-35b57d1d7fc6-kube-api-access-b9l79\") pod \"ovn-northd-0\" (UID: \"44c85897-bda5-4810-b7a8-35b57d1d7fc6\") " pod="openstack/ovn-northd-0" Feb 03 08:52:39 crc kubenswrapper[4998]: I0203 08:52:39.174015 4998 util.go:30] "No sandbox for pod can be found. 
Feb 03 08:52:39 crc kubenswrapper[4998]: I0203 08:52:39.627361 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Feb 03 08:52:39 crc kubenswrapper[4998]: W0203 08:52:39.630464 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod44c85897_bda5_4810_b7a8_35b57d1d7fc6.slice/crio-a7c89cd8c839e82f01937b115686b6c66b82ca2571da7e90ae73fed18dbe7f31 WatchSource:0}: Error finding container a7c89cd8c839e82f01937b115686b6c66b82ca2571da7e90ae73fed18dbe7f31: Status 404 returned error can't find the container with id a7c89cd8c839e82f01937b115686b6c66b82ca2571da7e90ae73fed18dbe7f31
Feb 03 08:52:39 crc kubenswrapper[4998]: I0203 08:52:39.693152 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"44c85897-bda5-4810-b7a8-35b57d1d7fc6","Type":"ContainerStarted","Data":"a7c89cd8c839e82f01937b115686b6c66b82ca2571da7e90ae73fed18dbe7f31"}
Feb 03 08:52:40 crc kubenswrapper[4998]: I0203 08:52:40.458874 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6857c913-b07d-495c-a08f-66b849ce9d2d" path="/var/lib/kubelet/pods/6857c913-b07d-495c-a08f-66b849ce9d2d/volumes"
Feb 03 08:52:40 crc kubenswrapper[4998]: I0203 08:52:40.720413 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"44c85897-bda5-4810-b7a8-35b57d1d7fc6","Type":"ContainerStarted","Data":"aa06c2390d2f7d7e1733157160bef954e70c782a5e7f42363c7ed2fe75aa4793"}
Feb 03 08:52:41 crc kubenswrapper[4998]: I0203 08:52:41.731314 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"44c85897-bda5-4810-b7a8-35b57d1d7fc6","Type":"ContainerStarted","Data":"34e6a5bbb3c1da2da14f3ec772cd374d137adfe97f539fd5132fc7a531d71eac"}
Feb 03 08:52:41 crc kubenswrapper[4998]: I0203 08:52:41.731824 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0"
Feb 03 08:52:41 crc kubenswrapper[4998]: I0203 08:52:41.756305 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=3.122253499 podStartE2EDuration="3.756274832s" podCreationTimestamp="2026-02-03 08:52:38 +0000 UTC" firstStartedPulling="2026-02-03 08:52:39.633021408 +0000 UTC m=+7597.919715214" lastFinishedPulling="2026-02-03 08:52:40.267042741 +0000 UTC m=+7598.553736547" observedRunningTime="2026-02-03 08:52:41.745443255 +0000 UTC m=+7600.032137111" watchObservedRunningTime="2026-02-03 08:52:41.756274832 +0000 UTC m=+7600.042968658"
Feb 03 08:52:44 crc kubenswrapper[4998]: I0203 08:52:44.297238 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-tc2c6"]
Feb 03 08:52:44 crc kubenswrapper[4998]: I0203 08:52:44.299599 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tc2c6"
Need to start a new one" pod="openshift-marketplace/certified-operators-tc2c6" Feb 03 08:52:44 crc kubenswrapper[4998]: I0203 08:52:44.316749 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tc2c6"] Feb 03 08:52:44 crc kubenswrapper[4998]: I0203 08:52:44.364775 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a17ca407-6690-4060-8dc4-1a2e44fb32d8-utilities\") pod \"certified-operators-tc2c6\" (UID: \"a17ca407-6690-4060-8dc4-1a2e44fb32d8\") " pod="openshift-marketplace/certified-operators-tc2c6" Feb 03 08:52:44 crc kubenswrapper[4998]: I0203 08:52:44.364891 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a17ca407-6690-4060-8dc4-1a2e44fb32d8-catalog-content\") pod \"certified-operators-tc2c6\" (UID: \"a17ca407-6690-4060-8dc4-1a2e44fb32d8\") " pod="openshift-marketplace/certified-operators-tc2c6" Feb 03 08:52:44 crc kubenswrapper[4998]: I0203 08:52:44.364983 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxsmx\" (UniqueName: \"kubernetes.io/projected/a17ca407-6690-4060-8dc4-1a2e44fb32d8-kube-api-access-hxsmx\") pod \"certified-operators-tc2c6\" (UID: \"a17ca407-6690-4060-8dc4-1a2e44fb32d8\") " pod="openshift-marketplace/certified-operators-tc2c6" Feb 03 08:52:44 crc kubenswrapper[4998]: I0203 08:52:44.466685 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a17ca407-6690-4060-8dc4-1a2e44fb32d8-utilities\") pod \"certified-operators-tc2c6\" (UID: \"a17ca407-6690-4060-8dc4-1a2e44fb32d8\") " pod="openshift-marketplace/certified-operators-tc2c6" Feb 03 08:52:44 crc kubenswrapper[4998]: I0203 08:52:44.467204 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a17ca407-6690-4060-8dc4-1a2e44fb32d8-utilities\") pod \"certified-operators-tc2c6\" (UID: \"a17ca407-6690-4060-8dc4-1a2e44fb32d8\") " pod="openshift-marketplace/certified-operators-tc2c6" Feb 03 08:52:44 crc kubenswrapper[4998]: I0203 08:52:44.467823 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a17ca407-6690-4060-8dc4-1a2e44fb32d8-catalog-content\") pod \"certified-operators-tc2c6\" (UID: \"a17ca407-6690-4060-8dc4-1a2e44fb32d8\") " pod="openshift-marketplace/certified-operators-tc2c6" Feb 03 08:52:44 crc kubenswrapper[4998]: I0203 08:52:44.467343 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a17ca407-6690-4060-8dc4-1a2e44fb32d8-catalog-content\") pod \"certified-operators-tc2c6\" (UID: \"a17ca407-6690-4060-8dc4-1a2e44fb32d8\") " pod="openshift-marketplace/certified-operators-tc2c6" Feb 03 08:52:44 crc kubenswrapper[4998]: I0203 08:52:44.469289 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxsmx\" (UniqueName: \"kubernetes.io/projected/a17ca407-6690-4060-8dc4-1a2e44fb32d8-kube-api-access-hxsmx\") pod \"certified-operators-tc2c6\" (UID: \"a17ca407-6690-4060-8dc4-1a2e44fb32d8\") " pod="openshift-marketplace/certified-operators-tc2c6" Feb 03 08:52:44 crc kubenswrapper[4998]: I0203 08:52:44.488376 4998 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-hxsmx\" (UniqueName: \"kubernetes.io/projected/a17ca407-6690-4060-8dc4-1a2e44fb32d8-kube-api-access-hxsmx\") pod \"certified-operators-tc2c6\" (UID: \"a17ca407-6690-4060-8dc4-1a2e44fb32d8\") " pod="openshift-marketplace/certified-operators-tc2c6" Feb 03 08:52:44 crc kubenswrapper[4998]: I0203 08:52:44.619067 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tc2c6" Feb 03 08:52:45 crc kubenswrapper[4998]: I0203 08:52:45.007821 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-c1fc-account-create-update-77knt"] Feb 03 08:52:45 crc kubenswrapper[4998]: I0203 08:52:45.038102 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-kxrfk"] Feb 03 08:52:45 crc kubenswrapper[4998]: I0203 08:52:45.039028 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-c1fc-account-create-update-77knt" Feb 03 08:52:45 crc kubenswrapper[4998]: I0203 08:52:45.039876 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-kxrfk" Feb 03 08:52:45 crc kubenswrapper[4998]: I0203 08:52:45.042139 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Feb 03 08:52:45 crc kubenswrapper[4998]: I0203 08:52:45.046004 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-kxrfk"] Feb 03 08:52:45 crc kubenswrapper[4998]: I0203 08:52:45.056891 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-c1fc-account-create-update-77knt"] Feb 03 08:52:45 crc kubenswrapper[4998]: I0203 08:52:45.082859 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e701d871-ccce-4ab7-bb1a-4fb9c1766519-operator-scripts\") pod \"keystone-db-create-kxrfk\" (UID: \"e701d871-ccce-4ab7-bb1a-4fb9c1766519\") " pod="openstack/keystone-db-create-kxrfk" Feb 03 08:52:45 crc kubenswrapper[4998]: I0203 08:52:45.082927 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/588b9b50-534f-4ba6-a43f-d5065b6d380b-operator-scripts\") pod \"keystone-c1fc-account-create-update-77knt\" (UID: \"588b9b50-534f-4ba6-a43f-d5065b6d380b\") " pod="openstack/keystone-c1fc-account-create-update-77knt" Feb 03 08:52:45 crc kubenswrapper[4998]: I0203 08:52:45.082987 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9lcpf\" (UniqueName: \"kubernetes.io/projected/e701d871-ccce-4ab7-bb1a-4fb9c1766519-kube-api-access-9lcpf\") pod \"keystone-db-create-kxrfk\" (UID: \"e701d871-ccce-4ab7-bb1a-4fb9c1766519\") " pod="openstack/keystone-db-create-kxrfk" Feb 03 08:52:45 crc kubenswrapper[4998]: I0203 08:52:45.083013 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46nnp\" (UniqueName: \"kubernetes.io/projected/588b9b50-534f-4ba6-a43f-d5065b6d380b-kube-api-access-46nnp\") pod \"keystone-c1fc-account-create-update-77knt\" (UID: \"588b9b50-534f-4ba6-a43f-d5065b6d380b\") " pod="openstack/keystone-c1fc-account-create-update-77knt" Feb 03 08:52:45 crc kubenswrapper[4998]: I0203 08:52:45.147494 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-marketplace/certified-operators-tc2c6"] Feb 03 08:52:45 crc kubenswrapper[4998]: I0203 08:52:45.184208 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9lcpf\" (UniqueName: \"kubernetes.io/projected/e701d871-ccce-4ab7-bb1a-4fb9c1766519-kube-api-access-9lcpf\") pod \"keystone-db-create-kxrfk\" (UID: \"e701d871-ccce-4ab7-bb1a-4fb9c1766519\") " pod="openstack/keystone-db-create-kxrfk" Feb 03 08:52:45 crc kubenswrapper[4998]: I0203 08:52:45.184497 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46nnp\" (UniqueName: \"kubernetes.io/projected/588b9b50-534f-4ba6-a43f-d5065b6d380b-kube-api-access-46nnp\") pod \"keystone-c1fc-account-create-update-77knt\" (UID: \"588b9b50-534f-4ba6-a43f-d5065b6d380b\") " pod="openstack/keystone-c1fc-account-create-update-77knt" Feb 03 08:52:45 crc kubenswrapper[4998]: I0203 08:52:45.184666 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e701d871-ccce-4ab7-bb1a-4fb9c1766519-operator-scripts\") pod \"keystone-db-create-kxrfk\" (UID: \"e701d871-ccce-4ab7-bb1a-4fb9c1766519\") " pod="openstack/keystone-db-create-kxrfk" Feb 03 08:52:45 crc kubenswrapper[4998]: I0203 08:52:45.184831 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/588b9b50-534f-4ba6-a43f-d5065b6d380b-operator-scripts\") pod \"keystone-c1fc-account-create-update-77knt\" (UID: \"588b9b50-534f-4ba6-a43f-d5065b6d380b\") " pod="openstack/keystone-c1fc-account-create-update-77knt" Feb 03 08:52:45 crc kubenswrapper[4998]: I0203 08:52:45.185445 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e701d871-ccce-4ab7-bb1a-4fb9c1766519-operator-scripts\") pod \"keystone-db-create-kxrfk\" (UID: \"e701d871-ccce-4ab7-bb1a-4fb9c1766519\") " pod="openstack/keystone-db-create-kxrfk" Feb 03 08:52:45 crc kubenswrapper[4998]: I0203 08:52:45.185775 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/588b9b50-534f-4ba6-a43f-d5065b6d380b-operator-scripts\") pod \"keystone-c1fc-account-create-update-77knt\" (UID: \"588b9b50-534f-4ba6-a43f-d5065b6d380b\") " pod="openstack/keystone-c1fc-account-create-update-77knt" Feb 03 08:52:45 crc kubenswrapper[4998]: I0203 08:52:45.205756 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9lcpf\" (UniqueName: \"kubernetes.io/projected/e701d871-ccce-4ab7-bb1a-4fb9c1766519-kube-api-access-9lcpf\") pod \"keystone-db-create-kxrfk\" (UID: \"e701d871-ccce-4ab7-bb1a-4fb9c1766519\") " pod="openstack/keystone-db-create-kxrfk" Feb 03 08:52:45 crc kubenswrapper[4998]: I0203 08:52:45.208284 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46nnp\" (UniqueName: \"kubernetes.io/projected/588b9b50-534f-4ba6-a43f-d5065b6d380b-kube-api-access-46nnp\") pod \"keystone-c1fc-account-create-update-77knt\" (UID: \"588b9b50-534f-4ba6-a43f-d5065b6d380b\") " pod="openstack/keystone-c1fc-account-create-update-77knt" Feb 03 08:52:45 crc kubenswrapper[4998]: I0203 08:52:45.397993 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-kxrfk" Feb 03 08:52:45 crc kubenswrapper[4998]: I0203 08:52:45.433625 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-c1fc-account-create-update-77knt" Feb 03 08:52:45 crc kubenswrapper[4998]: I0203 08:52:45.777996 4998 generic.go:334] "Generic (PLEG): container finished" podID="a17ca407-6690-4060-8dc4-1a2e44fb32d8" containerID="8ca96ee2d5c50455b3a081bfdf1afb435009f10342a232a28d3bfb8d719c105d" exitCode=0 Feb 03 08:52:45 crc kubenswrapper[4998]: I0203 08:52:45.778148 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tc2c6" event={"ID":"a17ca407-6690-4060-8dc4-1a2e44fb32d8","Type":"ContainerDied","Data":"8ca96ee2d5c50455b3a081bfdf1afb435009f10342a232a28d3bfb8d719c105d"} Feb 03 08:52:45 crc kubenswrapper[4998]: I0203 08:52:45.778404 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tc2c6" event={"ID":"a17ca407-6690-4060-8dc4-1a2e44fb32d8","Type":"ContainerStarted","Data":"9de8eaf27e8b454fbb3087b90f9280aac6442a5237c94385f2e40fba6907553f"} Feb 03 08:52:45 crc kubenswrapper[4998]: I0203 08:52:45.856052 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-kxrfk"] Feb 03 08:52:45 crc kubenswrapper[4998]: I0203 08:52:45.954456 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-c1fc-account-create-update-77knt"] Feb 03 08:52:45 crc kubenswrapper[4998]: W0203 08:52:45.956629 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod588b9b50_534f_4ba6_a43f_d5065b6d380b.slice/crio-3ddec7d6946f078d45435a438f103604e445b5d9c85885d23f28762c1eafb54f WatchSource:0}: Error finding container 3ddec7d6946f078d45435a438f103604e445b5d9c85885d23f28762c1eafb54f: Status 404 returned error can't find the container with id 3ddec7d6946f078d45435a438f103604e445b5d9c85885d23f28762c1eafb54f Feb 03 08:52:46 crc kubenswrapper[4998]: I0203 08:52:46.793169 4998 generic.go:334] "Generic (PLEG): container finished" podID="e701d871-ccce-4ab7-bb1a-4fb9c1766519" containerID="4e57e862dc5310bc81886881da2c1766eb97bb67d6d4419cadb9cc96753d2d2d" exitCode=0 Feb 03 08:52:46 crc kubenswrapper[4998]: I0203 08:52:46.793275 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-kxrfk" event={"ID":"e701d871-ccce-4ab7-bb1a-4fb9c1766519","Type":"ContainerDied","Data":"4e57e862dc5310bc81886881da2c1766eb97bb67d6d4419cadb9cc96753d2d2d"} Feb 03 08:52:46 crc kubenswrapper[4998]: I0203 08:52:46.793304 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-kxrfk" event={"ID":"e701d871-ccce-4ab7-bb1a-4fb9c1766519","Type":"ContainerStarted","Data":"e88e04e5b7c2e2cfbc2d099ace54c1c584e8e2df06c7aef10ca62cddb137f043"} Feb 03 08:52:46 crc kubenswrapper[4998]: I0203 08:52:46.796249 4998 generic.go:334] "Generic (PLEG): container finished" podID="588b9b50-534f-4ba6-a43f-d5065b6d380b" containerID="730db2e28f5983a06e19092a1a1ebf2d8fb475a548b0a1ab528eed6af9061f12" exitCode=0 Feb 03 08:52:46 crc kubenswrapper[4998]: I0203 08:52:46.796348 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-c1fc-account-create-update-77knt" event={"ID":"588b9b50-534f-4ba6-a43f-d5065b6d380b","Type":"ContainerDied","Data":"730db2e28f5983a06e19092a1a1ebf2d8fb475a548b0a1ab528eed6af9061f12"} Feb 03 08:52:46 crc kubenswrapper[4998]: I0203 
08:52:46.796557 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-c1fc-account-create-update-77knt" event={"ID":"588b9b50-534f-4ba6-a43f-d5065b6d380b","Type":"ContainerStarted","Data":"3ddec7d6946f078d45435a438f103604e445b5d9c85885d23f28762c1eafb54f"} Feb 03 08:52:47 crc kubenswrapper[4998]: I0203 08:52:47.806076 4998 generic.go:334] "Generic (PLEG): container finished" podID="a17ca407-6690-4060-8dc4-1a2e44fb32d8" containerID="d748fe3ac6da1cdf4990a3b3ffe765569df66b74c0af4cb8da51f0cc83ba6768" exitCode=0 Feb 03 08:52:47 crc kubenswrapper[4998]: I0203 08:52:47.806123 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tc2c6" event={"ID":"a17ca407-6690-4060-8dc4-1a2e44fb32d8","Type":"ContainerDied","Data":"d748fe3ac6da1cdf4990a3b3ffe765569df66b74c0af4cb8da51f0cc83ba6768"} Feb 03 08:52:48 crc kubenswrapper[4998]: I0203 08:52:48.174179 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-kxrfk" Feb 03 08:52:48 crc kubenswrapper[4998]: I0203 08:52:48.239864 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9lcpf\" (UniqueName: \"kubernetes.io/projected/e701d871-ccce-4ab7-bb1a-4fb9c1766519-kube-api-access-9lcpf\") pod \"e701d871-ccce-4ab7-bb1a-4fb9c1766519\" (UID: \"e701d871-ccce-4ab7-bb1a-4fb9c1766519\") " Feb 03 08:52:48 crc kubenswrapper[4998]: I0203 08:52:48.239920 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e701d871-ccce-4ab7-bb1a-4fb9c1766519-operator-scripts\") pod \"e701d871-ccce-4ab7-bb1a-4fb9c1766519\" (UID: \"e701d871-ccce-4ab7-bb1a-4fb9c1766519\") " Feb 03 08:52:48 crc kubenswrapper[4998]: I0203 08:52:48.241364 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e701d871-ccce-4ab7-bb1a-4fb9c1766519-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e701d871-ccce-4ab7-bb1a-4fb9c1766519" (UID: "e701d871-ccce-4ab7-bb1a-4fb9c1766519"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:52:48 crc kubenswrapper[4998]: I0203 08:52:48.251115 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e701d871-ccce-4ab7-bb1a-4fb9c1766519-kube-api-access-9lcpf" (OuterVolumeSpecName: "kube-api-access-9lcpf") pod "e701d871-ccce-4ab7-bb1a-4fb9c1766519" (UID: "e701d871-ccce-4ab7-bb1a-4fb9c1766519"). InnerVolumeSpecName "kube-api-access-9lcpf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:52:48 crc kubenswrapper[4998]: I0203 08:52:48.343401 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9lcpf\" (UniqueName: \"kubernetes.io/projected/e701d871-ccce-4ab7-bb1a-4fb9c1766519-kube-api-access-9lcpf\") on node \"crc\" DevicePath \"\"" Feb 03 08:52:48 crc kubenswrapper[4998]: I0203 08:52:48.343437 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e701d871-ccce-4ab7-bb1a-4fb9c1766519-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 08:52:48 crc kubenswrapper[4998]: I0203 08:52:48.399699 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-c1fc-account-create-update-77knt" Feb 03 08:52:48 crc kubenswrapper[4998]: I0203 08:52:48.444956 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/588b9b50-534f-4ba6-a43f-d5065b6d380b-operator-scripts\") pod \"588b9b50-534f-4ba6-a43f-d5065b6d380b\" (UID: \"588b9b50-534f-4ba6-a43f-d5065b6d380b\") " Feb 03 08:52:48 crc kubenswrapper[4998]: I0203 08:52:48.445086 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-46nnp\" (UniqueName: \"kubernetes.io/projected/588b9b50-534f-4ba6-a43f-d5065b6d380b-kube-api-access-46nnp\") pod \"588b9b50-534f-4ba6-a43f-d5065b6d380b\" (UID: \"588b9b50-534f-4ba6-a43f-d5065b6d380b\") " Feb 03 08:52:48 crc kubenswrapper[4998]: I0203 08:52:48.445449 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/588b9b50-534f-4ba6-a43f-d5065b6d380b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "588b9b50-534f-4ba6-a43f-d5065b6d380b" (UID: "588b9b50-534f-4ba6-a43f-d5065b6d380b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:52:48 crc kubenswrapper[4998]: I0203 08:52:48.445738 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/588b9b50-534f-4ba6-a43f-d5065b6d380b-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 08:52:48 crc kubenswrapper[4998]: I0203 08:52:48.449798 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/588b9b50-534f-4ba6-a43f-d5065b6d380b-kube-api-access-46nnp" (OuterVolumeSpecName: "kube-api-access-46nnp") pod "588b9b50-534f-4ba6-a43f-d5065b6d380b" (UID: "588b9b50-534f-4ba6-a43f-d5065b6d380b"). InnerVolumeSpecName "kube-api-access-46nnp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:52:48 crc kubenswrapper[4998]: I0203 08:52:48.548028 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-46nnp\" (UniqueName: \"kubernetes.io/projected/588b9b50-534f-4ba6-a43f-d5065b6d380b-kube-api-access-46nnp\") on node \"crc\" DevicePath \"\"" Feb 03 08:52:48 crc kubenswrapper[4998]: I0203 08:52:48.816011 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tc2c6" event={"ID":"a17ca407-6690-4060-8dc4-1a2e44fb32d8","Type":"ContainerStarted","Data":"3e8262cde2338d99726167a9f82c989921a7a389853e564105242cae38eeb6b7"} Feb 03 08:52:48 crc kubenswrapper[4998]: I0203 08:52:48.817902 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-c1fc-account-create-update-77knt" event={"ID":"588b9b50-534f-4ba6-a43f-d5065b6d380b","Type":"ContainerDied","Data":"3ddec7d6946f078d45435a438f103604e445b5d9c85885d23f28762c1eafb54f"} Feb 03 08:52:48 crc kubenswrapper[4998]: I0203 08:52:48.817932 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3ddec7d6946f078d45435a438f103604e445b5d9c85885d23f28762c1eafb54f" Feb 03 08:52:48 crc kubenswrapper[4998]: I0203 08:52:48.817946 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-c1fc-account-create-update-77knt" Feb 03 08:52:48 crc kubenswrapper[4998]: I0203 08:52:48.819683 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-kxrfk" event={"ID":"e701d871-ccce-4ab7-bb1a-4fb9c1766519","Type":"ContainerDied","Data":"e88e04e5b7c2e2cfbc2d099ace54c1c584e8e2df06c7aef10ca62cddb137f043"} Feb 03 08:52:48 crc kubenswrapper[4998]: I0203 08:52:48.819762 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-kxrfk" Feb 03 08:52:48 crc kubenswrapper[4998]: I0203 08:52:48.819796 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e88e04e5b7c2e2cfbc2d099ace54c1c584e8e2df06c7aef10ca62cddb137f043" Feb 03 08:52:48 crc kubenswrapper[4998]: I0203 08:52:48.842227 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-tc2c6" podStartSLOduration=2.35130778 podStartE2EDuration="4.842208847s" podCreationTimestamp="2026-02-03 08:52:44 +0000 UTC" firstStartedPulling="2026-02-03 08:52:45.779825143 +0000 UTC m=+7604.066518949" lastFinishedPulling="2026-02-03 08:52:48.27072621 +0000 UTC m=+7606.557420016" observedRunningTime="2026-02-03 08:52:48.835101466 +0000 UTC m=+7607.121795272" watchObservedRunningTime="2026-02-03 08:52:48.842208847 +0000 UTC m=+7607.128902653" Feb 03 08:52:50 crc kubenswrapper[4998]: I0203 08:52:50.478393 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-xw255"] Feb 03 08:52:50 crc kubenswrapper[4998]: E0203 08:52:50.478762 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e701d871-ccce-4ab7-bb1a-4fb9c1766519" containerName="mariadb-database-create" Feb 03 08:52:50 crc kubenswrapper[4998]: I0203 08:52:50.478797 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="e701d871-ccce-4ab7-bb1a-4fb9c1766519" containerName="mariadb-database-create" Feb 03 08:52:50 crc kubenswrapper[4998]: E0203 08:52:50.478825 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="588b9b50-534f-4ba6-a43f-d5065b6d380b" containerName="mariadb-account-create-update" Feb 03 08:52:50 crc kubenswrapper[4998]: I0203 08:52:50.478834 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="588b9b50-534f-4ba6-a43f-d5065b6d380b" containerName="mariadb-account-create-update" Feb 03 08:52:50 crc kubenswrapper[4998]: I0203 08:52:50.479046 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="588b9b50-534f-4ba6-a43f-d5065b6d380b" containerName="mariadb-account-create-update" Feb 03 08:52:50 crc kubenswrapper[4998]: I0203 08:52:50.479069 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="e701d871-ccce-4ab7-bb1a-4fb9c1766519" containerName="mariadb-database-create" Feb 03 08:52:50 crc kubenswrapper[4998]: I0203 08:52:50.479763 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-xw255" Feb 03 08:52:50 crc kubenswrapper[4998]: I0203 08:52:50.482739 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Feb 03 08:52:50 crc kubenswrapper[4998]: I0203 08:52:50.483021 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Feb 03 08:52:50 crc kubenswrapper[4998]: I0203 08:52:50.483164 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-79lr8" Feb 03 08:52:50 crc kubenswrapper[4998]: I0203 08:52:50.483165 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Feb 03 08:52:50 crc kubenswrapper[4998]: I0203 08:52:50.498554 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-xw255"] Feb 03 08:52:50 crc kubenswrapper[4998]: I0203 08:52:50.584318 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b4e9b73-32ac-4ce4-b368-7bf898a77203-config-data\") pod \"keystone-db-sync-xw255\" (UID: \"7b4e9b73-32ac-4ce4-b368-7bf898a77203\") " pod="openstack/keystone-db-sync-xw255" Feb 03 08:52:50 crc kubenswrapper[4998]: I0203 08:52:50.584731 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5qln\" (UniqueName: \"kubernetes.io/projected/7b4e9b73-32ac-4ce4-b368-7bf898a77203-kube-api-access-b5qln\") pod \"keystone-db-sync-xw255\" (UID: \"7b4e9b73-32ac-4ce4-b368-7bf898a77203\") " pod="openstack/keystone-db-sync-xw255" Feb 03 08:52:50 crc kubenswrapper[4998]: I0203 08:52:50.584821 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b4e9b73-32ac-4ce4-b368-7bf898a77203-combined-ca-bundle\") pod \"keystone-db-sync-xw255\" (UID: \"7b4e9b73-32ac-4ce4-b368-7bf898a77203\") " pod="openstack/keystone-db-sync-xw255" Feb 03 08:52:50 crc kubenswrapper[4998]: I0203 08:52:50.686993 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b4e9b73-32ac-4ce4-b368-7bf898a77203-config-data\") pod \"keystone-db-sync-xw255\" (UID: \"7b4e9b73-32ac-4ce4-b368-7bf898a77203\") " pod="openstack/keystone-db-sync-xw255" Feb 03 08:52:50 crc kubenswrapper[4998]: I0203 08:52:50.687111 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5qln\" (UniqueName: \"kubernetes.io/projected/7b4e9b73-32ac-4ce4-b368-7bf898a77203-kube-api-access-b5qln\") pod \"keystone-db-sync-xw255\" (UID: \"7b4e9b73-32ac-4ce4-b368-7bf898a77203\") " pod="openstack/keystone-db-sync-xw255" Feb 03 08:52:50 crc kubenswrapper[4998]: I0203 08:52:50.687177 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b4e9b73-32ac-4ce4-b368-7bf898a77203-combined-ca-bundle\") pod \"keystone-db-sync-xw255\" (UID: \"7b4e9b73-32ac-4ce4-b368-7bf898a77203\") " pod="openstack/keystone-db-sync-xw255" Feb 03 08:52:50 crc kubenswrapper[4998]: I0203 08:52:50.692880 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b4e9b73-32ac-4ce4-b368-7bf898a77203-config-data\") pod \"keystone-db-sync-xw255\" (UID: \"7b4e9b73-32ac-4ce4-b368-7bf898a77203\") " 
pod="openstack/keystone-db-sync-xw255" Feb 03 08:52:50 crc kubenswrapper[4998]: I0203 08:52:50.692982 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b4e9b73-32ac-4ce4-b368-7bf898a77203-combined-ca-bundle\") pod \"keystone-db-sync-xw255\" (UID: \"7b4e9b73-32ac-4ce4-b368-7bf898a77203\") " pod="openstack/keystone-db-sync-xw255" Feb 03 08:52:50 crc kubenswrapper[4998]: I0203 08:52:50.703310 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5qln\" (UniqueName: \"kubernetes.io/projected/7b4e9b73-32ac-4ce4-b368-7bf898a77203-kube-api-access-b5qln\") pod \"keystone-db-sync-xw255\" (UID: \"7b4e9b73-32ac-4ce4-b368-7bf898a77203\") " pod="openstack/keystone-db-sync-xw255" Feb 03 08:52:50 crc kubenswrapper[4998]: I0203 08:52:50.796440 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-xw255" Feb 03 08:52:51 crc kubenswrapper[4998]: I0203 08:52:51.228947 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-xw255"] Feb 03 08:52:51 crc kubenswrapper[4998]: W0203 08:52:51.230984 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7b4e9b73_32ac_4ce4_b368_7bf898a77203.slice/crio-8041dc79069c497557a855fb7be3d4d2910598e529c7495618055f6d805bd21d WatchSource:0}: Error finding container 8041dc79069c497557a855fb7be3d4d2910598e529c7495618055f6d805bd21d: Status 404 returned error can't find the container with id 8041dc79069c497557a855fb7be3d4d2910598e529c7495618055f6d805bd21d Feb 03 08:52:51 crc kubenswrapper[4998]: I0203 08:52:51.848716 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-xw255" event={"ID":"7b4e9b73-32ac-4ce4-b368-7bf898a77203","Type":"ContainerStarted","Data":"8041dc79069c497557a855fb7be3d4d2910598e529c7495618055f6d805bd21d"} Feb 03 08:52:52 crc kubenswrapper[4998]: I0203 08:52:52.442892 4998 scope.go:117] "RemoveContainer" containerID="8b73a3adb01550fb0197fbf3c3543111a8f8997fd8a03538adf5471f08cef4d6" Feb 03 08:52:52 crc kubenswrapper[4998]: E0203 08:52:52.444430 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:52:54 crc kubenswrapper[4998]: I0203 08:52:54.619387 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-tc2c6" Feb 03 08:52:54 crc kubenswrapper[4998]: I0203 08:52:54.619743 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-tc2c6" Feb 03 08:52:54 crc kubenswrapper[4998]: I0203 08:52:54.673873 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-tc2c6" Feb 03 08:52:54 crc kubenswrapper[4998]: I0203 08:52:54.914564 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-tc2c6" Feb 03 08:52:54 crc kubenswrapper[4998]: I0203 08:52:54.969215 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/certified-operators-tc2c6"] Feb 03 08:52:56 crc kubenswrapper[4998]: I0203 08:52:56.886132 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-xw255" event={"ID":"7b4e9b73-32ac-4ce4-b368-7bf898a77203","Type":"ContainerStarted","Data":"902de658799b556c98159961659608e7982b4826b627f6bb02e8f1bf5018079d"} Feb 03 08:52:56 crc kubenswrapper[4998]: I0203 08:52:56.886538 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-tc2c6" podUID="a17ca407-6690-4060-8dc4-1a2e44fb32d8" containerName="registry-server" containerID="cri-o://3e8262cde2338d99726167a9f82c989921a7a389853e564105242cae38eeb6b7" gracePeriod=2 Feb 03 08:52:56 crc kubenswrapper[4998]: I0203 08:52:56.916764 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-xw255" podStartSLOduration=2.156646895 podStartE2EDuration="6.916741488s" podCreationTimestamp="2026-02-03 08:52:50 +0000 UTC" firstStartedPulling="2026-02-03 08:52:51.233261381 +0000 UTC m=+7609.519955187" lastFinishedPulling="2026-02-03 08:52:55.993355974 +0000 UTC m=+7614.280049780" observedRunningTime="2026-02-03 08:52:56.911989873 +0000 UTC m=+7615.198683719" watchObservedRunningTime="2026-02-03 08:52:56.916741488 +0000 UTC m=+7615.203435294" Feb 03 08:52:57 crc kubenswrapper[4998]: I0203 08:52:57.376607 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tc2c6" Feb 03 08:52:57 crc kubenswrapper[4998]: I0203 08:52:57.407161 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a17ca407-6690-4060-8dc4-1a2e44fb32d8-utilities\") pod \"a17ca407-6690-4060-8dc4-1a2e44fb32d8\" (UID: \"a17ca407-6690-4060-8dc4-1a2e44fb32d8\") " Feb 03 08:52:57 crc kubenswrapper[4998]: I0203 08:52:57.407225 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a17ca407-6690-4060-8dc4-1a2e44fb32d8-catalog-content\") pod \"a17ca407-6690-4060-8dc4-1a2e44fb32d8\" (UID: \"a17ca407-6690-4060-8dc4-1a2e44fb32d8\") " Feb 03 08:52:57 crc kubenswrapper[4998]: I0203 08:52:57.407311 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hxsmx\" (UniqueName: \"kubernetes.io/projected/a17ca407-6690-4060-8dc4-1a2e44fb32d8-kube-api-access-hxsmx\") pod \"a17ca407-6690-4060-8dc4-1a2e44fb32d8\" (UID: \"a17ca407-6690-4060-8dc4-1a2e44fb32d8\") " Feb 03 08:52:57 crc kubenswrapper[4998]: I0203 08:52:57.408341 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a17ca407-6690-4060-8dc4-1a2e44fb32d8-utilities" (OuterVolumeSpecName: "utilities") pod "a17ca407-6690-4060-8dc4-1a2e44fb32d8" (UID: "a17ca407-6690-4060-8dc4-1a2e44fb32d8"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:52:57 crc kubenswrapper[4998]: I0203 08:52:57.410336 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a17ca407-6690-4060-8dc4-1a2e44fb32d8-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 08:52:57 crc kubenswrapper[4998]: I0203 08:52:57.413913 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a17ca407-6690-4060-8dc4-1a2e44fb32d8-kube-api-access-hxsmx" (OuterVolumeSpecName: "kube-api-access-hxsmx") pod "a17ca407-6690-4060-8dc4-1a2e44fb32d8" (UID: "a17ca407-6690-4060-8dc4-1a2e44fb32d8"). InnerVolumeSpecName "kube-api-access-hxsmx". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:52:57 crc kubenswrapper[4998]: I0203 08:52:57.514838 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hxsmx\" (UniqueName: \"kubernetes.io/projected/a17ca407-6690-4060-8dc4-1a2e44fb32d8-kube-api-access-hxsmx\") on node \"crc\" DevicePath \"\"" Feb 03 08:52:57 crc kubenswrapper[4998]: I0203 08:52:57.671898 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a17ca407-6690-4060-8dc4-1a2e44fb32d8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a17ca407-6690-4060-8dc4-1a2e44fb32d8" (UID: "a17ca407-6690-4060-8dc4-1a2e44fb32d8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:52:57 crc kubenswrapper[4998]: I0203 08:52:57.718120 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a17ca407-6690-4060-8dc4-1a2e44fb32d8-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 08:52:57 crc kubenswrapper[4998]: I0203 08:52:57.895472 4998 generic.go:334] "Generic (PLEG): container finished" podID="a17ca407-6690-4060-8dc4-1a2e44fb32d8" containerID="3e8262cde2338d99726167a9f82c989921a7a389853e564105242cae38eeb6b7" exitCode=0 Feb 03 08:52:57 crc kubenswrapper[4998]: I0203 08:52:57.895538 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tc2c6" Feb 03 08:52:57 crc kubenswrapper[4998]: I0203 08:52:57.895559 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tc2c6" event={"ID":"a17ca407-6690-4060-8dc4-1a2e44fb32d8","Type":"ContainerDied","Data":"3e8262cde2338d99726167a9f82c989921a7a389853e564105242cae38eeb6b7"} Feb 03 08:52:57 crc kubenswrapper[4998]: I0203 08:52:57.896710 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tc2c6" event={"ID":"a17ca407-6690-4060-8dc4-1a2e44fb32d8","Type":"ContainerDied","Data":"9de8eaf27e8b454fbb3087b90f9280aac6442a5237c94385f2e40fba6907553f"} Feb 03 08:52:57 crc kubenswrapper[4998]: I0203 08:52:57.896821 4998 scope.go:117] "RemoveContainer" containerID="3e8262cde2338d99726167a9f82c989921a7a389853e564105242cae38eeb6b7" Feb 03 08:52:57 crc kubenswrapper[4998]: I0203 08:52:57.916756 4998 scope.go:117] "RemoveContainer" containerID="d748fe3ac6da1cdf4990a3b3ffe765569df66b74c0af4cb8da51f0cc83ba6768" Feb 03 08:52:57 crc kubenswrapper[4998]: I0203 08:52:57.927967 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tc2c6"] Feb 03 08:52:57 crc kubenswrapper[4998]: I0203 08:52:57.947623 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-tc2c6"] Feb 03 08:52:57 crc kubenswrapper[4998]: I0203 08:52:57.956368 4998 scope.go:117] "RemoveContainer" containerID="8ca96ee2d5c50455b3a081bfdf1afb435009f10342a232a28d3bfb8d719c105d" Feb 03 08:52:57 crc kubenswrapper[4998]: I0203 08:52:57.978310 4998 scope.go:117] "RemoveContainer" containerID="3e8262cde2338d99726167a9f82c989921a7a389853e564105242cae38eeb6b7" Feb 03 08:52:57 crc kubenswrapper[4998]: E0203 08:52:57.978854 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e8262cde2338d99726167a9f82c989921a7a389853e564105242cae38eeb6b7\": container with ID starting with 3e8262cde2338d99726167a9f82c989921a7a389853e564105242cae38eeb6b7 not found: ID does not exist" containerID="3e8262cde2338d99726167a9f82c989921a7a389853e564105242cae38eeb6b7" Feb 03 08:52:57 crc kubenswrapper[4998]: I0203 08:52:57.978886 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e8262cde2338d99726167a9f82c989921a7a389853e564105242cae38eeb6b7"} err="failed to get container status \"3e8262cde2338d99726167a9f82c989921a7a389853e564105242cae38eeb6b7\": rpc error: code = NotFound desc = could not find container \"3e8262cde2338d99726167a9f82c989921a7a389853e564105242cae38eeb6b7\": container with ID starting with 3e8262cde2338d99726167a9f82c989921a7a389853e564105242cae38eeb6b7 not found: ID does not exist" Feb 03 08:52:57 crc kubenswrapper[4998]: I0203 08:52:57.978915 4998 scope.go:117] "RemoveContainer" containerID="d748fe3ac6da1cdf4990a3b3ffe765569df66b74c0af4cb8da51f0cc83ba6768" Feb 03 08:52:57 crc kubenswrapper[4998]: E0203 08:52:57.979213 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d748fe3ac6da1cdf4990a3b3ffe765569df66b74c0af4cb8da51f0cc83ba6768\": container with ID starting with d748fe3ac6da1cdf4990a3b3ffe765569df66b74c0af4cb8da51f0cc83ba6768 not found: ID does not exist" containerID="d748fe3ac6da1cdf4990a3b3ffe765569df66b74c0af4cb8da51f0cc83ba6768" Feb 03 08:52:57 crc kubenswrapper[4998]: I0203 08:52:57.979243 4998 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d748fe3ac6da1cdf4990a3b3ffe765569df66b74c0af4cb8da51f0cc83ba6768"} err="failed to get container status \"d748fe3ac6da1cdf4990a3b3ffe765569df66b74c0af4cb8da51f0cc83ba6768\": rpc error: code = NotFound desc = could not find container \"d748fe3ac6da1cdf4990a3b3ffe765569df66b74c0af4cb8da51f0cc83ba6768\": container with ID starting with d748fe3ac6da1cdf4990a3b3ffe765569df66b74c0af4cb8da51f0cc83ba6768 not found: ID does not exist" Feb 03 08:52:57 crc kubenswrapper[4998]: I0203 08:52:57.979260 4998 scope.go:117] "RemoveContainer" containerID="8ca96ee2d5c50455b3a081bfdf1afb435009f10342a232a28d3bfb8d719c105d" Feb 03 08:52:57 crc kubenswrapper[4998]: E0203 08:52:57.979584 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ca96ee2d5c50455b3a081bfdf1afb435009f10342a232a28d3bfb8d719c105d\": container with ID starting with 8ca96ee2d5c50455b3a081bfdf1afb435009f10342a232a28d3bfb8d719c105d not found: ID does not exist" containerID="8ca96ee2d5c50455b3a081bfdf1afb435009f10342a232a28d3bfb8d719c105d" Feb 03 08:52:57 crc kubenswrapper[4998]: I0203 08:52:57.979650 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ca96ee2d5c50455b3a081bfdf1afb435009f10342a232a28d3bfb8d719c105d"} err="failed to get container status \"8ca96ee2d5c50455b3a081bfdf1afb435009f10342a232a28d3bfb8d719c105d\": rpc error: code = NotFound desc = could not find container \"8ca96ee2d5c50455b3a081bfdf1afb435009f10342a232a28d3bfb8d719c105d\": container with ID starting with 8ca96ee2d5c50455b3a081bfdf1afb435009f10342a232a28d3bfb8d719c105d not found: ID does not exist" Feb 03 08:52:58 crc kubenswrapper[4998]: I0203 08:52:58.441418 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a17ca407-6690-4060-8dc4-1a2e44fb32d8" path="/var/lib/kubelet/pods/a17ca407-6690-4060-8dc4-1a2e44fb32d8/volumes" Feb 03 08:52:58 crc kubenswrapper[4998]: I0203 08:52:58.907478 4998 generic.go:334] "Generic (PLEG): container finished" podID="7b4e9b73-32ac-4ce4-b368-7bf898a77203" containerID="902de658799b556c98159961659608e7982b4826b627f6bb02e8f1bf5018079d" exitCode=0 Feb 03 08:52:58 crc kubenswrapper[4998]: I0203 08:52:58.907531 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-xw255" event={"ID":"7b4e9b73-32ac-4ce4-b368-7bf898a77203","Type":"ContainerDied","Data":"902de658799b556c98159961659608e7982b4826b627f6bb02e8f1bf5018079d"} Feb 03 08:52:59 crc kubenswrapper[4998]: I0203 08:52:59.233746 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Feb 03 08:53:00 crc kubenswrapper[4998]: I0203 08:53:00.280726 4998 util.go:48] "No ready sandbox for pod can be found. 
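
Annotation: the RemoveContainer / NotFound pairs above are a benign race: the container was already deleted, so a follow-up status query fails with gRPC NotFound, which the deletor logs and moves past. A sketch of treating NotFound as already-removed; the gRPC status/codes packages are real, but containerStatus here is a hypothetical stand-in for the CRI runtime call:

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// containerStatus is a hypothetical stand-in for the CRI runtime's
// status query; here it always fails the way the log shows.
func containerStatus(id string) error {
	return status.Errorf(codes.NotFound,
		"could not find container %q: ID does not exist", id)
}

// removeContainer treats NotFound as success: the container is already
// gone, which is exactly the state removal was trying to reach.
func removeContainer(id string) error {
	if err := containerStatus(id); err != nil {
		if status.Code(err) == codes.NotFound {
			fmt.Printf("container %s already removed, nothing to do\n", id)
			return nil
		}
		return fmt.Errorf("get status: %w", err)
	}
	// real deletion would happen here
	return nil
}

func main() {
	if err := removeContainer("3e8262cde233"); err != nil {
		panic(err)
	}
}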
Need to start a new one" pod="openstack/keystone-db-sync-xw255" Feb 03 08:53:00 crc kubenswrapper[4998]: I0203 08:53:00.465038 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b4e9b73-32ac-4ce4-b368-7bf898a77203-combined-ca-bundle\") pod \"7b4e9b73-32ac-4ce4-b368-7bf898a77203\" (UID: \"7b4e9b73-32ac-4ce4-b368-7bf898a77203\") " Feb 03 08:53:00 crc kubenswrapper[4998]: I0203 08:53:00.465335 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b5qln\" (UniqueName: \"kubernetes.io/projected/7b4e9b73-32ac-4ce4-b368-7bf898a77203-kube-api-access-b5qln\") pod \"7b4e9b73-32ac-4ce4-b368-7bf898a77203\" (UID: \"7b4e9b73-32ac-4ce4-b368-7bf898a77203\") " Feb 03 08:53:00 crc kubenswrapper[4998]: I0203 08:53:00.465392 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b4e9b73-32ac-4ce4-b368-7bf898a77203-config-data\") pod \"7b4e9b73-32ac-4ce4-b368-7bf898a77203\" (UID: \"7b4e9b73-32ac-4ce4-b368-7bf898a77203\") " Feb 03 08:53:00 crc kubenswrapper[4998]: I0203 08:53:00.471664 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b4e9b73-32ac-4ce4-b368-7bf898a77203-kube-api-access-b5qln" (OuterVolumeSpecName: "kube-api-access-b5qln") pod "7b4e9b73-32ac-4ce4-b368-7bf898a77203" (UID: "7b4e9b73-32ac-4ce4-b368-7bf898a77203"). InnerVolumeSpecName "kube-api-access-b5qln". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:53:00 crc kubenswrapper[4998]: I0203 08:53:00.497585 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b4e9b73-32ac-4ce4-b368-7bf898a77203-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7b4e9b73-32ac-4ce4-b368-7bf898a77203" (UID: "7b4e9b73-32ac-4ce4-b368-7bf898a77203"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:53:00 crc kubenswrapper[4998]: I0203 08:53:00.527578 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b4e9b73-32ac-4ce4-b368-7bf898a77203-config-data" (OuterVolumeSpecName: "config-data") pod "7b4e9b73-32ac-4ce4-b368-7bf898a77203" (UID: "7b4e9b73-32ac-4ce4-b368-7bf898a77203"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:53:00 crc kubenswrapper[4998]: I0203 08:53:00.567369 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b4e9b73-32ac-4ce4-b368-7bf898a77203-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 08:53:00 crc kubenswrapper[4998]: I0203 08:53:00.567406 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b5qln\" (UniqueName: \"kubernetes.io/projected/7b4e9b73-32ac-4ce4-b368-7bf898a77203-kube-api-access-b5qln\") on node \"crc\" DevicePath \"\"" Feb 03 08:53:00 crc kubenswrapper[4998]: I0203 08:53:00.567420 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b4e9b73-32ac-4ce4-b368-7bf898a77203-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 08:53:00 crc kubenswrapper[4998]: I0203 08:53:00.922873 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-xw255" event={"ID":"7b4e9b73-32ac-4ce4-b368-7bf898a77203","Type":"ContainerDied","Data":"8041dc79069c497557a855fb7be3d4d2910598e529c7495618055f6d805bd21d"} Feb 03 08:53:00 crc kubenswrapper[4998]: I0203 08:53:00.922911 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8041dc79069c497557a855fb7be3d4d2910598e529c7495618055f6d805bd21d" Feb 03 08:53:00 crc kubenswrapper[4998]: I0203 08:53:00.922943 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-xw255" Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.537045 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7587496b69-crmkf"] Feb 03 08:53:01 crc kubenswrapper[4998]: E0203 08:53:01.537469 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a17ca407-6690-4060-8dc4-1a2e44fb32d8" containerName="extract-utilities" Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.537487 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="a17ca407-6690-4060-8dc4-1a2e44fb32d8" containerName="extract-utilities" Feb 03 08:53:01 crc kubenswrapper[4998]: E0203 08:53:01.537503 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a17ca407-6690-4060-8dc4-1a2e44fb32d8" containerName="registry-server" Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.537511 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="a17ca407-6690-4060-8dc4-1a2e44fb32d8" containerName="registry-server" Feb 03 08:53:01 crc kubenswrapper[4998]: E0203 08:53:01.537540 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b4e9b73-32ac-4ce4-b368-7bf898a77203" containerName="keystone-db-sync" Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.537548 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b4e9b73-32ac-4ce4-b368-7bf898a77203" containerName="keystone-db-sync" Feb 03 08:53:01 crc kubenswrapper[4998]: E0203 08:53:01.537569 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a17ca407-6690-4060-8dc4-1a2e44fb32d8" containerName="extract-content" Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.537577 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="a17ca407-6690-4060-8dc4-1a2e44fb32d8" containerName="extract-content" Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.537772 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="a17ca407-6690-4060-8dc4-1a2e44fb32d8" containerName="registry-server" Feb 03 08:53:01 crc 
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.537801 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b4e9b73-32ac-4ce4-b368-7bf898a77203" containerName="keystone-db-sync"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.551187 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7587496b69-crmkf"]
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.551316 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7587496b69-crmkf"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.578850 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-ks4s7"]
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.580320 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-ks4s7"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.584501 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-79lr8"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.584742 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.584928 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.585085 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.585272 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.600475 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-ks4s7"]
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.687418 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7npc4\" (UniqueName: \"kubernetes.io/projected/f08be859-9c65-4eae-8359-3107b76fc9df-kube-api-access-7npc4\") pod \"dnsmasq-dns-7587496b69-crmkf\" (UID: \"f08be859-9c65-4eae-8359-3107b76fc9df\") " pod="openstack/dnsmasq-dns-7587496b69-crmkf"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.687491 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tf6nz\" (UniqueName: \"kubernetes.io/projected/691b80e2-89db-4fa9-bdc1-08814aae52b6-kube-api-access-tf6nz\") pod \"keystone-bootstrap-ks4s7\" (UID: \"691b80e2-89db-4fa9-bdc1-08814aae52b6\") " pod="openstack/keystone-bootstrap-ks4s7"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.687529 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f08be859-9c65-4eae-8359-3107b76fc9df-ovsdbserver-nb\") pod \"dnsmasq-dns-7587496b69-crmkf\" (UID: \"f08be859-9c65-4eae-8359-3107b76fc9df\") " pod="openstack/dnsmasq-dns-7587496b69-crmkf"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.687556 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/691b80e2-89db-4fa9-bdc1-08814aae52b6-scripts\") pod \"keystone-bootstrap-ks4s7\" (UID: \"691b80e2-89db-4fa9-bdc1-08814aae52b6\") " pod="openstack/keystone-bootstrap-ks4s7"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.687701 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/691b80e2-89db-4fa9-bdc1-08814aae52b6-fernet-keys\") pod \"keystone-bootstrap-ks4s7\" (UID: \"691b80e2-89db-4fa9-bdc1-08814aae52b6\") " pod="openstack/keystone-bootstrap-ks4s7"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.687940 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/691b80e2-89db-4fa9-bdc1-08814aae52b6-credential-keys\") pod \"keystone-bootstrap-ks4s7\" (UID: \"691b80e2-89db-4fa9-bdc1-08814aae52b6\") " pod="openstack/keystone-bootstrap-ks4s7"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.687994 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f08be859-9c65-4eae-8359-3107b76fc9df-ovsdbserver-sb\") pod \"dnsmasq-dns-7587496b69-crmkf\" (UID: \"f08be859-9c65-4eae-8359-3107b76fc9df\") " pod="openstack/dnsmasq-dns-7587496b69-crmkf"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.688053 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f08be859-9c65-4eae-8359-3107b76fc9df-dns-svc\") pod \"dnsmasq-dns-7587496b69-crmkf\" (UID: \"f08be859-9c65-4eae-8359-3107b76fc9df\") " pod="openstack/dnsmasq-dns-7587496b69-crmkf"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.688139 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/691b80e2-89db-4fa9-bdc1-08814aae52b6-config-data\") pod \"keystone-bootstrap-ks4s7\" (UID: \"691b80e2-89db-4fa9-bdc1-08814aae52b6\") " pod="openstack/keystone-bootstrap-ks4s7"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.688258 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f08be859-9c65-4eae-8359-3107b76fc9df-config\") pod \"dnsmasq-dns-7587496b69-crmkf\" (UID: \"f08be859-9c65-4eae-8359-3107b76fc9df\") " pod="openstack/dnsmasq-dns-7587496b69-crmkf"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.688303 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/691b80e2-89db-4fa9-bdc1-08814aae52b6-combined-ca-bundle\") pod \"keystone-bootstrap-ks4s7\" (UID: \"691b80e2-89db-4fa9-bdc1-08814aae52b6\") " pod="openstack/keystone-bootstrap-ks4s7"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.789994 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f08be859-9c65-4eae-8359-3107b76fc9df-ovsdbserver-nb\") pod \"dnsmasq-dns-7587496b69-crmkf\" (UID: \"f08be859-9c65-4eae-8359-3107b76fc9df\") " pod="openstack/dnsmasq-dns-7587496b69-crmkf"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.790048 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/691b80e2-89db-4fa9-bdc1-08814aae52b6-scripts\") pod \"keystone-bootstrap-ks4s7\" (UID: \"691b80e2-89db-4fa9-bdc1-08814aae52b6\") " pod="openstack/keystone-bootstrap-ks4s7"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.790103 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/691b80e2-89db-4fa9-bdc1-08814aae52b6-fernet-keys\") pod \"keystone-bootstrap-ks4s7\" (UID: \"691b80e2-89db-4fa9-bdc1-08814aae52b6\") " pod="openstack/keystone-bootstrap-ks4s7"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.790149 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/691b80e2-89db-4fa9-bdc1-08814aae52b6-credential-keys\") pod \"keystone-bootstrap-ks4s7\" (UID: \"691b80e2-89db-4fa9-bdc1-08814aae52b6\") " pod="openstack/keystone-bootstrap-ks4s7"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.790173 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f08be859-9c65-4eae-8359-3107b76fc9df-ovsdbserver-sb\") pod \"dnsmasq-dns-7587496b69-crmkf\" (UID: \"f08be859-9c65-4eae-8359-3107b76fc9df\") " pod="openstack/dnsmasq-dns-7587496b69-crmkf"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.790205 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f08be859-9c65-4eae-8359-3107b76fc9df-dns-svc\") pod \"dnsmasq-dns-7587496b69-crmkf\" (UID: \"f08be859-9c65-4eae-8359-3107b76fc9df\") " pod="openstack/dnsmasq-dns-7587496b69-crmkf"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.790239 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/691b80e2-89db-4fa9-bdc1-08814aae52b6-config-data\") pod \"keystone-bootstrap-ks4s7\" (UID: \"691b80e2-89db-4fa9-bdc1-08814aae52b6\") " pod="openstack/keystone-bootstrap-ks4s7"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.790266 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f08be859-9c65-4eae-8359-3107b76fc9df-config\") pod \"dnsmasq-dns-7587496b69-crmkf\" (UID: \"f08be859-9c65-4eae-8359-3107b76fc9df\") " pod="openstack/dnsmasq-dns-7587496b69-crmkf"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.790288 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/691b80e2-89db-4fa9-bdc1-08814aae52b6-combined-ca-bundle\") pod \"keystone-bootstrap-ks4s7\" (UID: \"691b80e2-89db-4fa9-bdc1-08814aae52b6\") " pod="openstack/keystone-bootstrap-ks4s7"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.790321 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7npc4\" (UniqueName: \"kubernetes.io/projected/f08be859-9c65-4eae-8359-3107b76fc9df-kube-api-access-7npc4\") pod \"dnsmasq-dns-7587496b69-crmkf\" (UID: \"f08be859-9c65-4eae-8359-3107b76fc9df\") " pod="openstack/dnsmasq-dns-7587496b69-crmkf"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.790347 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tf6nz\" (UniqueName: \"kubernetes.io/projected/691b80e2-89db-4fa9-bdc1-08814aae52b6-kube-api-access-tf6nz\") pod \"keystone-bootstrap-ks4s7\" (UID: \"691b80e2-89db-4fa9-bdc1-08814aae52b6\") " pod="openstack/keystone-bootstrap-ks4s7"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.791169 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f08be859-9c65-4eae-8359-3107b76fc9df-ovsdbserver-nb\") pod \"dnsmasq-dns-7587496b69-crmkf\" (UID: \"f08be859-9c65-4eae-8359-3107b76fc9df\") " pod="openstack/dnsmasq-dns-7587496b69-crmkf"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.791327 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f08be859-9c65-4eae-8359-3107b76fc9df-config\") pod \"dnsmasq-dns-7587496b69-crmkf\" (UID: \"f08be859-9c65-4eae-8359-3107b76fc9df\") " pod="openstack/dnsmasq-dns-7587496b69-crmkf"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.791923 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f08be859-9c65-4eae-8359-3107b76fc9df-dns-svc\") pod \"dnsmasq-dns-7587496b69-crmkf\" (UID: \"f08be859-9c65-4eae-8359-3107b76fc9df\") " pod="openstack/dnsmasq-dns-7587496b69-crmkf"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.792659 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f08be859-9c65-4eae-8359-3107b76fc9df-ovsdbserver-sb\") pod \"dnsmasq-dns-7587496b69-crmkf\" (UID: \"f08be859-9c65-4eae-8359-3107b76fc9df\") " pod="openstack/dnsmasq-dns-7587496b69-crmkf"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.795780 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/691b80e2-89db-4fa9-bdc1-08814aae52b6-fernet-keys\") pod \"keystone-bootstrap-ks4s7\" (UID: \"691b80e2-89db-4fa9-bdc1-08814aae52b6\") " pod="openstack/keystone-bootstrap-ks4s7"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.796472 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/691b80e2-89db-4fa9-bdc1-08814aae52b6-config-data\") pod \"keystone-bootstrap-ks4s7\" (UID: \"691b80e2-89db-4fa9-bdc1-08814aae52b6\") " pod="openstack/keystone-bootstrap-ks4s7"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.800138 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/691b80e2-89db-4fa9-bdc1-08814aae52b6-credential-keys\") pod \"keystone-bootstrap-ks4s7\" (UID: \"691b80e2-89db-4fa9-bdc1-08814aae52b6\") " pod="openstack/keystone-bootstrap-ks4s7"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.809743 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tf6nz\" (UniqueName: \"kubernetes.io/projected/691b80e2-89db-4fa9-bdc1-08814aae52b6-kube-api-access-tf6nz\") pod \"keystone-bootstrap-ks4s7\" (UID: \"691b80e2-89db-4fa9-bdc1-08814aae52b6\") " pod="openstack/keystone-bootstrap-ks4s7"
Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.810536 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7npc4\" (UniqueName: \"kubernetes.io/projected/f08be859-9c65-4eae-8359-3107b76fc9df-kube-api-access-7npc4\") pod \"dnsmasq-dns-7587496b69-crmkf\" (UID: \"f08be859-9c65-4eae-8359-3107b76fc9df\") " pod="openstack/dnsmasq-dns-7587496b69-crmkf"
pod="openstack/keystone-bootstrap-ks4s7" Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.817975 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/691b80e2-89db-4fa9-bdc1-08814aae52b6-combined-ca-bundle\") pod \"keystone-bootstrap-ks4s7\" (UID: \"691b80e2-89db-4fa9-bdc1-08814aae52b6\") " pod="openstack/keystone-bootstrap-ks4s7" Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.890290 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7587496b69-crmkf" Feb 03 08:53:01 crc kubenswrapper[4998]: I0203 08:53:01.916027 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-ks4s7" Feb 03 08:53:02 crc kubenswrapper[4998]: I0203 08:53:02.349169 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7587496b69-crmkf"] Feb 03 08:53:02 crc kubenswrapper[4998]: I0203 08:53:02.473106 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Feb 03 08:53:02 crc kubenswrapper[4998]: I0203 08:53:02.473310 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-ks4s7"] Feb 03 08:53:02 crc kubenswrapper[4998]: I0203 08:53:02.947522 4998 generic.go:334] "Generic (PLEG): container finished" podID="f08be859-9c65-4eae-8359-3107b76fc9df" containerID="b8aa189fd4a2d943e852b93405ac4290d52dd510a095ed5323126bfd8e51e473" exitCode=0 Feb 03 08:53:02 crc kubenswrapper[4998]: I0203 08:53:02.947635 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7587496b69-crmkf" event={"ID":"f08be859-9c65-4eae-8359-3107b76fc9df","Type":"ContainerDied","Data":"b8aa189fd4a2d943e852b93405ac4290d52dd510a095ed5323126bfd8e51e473"} Feb 03 08:53:02 crc kubenswrapper[4998]: I0203 08:53:02.948171 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7587496b69-crmkf" event={"ID":"f08be859-9c65-4eae-8359-3107b76fc9df","Type":"ContainerStarted","Data":"281499c922dd231a2229aad79c3e627997a785c5a00aecdc7149c4373438f6c2"} Feb 03 08:53:02 crc kubenswrapper[4998]: I0203 08:53:02.950375 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-ks4s7" event={"ID":"691b80e2-89db-4fa9-bdc1-08814aae52b6","Type":"ContainerStarted","Data":"43c34846f85a0a5ab432878af71cd16112e389d0d278f5b70a51f2222a9b9e5c"} Feb 03 08:53:02 crc kubenswrapper[4998]: I0203 08:53:02.950432 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-ks4s7" event={"ID":"691b80e2-89db-4fa9-bdc1-08814aae52b6","Type":"ContainerStarted","Data":"ebcd44a1aeb0b76f17ae2db35a1119816c909c55beea5cdc04c9d7692afaeee6"} Feb 03 08:53:03 crc kubenswrapper[4998]: I0203 08:53:03.428219 4998 scope.go:117] "RemoveContainer" containerID="8b73a3adb01550fb0197fbf3c3543111a8f8997fd8a03538adf5471f08cef4d6" Feb 03 08:53:03 crc kubenswrapper[4998]: E0203 08:53:03.428851 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:53:03 crc kubenswrapper[4998]: I0203 08:53:03.961290 4998 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/dnsmasq-dns-7587496b69-crmkf" event={"ID":"f08be859-9c65-4eae-8359-3107b76fc9df","Type":"ContainerStarted","Data":"306e2f3df0997d678740f1a88e02da066580796a1f91cf0f158f87318ef5794c"} Feb 03 08:53:03 crc kubenswrapper[4998]: I0203 08:53:03.961549 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7587496b69-crmkf" Feb 03 08:53:03 crc kubenswrapper[4998]: I0203 08:53:03.983079 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-ks4s7" podStartSLOduration=2.983054527 podStartE2EDuration="2.983054527s" podCreationTimestamp="2026-02-03 08:53:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 08:53:03.001046729 +0000 UTC m=+7621.287740545" watchObservedRunningTime="2026-02-03 08:53:03.983054527 +0000 UTC m=+7622.269748333" Feb 03 08:53:03 crc kubenswrapper[4998]: I0203 08:53:03.986283 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7587496b69-crmkf" podStartSLOduration=2.986265458 podStartE2EDuration="2.986265458s" podCreationTimestamp="2026-02-03 08:53:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 08:53:03.978453636 +0000 UTC m=+7622.265147442" watchObservedRunningTime="2026-02-03 08:53:03.986265458 +0000 UTC m=+7622.272959264" Feb 03 08:53:06 crc kubenswrapper[4998]: I0203 08:53:06.985872 4998 generic.go:334] "Generic (PLEG): container finished" podID="691b80e2-89db-4fa9-bdc1-08814aae52b6" containerID="43c34846f85a0a5ab432878af71cd16112e389d0d278f5b70a51f2222a9b9e5c" exitCode=0 Feb 03 08:53:06 crc kubenswrapper[4998]: I0203 08:53:06.985958 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-ks4s7" event={"ID":"691b80e2-89db-4fa9-bdc1-08814aae52b6","Type":"ContainerDied","Data":"43c34846f85a0a5ab432878af71cd16112e389d0d278f5b70a51f2222a9b9e5c"} Feb 03 08:53:08 crc kubenswrapper[4998]: I0203 08:53:08.380897 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-ks4s7" Feb 03 08:53:08 crc kubenswrapper[4998]: I0203 08:53:08.533449 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/691b80e2-89db-4fa9-bdc1-08814aae52b6-combined-ca-bundle\") pod \"691b80e2-89db-4fa9-bdc1-08814aae52b6\" (UID: \"691b80e2-89db-4fa9-bdc1-08814aae52b6\") " Feb 03 08:53:08 crc kubenswrapper[4998]: I0203 08:53:08.533579 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/691b80e2-89db-4fa9-bdc1-08814aae52b6-credential-keys\") pod \"691b80e2-89db-4fa9-bdc1-08814aae52b6\" (UID: \"691b80e2-89db-4fa9-bdc1-08814aae52b6\") " Feb 03 08:53:08 crc kubenswrapper[4998]: I0203 08:53:08.533611 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tf6nz\" (UniqueName: \"kubernetes.io/projected/691b80e2-89db-4fa9-bdc1-08814aae52b6-kube-api-access-tf6nz\") pod \"691b80e2-89db-4fa9-bdc1-08814aae52b6\" (UID: \"691b80e2-89db-4fa9-bdc1-08814aae52b6\") " Feb 03 08:53:08 crc kubenswrapper[4998]: I0203 08:53:08.533712 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/691b80e2-89db-4fa9-bdc1-08814aae52b6-fernet-keys\") pod \"691b80e2-89db-4fa9-bdc1-08814aae52b6\" (UID: \"691b80e2-89db-4fa9-bdc1-08814aae52b6\") " Feb 03 08:53:08 crc kubenswrapper[4998]: I0203 08:53:08.533847 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/691b80e2-89db-4fa9-bdc1-08814aae52b6-config-data\") pod \"691b80e2-89db-4fa9-bdc1-08814aae52b6\" (UID: \"691b80e2-89db-4fa9-bdc1-08814aae52b6\") " Feb 03 08:53:08 crc kubenswrapper[4998]: I0203 08:53:08.533887 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/691b80e2-89db-4fa9-bdc1-08814aae52b6-scripts\") pod \"691b80e2-89db-4fa9-bdc1-08814aae52b6\" (UID: \"691b80e2-89db-4fa9-bdc1-08814aae52b6\") " Feb 03 08:53:08 crc kubenswrapper[4998]: I0203 08:53:08.539989 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/691b80e2-89db-4fa9-bdc1-08814aae52b6-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "691b80e2-89db-4fa9-bdc1-08814aae52b6" (UID: "691b80e2-89db-4fa9-bdc1-08814aae52b6"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:53:08 crc kubenswrapper[4998]: I0203 08:53:08.540019 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/691b80e2-89db-4fa9-bdc1-08814aae52b6-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "691b80e2-89db-4fa9-bdc1-08814aae52b6" (UID: "691b80e2-89db-4fa9-bdc1-08814aae52b6"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:53:08 crc kubenswrapper[4998]: I0203 08:53:08.540032 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/691b80e2-89db-4fa9-bdc1-08814aae52b6-scripts" (OuterVolumeSpecName: "scripts") pod "691b80e2-89db-4fa9-bdc1-08814aae52b6" (UID: "691b80e2-89db-4fa9-bdc1-08814aae52b6"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:53:08 crc kubenswrapper[4998]: I0203 08:53:08.540045 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/691b80e2-89db-4fa9-bdc1-08814aae52b6-kube-api-access-tf6nz" (OuterVolumeSpecName: "kube-api-access-tf6nz") pod "691b80e2-89db-4fa9-bdc1-08814aae52b6" (UID: "691b80e2-89db-4fa9-bdc1-08814aae52b6"). InnerVolumeSpecName "kube-api-access-tf6nz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:53:08 crc kubenswrapper[4998]: I0203 08:53:08.556187 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/691b80e2-89db-4fa9-bdc1-08814aae52b6-config-data" (OuterVolumeSpecName: "config-data") pod "691b80e2-89db-4fa9-bdc1-08814aae52b6" (UID: "691b80e2-89db-4fa9-bdc1-08814aae52b6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:53:08 crc kubenswrapper[4998]: I0203 08:53:08.560773 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/691b80e2-89db-4fa9-bdc1-08814aae52b6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "691b80e2-89db-4fa9-bdc1-08814aae52b6" (UID: "691b80e2-89db-4fa9-bdc1-08814aae52b6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:53:08 crc kubenswrapper[4998]: I0203 08:53:08.636184 4998 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/691b80e2-89db-4fa9-bdc1-08814aae52b6-credential-keys\") on node \"crc\" DevicePath \"\"" Feb 03 08:53:08 crc kubenswrapper[4998]: I0203 08:53:08.636221 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tf6nz\" (UniqueName: \"kubernetes.io/projected/691b80e2-89db-4fa9-bdc1-08814aae52b6-kube-api-access-tf6nz\") on node \"crc\" DevicePath \"\"" Feb 03 08:53:08 crc kubenswrapper[4998]: I0203 08:53:08.636234 4998 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/691b80e2-89db-4fa9-bdc1-08814aae52b6-fernet-keys\") on node \"crc\" DevicePath \"\"" Feb 03 08:53:08 crc kubenswrapper[4998]: I0203 08:53:08.636243 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/691b80e2-89db-4fa9-bdc1-08814aae52b6-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 08:53:08 crc kubenswrapper[4998]: I0203 08:53:08.636251 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/691b80e2-89db-4fa9-bdc1-08814aae52b6-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 08:53:08 crc kubenswrapper[4998]: I0203 08:53:08.636261 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/691b80e2-89db-4fa9-bdc1-08814aae52b6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 08:53:09 crc kubenswrapper[4998]: I0203 08:53:09.013713 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-ks4s7" event={"ID":"691b80e2-89db-4fa9-bdc1-08814aae52b6","Type":"ContainerDied","Data":"ebcd44a1aeb0b76f17ae2db35a1119816c909c55beea5cdc04c9d7692afaeee6"} Feb 03 08:53:09 crc kubenswrapper[4998]: I0203 08:53:09.013776 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ebcd44a1aeb0b76f17ae2db35a1119816c909c55beea5cdc04c9d7692afaeee6" Feb 03 08:53:09 crc kubenswrapper[4998]: I0203 
08:53:09.014895 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-ks4s7" Feb 03 08:53:09 crc kubenswrapper[4998]: I0203 08:53:09.186042 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-ks4s7"] Feb 03 08:53:09 crc kubenswrapper[4998]: I0203 08:53:09.197203 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-ks4s7"] Feb 03 08:53:09 crc kubenswrapper[4998]: I0203 08:53:09.277892 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-sh5xm"] Feb 03 08:53:09 crc kubenswrapper[4998]: E0203 08:53:09.278295 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="691b80e2-89db-4fa9-bdc1-08814aae52b6" containerName="keystone-bootstrap" Feb 03 08:53:09 crc kubenswrapper[4998]: I0203 08:53:09.278316 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="691b80e2-89db-4fa9-bdc1-08814aae52b6" containerName="keystone-bootstrap" Feb 03 08:53:09 crc kubenswrapper[4998]: I0203 08:53:09.278525 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="691b80e2-89db-4fa9-bdc1-08814aae52b6" containerName="keystone-bootstrap" Feb 03 08:53:09 crc kubenswrapper[4998]: I0203 08:53:09.279176 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-sh5xm" Feb 03 08:53:09 crc kubenswrapper[4998]: I0203 08:53:09.280682 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Feb 03 08:53:09 crc kubenswrapper[4998]: I0203 08:53:09.281308 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Feb 03 08:53:09 crc kubenswrapper[4998]: I0203 08:53:09.281550 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Feb 03 08:53:09 crc kubenswrapper[4998]: I0203 08:53:09.281599 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-79lr8" Feb 03 08:53:09 crc kubenswrapper[4998]: I0203 08:53:09.281742 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Feb 03 08:53:09 crc kubenswrapper[4998]: I0203 08:53:09.297584 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-sh5xm"] Feb 03 08:53:09 crc kubenswrapper[4998]: I0203 08:53:09.449935 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27863dd2-1d6b-4bd4-b215-e8e18a08146f-config-data\") pod \"keystone-bootstrap-sh5xm\" (UID: \"27863dd2-1d6b-4bd4-b215-e8e18a08146f\") " pod="openstack/keystone-bootstrap-sh5xm" Feb 03 08:53:09 crc kubenswrapper[4998]: I0203 08:53:09.450312 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/27863dd2-1d6b-4bd4-b215-e8e18a08146f-fernet-keys\") pod \"keystone-bootstrap-sh5xm\" (UID: \"27863dd2-1d6b-4bd4-b215-e8e18a08146f\") " pod="openstack/keystone-bootstrap-sh5xm" Feb 03 08:53:09 crc kubenswrapper[4998]: I0203 08:53:09.450359 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/27863dd2-1d6b-4bd4-b215-e8e18a08146f-scripts\") pod \"keystone-bootstrap-sh5xm\" (UID: \"27863dd2-1d6b-4bd4-b215-e8e18a08146f\") " pod="openstack/keystone-bootstrap-sh5xm" Feb 03 
08:53:09 crc kubenswrapper[4998]: I0203 08:53:09.450459 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdjvf\" (UniqueName: \"kubernetes.io/projected/27863dd2-1d6b-4bd4-b215-e8e18a08146f-kube-api-access-pdjvf\") pod \"keystone-bootstrap-sh5xm\" (UID: \"27863dd2-1d6b-4bd4-b215-e8e18a08146f\") " pod="openstack/keystone-bootstrap-sh5xm" Feb 03 08:53:09 crc kubenswrapper[4998]: I0203 08:53:09.450685 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27863dd2-1d6b-4bd4-b215-e8e18a08146f-combined-ca-bundle\") pod \"keystone-bootstrap-sh5xm\" (UID: \"27863dd2-1d6b-4bd4-b215-e8e18a08146f\") " pod="openstack/keystone-bootstrap-sh5xm" Feb 03 08:53:09 crc kubenswrapper[4998]: I0203 08:53:09.450742 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/27863dd2-1d6b-4bd4-b215-e8e18a08146f-credential-keys\") pod \"keystone-bootstrap-sh5xm\" (UID: \"27863dd2-1d6b-4bd4-b215-e8e18a08146f\") " pod="openstack/keystone-bootstrap-sh5xm" Feb 03 08:53:09 crc kubenswrapper[4998]: I0203 08:53:09.552733 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27863dd2-1d6b-4bd4-b215-e8e18a08146f-config-data\") pod \"keystone-bootstrap-sh5xm\" (UID: \"27863dd2-1d6b-4bd4-b215-e8e18a08146f\") " pod="openstack/keystone-bootstrap-sh5xm" Feb 03 08:53:09 crc kubenswrapper[4998]: I0203 08:53:09.552815 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/27863dd2-1d6b-4bd4-b215-e8e18a08146f-fernet-keys\") pod \"keystone-bootstrap-sh5xm\" (UID: \"27863dd2-1d6b-4bd4-b215-e8e18a08146f\") " pod="openstack/keystone-bootstrap-sh5xm" Feb 03 08:53:09 crc kubenswrapper[4998]: I0203 08:53:09.552857 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/27863dd2-1d6b-4bd4-b215-e8e18a08146f-scripts\") pod \"keystone-bootstrap-sh5xm\" (UID: \"27863dd2-1d6b-4bd4-b215-e8e18a08146f\") " pod="openstack/keystone-bootstrap-sh5xm" Feb 03 08:53:09 crc kubenswrapper[4998]: I0203 08:53:09.552943 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdjvf\" (UniqueName: \"kubernetes.io/projected/27863dd2-1d6b-4bd4-b215-e8e18a08146f-kube-api-access-pdjvf\") pod \"keystone-bootstrap-sh5xm\" (UID: \"27863dd2-1d6b-4bd4-b215-e8e18a08146f\") " pod="openstack/keystone-bootstrap-sh5xm" Feb 03 08:53:09 crc kubenswrapper[4998]: I0203 08:53:09.553054 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27863dd2-1d6b-4bd4-b215-e8e18a08146f-combined-ca-bundle\") pod \"keystone-bootstrap-sh5xm\" (UID: \"27863dd2-1d6b-4bd4-b215-e8e18a08146f\") " pod="openstack/keystone-bootstrap-sh5xm" Feb 03 08:53:09 crc kubenswrapper[4998]: I0203 08:53:09.553089 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/27863dd2-1d6b-4bd4-b215-e8e18a08146f-credential-keys\") pod \"keystone-bootstrap-sh5xm\" (UID: \"27863dd2-1d6b-4bd4-b215-e8e18a08146f\") " pod="openstack/keystone-bootstrap-sh5xm" Feb 03 08:53:09 crc kubenswrapper[4998]: I0203 08:53:09.575866 4998 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/27863dd2-1d6b-4bd4-b215-e8e18a08146f-fernet-keys\") pod \"keystone-bootstrap-sh5xm\" (UID: \"27863dd2-1d6b-4bd4-b215-e8e18a08146f\") " pod="openstack/keystone-bootstrap-sh5xm" Feb 03 08:53:09 crc kubenswrapper[4998]: I0203 08:53:09.576083 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/27863dd2-1d6b-4bd4-b215-e8e18a08146f-scripts\") pod \"keystone-bootstrap-sh5xm\" (UID: \"27863dd2-1d6b-4bd4-b215-e8e18a08146f\") " pod="openstack/keystone-bootstrap-sh5xm" Feb 03 08:53:09 crc kubenswrapper[4998]: I0203 08:53:09.587456 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27863dd2-1d6b-4bd4-b215-e8e18a08146f-combined-ca-bundle\") pod \"keystone-bootstrap-sh5xm\" (UID: \"27863dd2-1d6b-4bd4-b215-e8e18a08146f\") " pod="openstack/keystone-bootstrap-sh5xm" Feb 03 08:53:09 crc kubenswrapper[4998]: I0203 08:53:09.594761 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/27863dd2-1d6b-4bd4-b215-e8e18a08146f-credential-keys\") pod \"keystone-bootstrap-sh5xm\" (UID: \"27863dd2-1d6b-4bd4-b215-e8e18a08146f\") " pod="openstack/keystone-bootstrap-sh5xm" Feb 03 08:53:09 crc kubenswrapper[4998]: I0203 08:53:09.596248 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27863dd2-1d6b-4bd4-b215-e8e18a08146f-config-data\") pod \"keystone-bootstrap-sh5xm\" (UID: \"27863dd2-1d6b-4bd4-b215-e8e18a08146f\") " pod="openstack/keystone-bootstrap-sh5xm" Feb 03 08:53:09 crc kubenswrapper[4998]: I0203 08:53:09.613455 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdjvf\" (UniqueName: \"kubernetes.io/projected/27863dd2-1d6b-4bd4-b215-e8e18a08146f-kube-api-access-pdjvf\") pod \"keystone-bootstrap-sh5xm\" (UID: \"27863dd2-1d6b-4bd4-b215-e8e18a08146f\") " pod="openstack/keystone-bootstrap-sh5xm" Feb 03 08:53:09 crc kubenswrapper[4998]: I0203 08:53:09.897745 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-sh5xm" Feb 03 08:53:10 crc kubenswrapper[4998]: I0203 08:53:10.377567 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-sh5xm"] Feb 03 08:53:10 crc kubenswrapper[4998]: W0203 08:53:10.381749 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod27863dd2_1d6b_4bd4_b215_e8e18a08146f.slice/crio-2aa64095c045a830b946e2a31f66a81ec878d691e55e3a698e7e114666a17ac9 WatchSource:0}: Error finding container 2aa64095c045a830b946e2a31f66a81ec878d691e55e3a698e7e114666a17ac9: Status 404 returned error can't find the container with id 2aa64095c045a830b946e2a31f66a81ec878d691e55e3a698e7e114666a17ac9 Feb 03 08:53:10 crc kubenswrapper[4998]: I0203 08:53:10.436462 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="691b80e2-89db-4fa9-bdc1-08814aae52b6" path="/var/lib/kubelet/pods/691b80e2-89db-4fa9-bdc1-08814aae52b6/volumes" Feb 03 08:53:11 crc kubenswrapper[4998]: I0203 08:53:11.028503 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-sh5xm" event={"ID":"27863dd2-1d6b-4bd4-b215-e8e18a08146f","Type":"ContainerStarted","Data":"965de3887b885f0f44470783e308209cd737fd8394de56641be91635362da69f"} Feb 03 08:53:11 crc kubenswrapper[4998]: I0203 08:53:11.028888 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-sh5xm" event={"ID":"27863dd2-1d6b-4bd4-b215-e8e18a08146f","Type":"ContainerStarted","Data":"2aa64095c045a830b946e2a31f66a81ec878d691e55e3a698e7e114666a17ac9"} Feb 03 08:53:11 crc kubenswrapper[4998]: I0203 08:53:11.049347 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-sh5xm" podStartSLOduration=2.049321944 podStartE2EDuration="2.049321944s" podCreationTimestamp="2026-02-03 08:53:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 08:53:11.045410153 +0000 UTC m=+7629.332103979" watchObservedRunningTime="2026-02-03 08:53:11.049321944 +0000 UTC m=+7629.336015900" Feb 03 08:53:11 crc kubenswrapper[4998]: I0203 08:53:11.891988 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7587496b69-crmkf" Feb 03 08:53:11 crc kubenswrapper[4998]: I0203 08:53:11.966424 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7b779fdfb7-g89gs"] Feb 03 08:53:11 crc kubenswrapper[4998]: I0203 08:53:11.966698 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7b779fdfb7-g89gs" podUID="6de37a47-b912-4648-afd8-43e6a6f8187f" containerName="dnsmasq-dns" containerID="cri-o://672e42169181d9c572a9d1705e1416737b3d219e238f70254cb287b1a16225f0" gracePeriod=10 Feb 03 08:53:12 crc kubenswrapper[4998]: I0203 08:53:12.445721 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7b779fdfb7-g89gs" Feb 03 08:53:12 crc kubenswrapper[4998]: I0203 08:53:12.610937 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6de37a47-b912-4648-afd8-43e6a6f8187f-ovsdbserver-sb\") pod \"6de37a47-b912-4648-afd8-43e6a6f8187f\" (UID: \"6de37a47-b912-4648-afd8-43e6a6f8187f\") " Feb 03 08:53:12 crc kubenswrapper[4998]: I0203 08:53:12.611001 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6de37a47-b912-4648-afd8-43e6a6f8187f-dns-svc\") pod \"6de37a47-b912-4648-afd8-43e6a6f8187f\" (UID: \"6de37a47-b912-4648-afd8-43e6a6f8187f\") " Feb 03 08:53:12 crc kubenswrapper[4998]: I0203 08:53:12.611053 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6de37a47-b912-4648-afd8-43e6a6f8187f-ovsdbserver-nb\") pod \"6de37a47-b912-4648-afd8-43e6a6f8187f\" (UID: \"6de37a47-b912-4648-afd8-43e6a6f8187f\") " Feb 03 08:53:12 crc kubenswrapper[4998]: I0203 08:53:12.611088 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6de37a47-b912-4648-afd8-43e6a6f8187f-config\") pod \"6de37a47-b912-4648-afd8-43e6a6f8187f\" (UID: \"6de37a47-b912-4648-afd8-43e6a6f8187f\") " Feb 03 08:53:12 crc kubenswrapper[4998]: I0203 08:53:12.611268 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tp6ww\" (UniqueName: \"kubernetes.io/projected/6de37a47-b912-4648-afd8-43e6a6f8187f-kube-api-access-tp6ww\") pod \"6de37a47-b912-4648-afd8-43e6a6f8187f\" (UID: \"6de37a47-b912-4648-afd8-43e6a6f8187f\") " Feb 03 08:53:12 crc kubenswrapper[4998]: I0203 08:53:12.618955 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6de37a47-b912-4648-afd8-43e6a6f8187f-kube-api-access-tp6ww" (OuterVolumeSpecName: "kube-api-access-tp6ww") pod "6de37a47-b912-4648-afd8-43e6a6f8187f" (UID: "6de37a47-b912-4648-afd8-43e6a6f8187f"). InnerVolumeSpecName "kube-api-access-tp6ww". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:53:12 crc kubenswrapper[4998]: I0203 08:53:12.653424 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6de37a47-b912-4648-afd8-43e6a6f8187f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6de37a47-b912-4648-afd8-43e6a6f8187f" (UID: "6de37a47-b912-4648-afd8-43e6a6f8187f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:53:12 crc kubenswrapper[4998]: I0203 08:53:12.666557 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6de37a47-b912-4648-afd8-43e6a6f8187f-config" (OuterVolumeSpecName: "config") pod "6de37a47-b912-4648-afd8-43e6a6f8187f" (UID: "6de37a47-b912-4648-afd8-43e6a6f8187f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:53:12 crc kubenswrapper[4998]: I0203 08:53:12.671956 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6de37a47-b912-4648-afd8-43e6a6f8187f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "6de37a47-b912-4648-afd8-43e6a6f8187f" (UID: "6de37a47-b912-4648-afd8-43e6a6f8187f"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:53:12 crc kubenswrapper[4998]: I0203 08:53:12.673222 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6de37a47-b912-4648-afd8-43e6a6f8187f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "6de37a47-b912-4648-afd8-43e6a6f8187f" (UID: "6de37a47-b912-4648-afd8-43e6a6f8187f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:53:12 crc kubenswrapper[4998]: I0203 08:53:12.712718 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tp6ww\" (UniqueName: \"kubernetes.io/projected/6de37a47-b912-4648-afd8-43e6a6f8187f-kube-api-access-tp6ww\") on node \"crc\" DevicePath \"\"" Feb 03 08:53:12 crc kubenswrapper[4998]: I0203 08:53:12.712768 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6de37a47-b912-4648-afd8-43e6a6f8187f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 03 08:53:12 crc kubenswrapper[4998]: I0203 08:53:12.712799 4998 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6de37a47-b912-4648-afd8-43e6a6f8187f-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 08:53:12 crc kubenswrapper[4998]: I0203 08:53:12.712809 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6de37a47-b912-4648-afd8-43e6a6f8187f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 03 08:53:12 crc kubenswrapper[4998]: I0203 08:53:12.712818 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6de37a47-b912-4648-afd8-43e6a6f8187f-config\") on node \"crc\" DevicePath \"\"" Feb 03 08:53:13 crc kubenswrapper[4998]: I0203 08:53:13.047330 4998 generic.go:334] "Generic (PLEG): container finished" podID="6de37a47-b912-4648-afd8-43e6a6f8187f" containerID="672e42169181d9c572a9d1705e1416737b3d219e238f70254cb287b1a16225f0" exitCode=0 Feb 03 08:53:13 crc kubenswrapper[4998]: I0203 08:53:13.047700 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7b779fdfb7-g89gs" Feb 03 08:53:13 crc kubenswrapper[4998]: I0203 08:53:13.047744 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b779fdfb7-g89gs" event={"ID":"6de37a47-b912-4648-afd8-43e6a6f8187f","Type":"ContainerDied","Data":"672e42169181d9c572a9d1705e1416737b3d219e238f70254cb287b1a16225f0"} Feb 03 08:53:13 crc kubenswrapper[4998]: I0203 08:53:13.048019 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b779fdfb7-g89gs" event={"ID":"6de37a47-b912-4648-afd8-43e6a6f8187f","Type":"ContainerDied","Data":"fa60074854880cd8d8127fc43d3ae05ad5073ec7a64998a2a7eed382c1ebfb06"} Feb 03 08:53:13 crc kubenswrapper[4998]: I0203 08:53:13.048049 4998 scope.go:117] "RemoveContainer" containerID="672e42169181d9c572a9d1705e1416737b3d219e238f70254cb287b1a16225f0" Feb 03 08:53:13 crc kubenswrapper[4998]: I0203 08:53:13.070259 4998 scope.go:117] "RemoveContainer" containerID="0493901e9625570c53993f8d996b130851a818437c3b33d07aedf741ebf45dcd" Feb 03 08:53:13 crc kubenswrapper[4998]: I0203 08:53:13.085902 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7b779fdfb7-g89gs"] Feb 03 08:53:13 crc kubenswrapper[4998]: I0203 08:53:13.094423 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7b779fdfb7-g89gs"] Feb 03 08:53:13 crc kubenswrapper[4998]: I0203 08:53:13.195725 4998 scope.go:117] "RemoveContainer" containerID="672e42169181d9c572a9d1705e1416737b3d219e238f70254cb287b1a16225f0" Feb 03 08:53:13 crc kubenswrapper[4998]: E0203 08:53:13.196485 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"672e42169181d9c572a9d1705e1416737b3d219e238f70254cb287b1a16225f0\": container with ID starting with 672e42169181d9c572a9d1705e1416737b3d219e238f70254cb287b1a16225f0 not found: ID does not exist" containerID="672e42169181d9c572a9d1705e1416737b3d219e238f70254cb287b1a16225f0" Feb 03 08:53:13 crc kubenswrapper[4998]: I0203 08:53:13.196523 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"672e42169181d9c572a9d1705e1416737b3d219e238f70254cb287b1a16225f0"} err="failed to get container status \"672e42169181d9c572a9d1705e1416737b3d219e238f70254cb287b1a16225f0\": rpc error: code = NotFound desc = could not find container \"672e42169181d9c572a9d1705e1416737b3d219e238f70254cb287b1a16225f0\": container with ID starting with 672e42169181d9c572a9d1705e1416737b3d219e238f70254cb287b1a16225f0 not found: ID does not exist" Feb 03 08:53:13 crc kubenswrapper[4998]: I0203 08:53:13.196550 4998 scope.go:117] "RemoveContainer" containerID="0493901e9625570c53993f8d996b130851a818437c3b33d07aedf741ebf45dcd" Feb 03 08:53:13 crc kubenswrapper[4998]: E0203 08:53:13.196971 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0493901e9625570c53993f8d996b130851a818437c3b33d07aedf741ebf45dcd\": container with ID starting with 0493901e9625570c53993f8d996b130851a818437c3b33d07aedf741ebf45dcd not found: ID does not exist" containerID="0493901e9625570c53993f8d996b130851a818437c3b33d07aedf741ebf45dcd" Feb 03 08:53:13 crc kubenswrapper[4998]: I0203 08:53:13.197000 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0493901e9625570c53993f8d996b130851a818437c3b33d07aedf741ebf45dcd"} err="failed to get container status 
\"0493901e9625570c53993f8d996b130851a818437c3b33d07aedf741ebf45dcd\": rpc error: code = NotFound desc = could not find container \"0493901e9625570c53993f8d996b130851a818437c3b33d07aedf741ebf45dcd\": container with ID starting with 0493901e9625570c53993f8d996b130851a818437c3b33d07aedf741ebf45dcd not found: ID does not exist" Feb 03 08:53:14 crc kubenswrapper[4998]: I0203 08:53:14.058666 4998 generic.go:334] "Generic (PLEG): container finished" podID="27863dd2-1d6b-4bd4-b215-e8e18a08146f" containerID="965de3887b885f0f44470783e308209cd737fd8394de56641be91635362da69f" exitCode=0 Feb 03 08:53:14 crc kubenswrapper[4998]: I0203 08:53:14.058717 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-sh5xm" event={"ID":"27863dd2-1d6b-4bd4-b215-e8e18a08146f","Type":"ContainerDied","Data":"965de3887b885f0f44470783e308209cd737fd8394de56641be91635362da69f"} Feb 03 08:53:14 crc kubenswrapper[4998]: I0203 08:53:14.427511 4998 scope.go:117] "RemoveContainer" containerID="8b73a3adb01550fb0197fbf3c3543111a8f8997fd8a03538adf5471f08cef4d6" Feb 03 08:53:14 crc kubenswrapper[4998]: E0203 08:53:14.428103 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:53:14 crc kubenswrapper[4998]: I0203 08:53:14.444607 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6de37a47-b912-4648-afd8-43e6a6f8187f" path="/var/lib/kubelet/pods/6de37a47-b912-4648-afd8-43e6a6f8187f/volumes" Feb 03 08:53:15 crc kubenswrapper[4998]: I0203 08:53:15.425126 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-sh5xm" Feb 03 08:53:15 crc kubenswrapper[4998]: I0203 08:53:15.565315 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/27863dd2-1d6b-4bd4-b215-e8e18a08146f-fernet-keys\") pod \"27863dd2-1d6b-4bd4-b215-e8e18a08146f\" (UID: \"27863dd2-1d6b-4bd4-b215-e8e18a08146f\") " Feb 03 08:53:15 crc kubenswrapper[4998]: I0203 08:53:15.565381 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/27863dd2-1d6b-4bd4-b215-e8e18a08146f-scripts\") pod \"27863dd2-1d6b-4bd4-b215-e8e18a08146f\" (UID: \"27863dd2-1d6b-4bd4-b215-e8e18a08146f\") " Feb 03 08:53:15 crc kubenswrapper[4998]: I0203 08:53:15.565442 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27863dd2-1d6b-4bd4-b215-e8e18a08146f-config-data\") pod \"27863dd2-1d6b-4bd4-b215-e8e18a08146f\" (UID: \"27863dd2-1d6b-4bd4-b215-e8e18a08146f\") " Feb 03 08:53:15 crc kubenswrapper[4998]: I0203 08:53:15.565556 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27863dd2-1d6b-4bd4-b215-e8e18a08146f-combined-ca-bundle\") pod \"27863dd2-1d6b-4bd4-b215-e8e18a08146f\" (UID: \"27863dd2-1d6b-4bd4-b215-e8e18a08146f\") " Feb 03 08:53:15 crc kubenswrapper[4998]: I0203 08:53:15.565614 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pdjvf\" (UniqueName: \"kubernetes.io/projected/27863dd2-1d6b-4bd4-b215-e8e18a08146f-kube-api-access-pdjvf\") pod \"27863dd2-1d6b-4bd4-b215-e8e18a08146f\" (UID: \"27863dd2-1d6b-4bd4-b215-e8e18a08146f\") " Feb 03 08:53:15 crc kubenswrapper[4998]: I0203 08:53:15.565690 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/27863dd2-1d6b-4bd4-b215-e8e18a08146f-credential-keys\") pod \"27863dd2-1d6b-4bd4-b215-e8e18a08146f\" (UID: \"27863dd2-1d6b-4bd4-b215-e8e18a08146f\") " Feb 03 08:53:15 crc kubenswrapper[4998]: I0203 08:53:15.571347 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27863dd2-1d6b-4bd4-b215-e8e18a08146f-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "27863dd2-1d6b-4bd4-b215-e8e18a08146f" (UID: "27863dd2-1d6b-4bd4-b215-e8e18a08146f"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:53:15 crc kubenswrapper[4998]: I0203 08:53:15.571650 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27863dd2-1d6b-4bd4-b215-e8e18a08146f-kube-api-access-pdjvf" (OuterVolumeSpecName: "kube-api-access-pdjvf") pod "27863dd2-1d6b-4bd4-b215-e8e18a08146f" (UID: "27863dd2-1d6b-4bd4-b215-e8e18a08146f"). InnerVolumeSpecName "kube-api-access-pdjvf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:53:15 crc kubenswrapper[4998]: I0203 08:53:15.571661 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27863dd2-1d6b-4bd4-b215-e8e18a08146f-scripts" (OuterVolumeSpecName: "scripts") pod "27863dd2-1d6b-4bd4-b215-e8e18a08146f" (UID: "27863dd2-1d6b-4bd4-b215-e8e18a08146f"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:53:15 crc kubenswrapper[4998]: I0203 08:53:15.572600 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27863dd2-1d6b-4bd4-b215-e8e18a08146f-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "27863dd2-1d6b-4bd4-b215-e8e18a08146f" (UID: "27863dd2-1d6b-4bd4-b215-e8e18a08146f"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:53:15 crc kubenswrapper[4998]: I0203 08:53:15.589998 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27863dd2-1d6b-4bd4-b215-e8e18a08146f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "27863dd2-1d6b-4bd4-b215-e8e18a08146f" (UID: "27863dd2-1d6b-4bd4-b215-e8e18a08146f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:53:15 crc kubenswrapper[4998]: I0203 08:53:15.597129 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27863dd2-1d6b-4bd4-b215-e8e18a08146f-config-data" (OuterVolumeSpecName: "config-data") pod "27863dd2-1d6b-4bd4-b215-e8e18a08146f" (UID: "27863dd2-1d6b-4bd4-b215-e8e18a08146f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:53:15 crc kubenswrapper[4998]: I0203 08:53:15.668422 4998 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/27863dd2-1d6b-4bd4-b215-e8e18a08146f-fernet-keys\") on node \"crc\" DevicePath \"\"" Feb 03 08:53:15 crc kubenswrapper[4998]: I0203 08:53:15.668455 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/27863dd2-1d6b-4bd4-b215-e8e18a08146f-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 08:53:15 crc kubenswrapper[4998]: I0203 08:53:15.668464 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27863dd2-1d6b-4bd4-b215-e8e18a08146f-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 08:53:15 crc kubenswrapper[4998]: I0203 08:53:15.668475 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27863dd2-1d6b-4bd4-b215-e8e18a08146f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 08:53:15 crc kubenswrapper[4998]: I0203 08:53:15.668487 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pdjvf\" (UniqueName: \"kubernetes.io/projected/27863dd2-1d6b-4bd4-b215-e8e18a08146f-kube-api-access-pdjvf\") on node \"crc\" DevicePath \"\"" Feb 03 08:53:15 crc kubenswrapper[4998]: I0203 08:53:15.668495 4998 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/27863dd2-1d6b-4bd4-b215-e8e18a08146f-credential-keys\") on node \"crc\" DevicePath \"\"" Feb 03 08:53:16 crc kubenswrapper[4998]: I0203 08:53:16.079313 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-sh5xm" event={"ID":"27863dd2-1d6b-4bd4-b215-e8e18a08146f","Type":"ContainerDied","Data":"2aa64095c045a830b946e2a31f66a81ec878d691e55e3a698e7e114666a17ac9"} Feb 03 08:53:16 crc kubenswrapper[4998]: I0203 08:53:16.079612 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2aa64095c045a830b946e2a31f66a81ec878d691e55e3a698e7e114666a17ac9" Feb 03 08:53:16 crc kubenswrapper[4998]: I0203 08:53:16.079348 4998 util.go:48] 
"No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-sh5xm" Feb 03 08:53:16 crc kubenswrapper[4998]: I0203 08:53:16.164982 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-5bdf89b5c5-zrqjk"] Feb 03 08:53:16 crc kubenswrapper[4998]: E0203 08:53:16.165409 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27863dd2-1d6b-4bd4-b215-e8e18a08146f" containerName="keystone-bootstrap" Feb 03 08:53:16 crc kubenswrapper[4998]: I0203 08:53:16.165428 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="27863dd2-1d6b-4bd4-b215-e8e18a08146f" containerName="keystone-bootstrap" Feb 03 08:53:16 crc kubenswrapper[4998]: E0203 08:53:16.165471 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6de37a47-b912-4648-afd8-43e6a6f8187f" containerName="init" Feb 03 08:53:16 crc kubenswrapper[4998]: I0203 08:53:16.165480 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="6de37a47-b912-4648-afd8-43e6a6f8187f" containerName="init" Feb 03 08:53:16 crc kubenswrapper[4998]: E0203 08:53:16.165502 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6de37a47-b912-4648-afd8-43e6a6f8187f" containerName="dnsmasq-dns" Feb 03 08:53:16 crc kubenswrapper[4998]: I0203 08:53:16.165510 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="6de37a47-b912-4648-afd8-43e6a6f8187f" containerName="dnsmasq-dns" Feb 03 08:53:16 crc kubenswrapper[4998]: I0203 08:53:16.165713 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="27863dd2-1d6b-4bd4-b215-e8e18a08146f" containerName="keystone-bootstrap" Feb 03 08:53:16 crc kubenswrapper[4998]: I0203 08:53:16.165737 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="6de37a47-b912-4648-afd8-43e6a6f8187f" containerName="dnsmasq-dns" Feb 03 08:53:16 crc kubenswrapper[4998]: I0203 08:53:16.166503 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-5bdf89b5c5-zrqjk" Feb 03 08:53:16 crc kubenswrapper[4998]: I0203 08:53:16.171050 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Feb 03 08:53:16 crc kubenswrapper[4998]: I0203 08:53:16.171205 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-79lr8" Feb 03 08:53:16 crc kubenswrapper[4998]: I0203 08:53:16.171424 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Feb 03 08:53:16 crc kubenswrapper[4998]: I0203 08:53:16.172012 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Feb 03 08:53:16 crc kubenswrapper[4998]: I0203 08:53:16.173517 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5bdf89b5c5-zrqjk"] Feb 03 08:53:16 crc kubenswrapper[4998]: I0203 08:53:16.277410 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94702dda-48c1-4162-976e-0d624e111b04-combined-ca-bundle\") pod \"keystone-5bdf89b5c5-zrqjk\" (UID: \"94702dda-48c1-4162-976e-0d624e111b04\") " pod="openstack/keystone-5bdf89b5c5-zrqjk" Feb 03 08:53:16 crc kubenswrapper[4998]: I0203 08:53:16.277581 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/94702dda-48c1-4162-976e-0d624e111b04-credential-keys\") pod \"keystone-5bdf89b5c5-zrqjk\" (UID: \"94702dda-48c1-4162-976e-0d624e111b04\") " pod="openstack/keystone-5bdf89b5c5-zrqjk" Feb 03 08:53:16 crc kubenswrapper[4998]: I0203 08:53:16.277706 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/94702dda-48c1-4162-976e-0d624e111b04-fernet-keys\") pod \"keystone-5bdf89b5c5-zrqjk\" (UID: \"94702dda-48c1-4162-976e-0d624e111b04\") " pod="openstack/keystone-5bdf89b5c5-zrqjk" Feb 03 08:53:16 crc kubenswrapper[4998]: I0203 08:53:16.277834 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94702dda-48c1-4162-976e-0d624e111b04-scripts\") pod \"keystone-5bdf89b5c5-zrqjk\" (UID: \"94702dda-48c1-4162-976e-0d624e111b04\") " pod="openstack/keystone-5bdf89b5c5-zrqjk" Feb 03 08:53:16 crc kubenswrapper[4998]: I0203 08:53:16.277962 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rpwb7\" (UniqueName: \"kubernetes.io/projected/94702dda-48c1-4162-976e-0d624e111b04-kube-api-access-rpwb7\") pod \"keystone-5bdf89b5c5-zrqjk\" (UID: \"94702dda-48c1-4162-976e-0d624e111b04\") " pod="openstack/keystone-5bdf89b5c5-zrqjk" Feb 03 08:53:16 crc kubenswrapper[4998]: I0203 08:53:16.278003 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94702dda-48c1-4162-976e-0d624e111b04-config-data\") pod \"keystone-5bdf89b5c5-zrqjk\" (UID: \"94702dda-48c1-4162-976e-0d624e111b04\") " pod="openstack/keystone-5bdf89b5c5-zrqjk" Feb 03 08:53:16 crc kubenswrapper[4998]: I0203 08:53:16.380615 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rpwb7\" (UniqueName: \"kubernetes.io/projected/94702dda-48c1-4162-976e-0d624e111b04-kube-api-access-rpwb7\") pod 
\"keystone-5bdf89b5c5-zrqjk\" (UID: \"94702dda-48c1-4162-976e-0d624e111b04\") " pod="openstack/keystone-5bdf89b5c5-zrqjk" Feb 03 08:53:16 crc kubenswrapper[4998]: I0203 08:53:16.380652 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94702dda-48c1-4162-976e-0d624e111b04-config-data\") pod \"keystone-5bdf89b5c5-zrqjk\" (UID: \"94702dda-48c1-4162-976e-0d624e111b04\") " pod="openstack/keystone-5bdf89b5c5-zrqjk" Feb 03 08:53:16 crc kubenswrapper[4998]: I0203 08:53:16.380715 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94702dda-48c1-4162-976e-0d624e111b04-combined-ca-bundle\") pod \"keystone-5bdf89b5c5-zrqjk\" (UID: \"94702dda-48c1-4162-976e-0d624e111b04\") " pod="openstack/keystone-5bdf89b5c5-zrqjk" Feb 03 08:53:16 crc kubenswrapper[4998]: I0203 08:53:16.380743 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/94702dda-48c1-4162-976e-0d624e111b04-credential-keys\") pod \"keystone-5bdf89b5c5-zrqjk\" (UID: \"94702dda-48c1-4162-976e-0d624e111b04\") " pod="openstack/keystone-5bdf89b5c5-zrqjk" Feb 03 08:53:16 crc kubenswrapper[4998]: I0203 08:53:16.380778 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/94702dda-48c1-4162-976e-0d624e111b04-fernet-keys\") pod \"keystone-5bdf89b5c5-zrqjk\" (UID: \"94702dda-48c1-4162-976e-0d624e111b04\") " pod="openstack/keystone-5bdf89b5c5-zrqjk" Feb 03 08:53:16 crc kubenswrapper[4998]: I0203 08:53:16.380838 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94702dda-48c1-4162-976e-0d624e111b04-scripts\") pod \"keystone-5bdf89b5c5-zrqjk\" (UID: \"94702dda-48c1-4162-976e-0d624e111b04\") " pod="openstack/keystone-5bdf89b5c5-zrqjk" Feb 03 08:53:16 crc kubenswrapper[4998]: I0203 08:53:16.386474 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/94702dda-48c1-4162-976e-0d624e111b04-credential-keys\") pod \"keystone-5bdf89b5c5-zrqjk\" (UID: \"94702dda-48c1-4162-976e-0d624e111b04\") " pod="openstack/keystone-5bdf89b5c5-zrqjk" Feb 03 08:53:16 crc kubenswrapper[4998]: I0203 08:53:16.386956 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94702dda-48c1-4162-976e-0d624e111b04-scripts\") pod \"keystone-5bdf89b5c5-zrqjk\" (UID: \"94702dda-48c1-4162-976e-0d624e111b04\") " pod="openstack/keystone-5bdf89b5c5-zrqjk" Feb 03 08:53:16 crc kubenswrapper[4998]: I0203 08:53:16.387433 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/94702dda-48c1-4162-976e-0d624e111b04-fernet-keys\") pod \"keystone-5bdf89b5c5-zrqjk\" (UID: \"94702dda-48c1-4162-976e-0d624e111b04\") " pod="openstack/keystone-5bdf89b5c5-zrqjk" Feb 03 08:53:16 crc kubenswrapper[4998]: I0203 08:53:16.387440 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94702dda-48c1-4162-976e-0d624e111b04-combined-ca-bundle\") pod \"keystone-5bdf89b5c5-zrqjk\" (UID: \"94702dda-48c1-4162-976e-0d624e111b04\") " pod="openstack/keystone-5bdf89b5c5-zrqjk" Feb 03 08:53:16 crc kubenswrapper[4998]: I0203 08:53:16.388268 4998 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94702dda-48c1-4162-976e-0d624e111b04-config-data\") pod \"keystone-5bdf89b5c5-zrqjk\" (UID: \"94702dda-48c1-4162-976e-0d624e111b04\") " pod="openstack/keystone-5bdf89b5c5-zrqjk" Feb 03 08:53:16 crc kubenswrapper[4998]: I0203 08:53:16.405164 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rpwb7\" (UniqueName: \"kubernetes.io/projected/94702dda-48c1-4162-976e-0d624e111b04-kube-api-access-rpwb7\") pod \"keystone-5bdf89b5c5-zrqjk\" (UID: \"94702dda-48c1-4162-976e-0d624e111b04\") " pod="openstack/keystone-5bdf89b5c5-zrqjk" Feb 03 08:53:16 crc kubenswrapper[4998]: I0203 08:53:16.493412 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-5bdf89b5c5-zrqjk" Feb 03 08:53:16 crc kubenswrapper[4998]: I0203 08:53:16.914728 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5bdf89b5c5-zrqjk"] Feb 03 08:53:17 crc kubenswrapper[4998]: I0203 08:53:17.088347 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5bdf89b5c5-zrqjk" event={"ID":"94702dda-48c1-4162-976e-0d624e111b04","Type":"ContainerStarted","Data":"804e89f7fac9854acc733a5ff1d92564edb09d1d7c761b71d8eaf5645eb794d3"} Feb 03 08:53:17 crc kubenswrapper[4998]: I0203 08:53:17.088679 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-5bdf89b5c5-zrqjk" Feb 03 08:53:17 crc kubenswrapper[4998]: I0203 08:53:17.088690 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5bdf89b5c5-zrqjk" event={"ID":"94702dda-48c1-4162-976e-0d624e111b04","Type":"ContainerStarted","Data":"f04e6b5c491120e7d14d479c082e0f033e25ad6c5b5df8e9549f0149f37b3fd6"} Feb 03 08:53:17 crc kubenswrapper[4998]: I0203 08:53:17.105687 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-5bdf89b5c5-zrqjk" podStartSLOduration=1.105671432 podStartE2EDuration="1.105671432s" podCreationTimestamp="2026-02-03 08:53:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 08:53:17.105670262 +0000 UTC m=+7635.392364088" watchObservedRunningTime="2026-02-03 08:53:17.105671432 +0000 UTC m=+7635.392365238" Feb 03 08:53:17 crc kubenswrapper[4998]: I0203 08:53:17.228417 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7b779fdfb7-g89gs" podUID="6de37a47-b912-4648-afd8-43e6a6f8187f" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.1.36:5353: i/o timeout" Feb 03 08:53:29 crc kubenswrapper[4998]: I0203 08:53:29.427929 4998 scope.go:117] "RemoveContainer" containerID="8b73a3adb01550fb0197fbf3c3543111a8f8997fd8a03538adf5471f08cef4d6" Feb 03 08:53:29 crc kubenswrapper[4998]: E0203 08:53:29.430229 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:53:40 crc kubenswrapper[4998]: I0203 08:53:40.428107 4998 scope.go:117] "RemoveContainer" containerID="8b73a3adb01550fb0197fbf3c3543111a8f8997fd8a03538adf5471f08cef4d6" 
Feb 03 08:53:40 crc kubenswrapper[4998]: E0203 08:53:40.430107 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 08:53:47 crc kubenswrapper[4998]: I0203 08:53:47.922944 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-5bdf89b5c5-zrqjk"
Feb 03 08:53:49 crc kubenswrapper[4998]: I0203 08:53:49.947039 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"]
Feb 03 08:53:49 crc kubenswrapper[4998]: I0203 08:53:49.949602 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Feb 03 08:53:49 crc kubenswrapper[4998]: I0203 08:53:49.953252 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config"
Feb 03 08:53:49 crc kubenswrapper[4998]: I0203 08:53:49.953332 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-q7br2"
Feb 03 08:53:49 crc kubenswrapper[4998]: I0203 08:53:49.953533 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret"
Feb 03 08:53:49 crc kubenswrapper[4998]: I0203 08:53:49.958183 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Feb 03 08:53:50 crc kubenswrapper[4998]: I0203 08:53:50.068474 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/098a8d80-a86e-4f18-811b-dec1c91614d8-openstack-config\") pod \"openstackclient\" (UID: \"098a8d80-a86e-4f18-811b-dec1c91614d8\") " pod="openstack/openstackclient"
Feb 03 08:53:50 crc kubenswrapper[4998]: I0203 08:53:50.068856 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rhxxx\" (UniqueName: \"kubernetes.io/projected/098a8d80-a86e-4f18-811b-dec1c91614d8-kube-api-access-rhxxx\") pod \"openstackclient\" (UID: \"098a8d80-a86e-4f18-811b-dec1c91614d8\") " pod="openstack/openstackclient"
Feb 03 08:53:50 crc kubenswrapper[4998]: I0203 08:53:50.068967 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/098a8d80-a86e-4f18-811b-dec1c91614d8-openstack-config-secret\") pod \"openstackclient\" (UID: \"098a8d80-a86e-4f18-811b-dec1c91614d8\") " pod="openstack/openstackclient"
Feb 03 08:53:50 crc kubenswrapper[4998]: I0203 08:53:50.172013 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rhxxx\" (UniqueName: \"kubernetes.io/projected/098a8d80-a86e-4f18-811b-dec1c91614d8-kube-api-access-rhxxx\") pod \"openstackclient\" (UID: \"098a8d80-a86e-4f18-811b-dec1c91614d8\") " pod="openstack/openstackclient"
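
The repeating "back-off 5m0s restarting failed container" errors for machine-config-daemon show the container restart backoff at its cap: the kubelet delays each restart of a crashing container, doubling the delay per crash from (to my knowledge) a 10-second base up to a 5-minute ceiling, and only retries when the window expires, as it does at 08:54:22 below. A minimal sketch of that assumed schedule:

    package main

    import (
    	"fmt"
    	"time"
    )

    func main() {
    	// Assumed kubelet defaults: 10s initial delay, doubled after each
    	// crash, capped at 5 minutes -- the "back-off 5m0s" seen above.
    	delay, limit := 10*time.Second, 5*time.Minute
    	for i := 1; delay < limit; i++ {
    		fmt.Printf("restart %d: wait %v\n", i, delay)
    		delay *= 2
    	}
    	fmt.Println("later restarts: wait", limit) // steady state in this log
    }
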
\"098a8d80-a86e-4f18-811b-dec1c91614d8\") " pod="openstack/openstackclient" Feb 03 08:53:50 crc kubenswrapper[4998]: I0203 08:53:50.173771 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/098a8d80-a86e-4f18-811b-dec1c91614d8-openstack-config\") pod \"openstackclient\" (UID: \"098a8d80-a86e-4f18-811b-dec1c91614d8\") " pod="openstack/openstackclient" Feb 03 08:53:50 crc kubenswrapper[4998]: I0203 08:53:50.174797 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/098a8d80-a86e-4f18-811b-dec1c91614d8-openstack-config\") pod \"openstackclient\" (UID: \"098a8d80-a86e-4f18-811b-dec1c91614d8\") " pod="openstack/openstackclient" Feb 03 08:53:50 crc kubenswrapper[4998]: I0203 08:53:50.181328 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/098a8d80-a86e-4f18-811b-dec1c91614d8-openstack-config-secret\") pod \"openstackclient\" (UID: \"098a8d80-a86e-4f18-811b-dec1c91614d8\") " pod="openstack/openstackclient" Feb 03 08:53:50 crc kubenswrapper[4998]: I0203 08:53:50.189853 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rhxxx\" (UniqueName: \"kubernetes.io/projected/098a8d80-a86e-4f18-811b-dec1c91614d8-kube-api-access-rhxxx\") pod \"openstackclient\" (UID: \"098a8d80-a86e-4f18-811b-dec1c91614d8\") " pod="openstack/openstackclient" Feb 03 08:53:50 crc kubenswrapper[4998]: I0203 08:53:50.277194 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Feb 03 08:53:50 crc kubenswrapper[4998]: I0203 08:53:50.721500 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Feb 03 08:53:51 crc kubenswrapper[4998]: I0203 08:53:51.387267 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"098a8d80-a86e-4f18-811b-dec1c91614d8","Type":"ContainerStarted","Data":"78bb58710871d13cf9e277cf5c2716bf479f277a7353a90314479dee62f586bc"} Feb 03 08:53:54 crc kubenswrapper[4998]: I0203 08:53:54.428257 4998 scope.go:117] "RemoveContainer" containerID="8b73a3adb01550fb0197fbf3c3543111a8f8997fd8a03538adf5471f08cef4d6" Feb 03 08:53:54 crc kubenswrapper[4998]: E0203 08:53:54.428848 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:54:01 crc kubenswrapper[4998]: I0203 08:54:01.497764 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"098a8d80-a86e-4f18-811b-dec1c91614d8","Type":"ContainerStarted","Data":"fb292e710049b7e29363c687f3180632419bef33ce334d7da9e9bb6582fe69b9"} Feb 03 08:54:01 crc kubenswrapper[4998]: I0203 08:54:01.517848 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.069274227 podStartE2EDuration="12.517831337s" podCreationTimestamp="2026-02-03 08:53:49 +0000 UTC" firstStartedPulling="2026-02-03 08:53:50.738100198 +0000 UTC m=+7669.024794004" lastFinishedPulling="2026-02-03 
08:54:01.186657308 +0000 UTC m=+7679.473351114" observedRunningTime="2026-02-03 08:54:01.515954463 +0000 UTC m=+7679.802648279" watchObservedRunningTime="2026-02-03 08:54:01.517831337 +0000 UTC m=+7679.804525163" Feb 03 08:54:09 crc kubenswrapper[4998]: I0203 08:54:09.427314 4998 scope.go:117] "RemoveContainer" containerID="8b73a3adb01550fb0197fbf3c3543111a8f8997fd8a03538adf5471f08cef4d6" Feb 03 08:54:09 crc kubenswrapper[4998]: E0203 08:54:09.428317 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 08:54:22 crc kubenswrapper[4998]: I0203 08:54:22.434757 4998 scope.go:117] "RemoveContainer" containerID="8b73a3adb01550fb0197fbf3c3543111a8f8997fd8a03538adf5471f08cef4d6" Feb 03 08:54:22 crc kubenswrapper[4998]: I0203 08:54:22.705525 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerStarted","Data":"904a99122771df02837e79d9f49d4714e829d6f397087b6959b9dca5b129d115"} Feb 03 08:55:07 crc kubenswrapper[4998]: I0203 08:55:07.069463 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-qkznt"] Feb 03 08:55:07 crc kubenswrapper[4998]: I0203 08:55:07.077000 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-qkznt"] Feb 03 08:55:08 crc kubenswrapper[4998]: I0203 08:55:08.439653 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e429ee71-b2e9-4d43-b241-b3de2b261d9d" path="/var/lib/kubelet/pods/e429ee71-b2e9-4d43-b241-b3de2b261d9d/volumes" Feb 03 08:55:09 crc kubenswrapper[4998]: I0203 08:55:09.154553 4998 scope.go:117] "RemoveContainer" containerID="9d63123d9d2b80d986be0de898fdf65ac2c14a5d2ed324fd165204982a7ccf38" Feb 03 08:55:23 crc kubenswrapper[4998]: I0203 08:55:23.793066 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-g9cxn"] Feb 03 08:55:23 crc kubenswrapper[4998]: I0203 08:55:23.795169 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-g9cxn" Feb 03 08:55:23 crc kubenswrapper[4998]: I0203 08:55:23.807768 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-e639-account-create-update-btlh6"] Feb 03 08:55:23 crc kubenswrapper[4998]: I0203 08:55:23.809053 4998 util.go:30] "No sandbox for pod can be found. 
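
The two durations in the openstackclient startup entry differ by exactly the image-pull window: E2E is 12.517831337s, the pull ran from 08:53:50.738100198 to 08:54:01.186657308 (10.44855711s), and 12.517831337 - 10.44855711 = 2.069274227, the reported podStartSLOduration. The keystone entry earlier had zero-value pull timestamps, so its SLO and E2E durations matched. A small Go check of that arithmetic using the timestamps copied from the log:

    package main

    import (
    	"fmt"
    	"time"
    )

    func main() {
    	// Timestamps copied from the openstackclient startup entry above;
    	// errors ignored only because the inputs are fixed literals.
    	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
    	first, _ := time.Parse(layout, "2026-02-03 08:53:50.738100198 +0000 UTC")
    	last, _ := time.Parse(layout, "2026-02-03 08:54:01.186657308 +0000 UTC")
    	e2e := 12517831337 * time.Nanosecond // podStartE2EDuration="12.517831337s"
    	pull := last.Sub(first)
    	fmt.Println("image pull window:", pull)       // 10.44855711s
    	fmt.Println("podStartSLOduration:", e2e-pull) // 2.069274227s
    }
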
Need to start a new one" pod="openstack/barbican-e639-account-create-update-btlh6" Feb 03 08:55:23 crc kubenswrapper[4998]: I0203 08:55:23.811073 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Feb 03 08:55:23 crc kubenswrapper[4998]: I0203 08:55:23.819686 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-g9cxn"] Feb 03 08:55:23 crc kubenswrapper[4998]: I0203 08:55:23.830948 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-e639-account-create-update-btlh6"] Feb 03 08:55:23 crc kubenswrapper[4998]: I0203 08:55:23.833486 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79bf0736-7674-434e-9bd1-25f3950556d6-operator-scripts\") pod \"barbican-db-create-g9cxn\" (UID: \"79bf0736-7674-434e-9bd1-25f3950556d6\") " pod="openstack/barbican-db-create-g9cxn" Feb 03 08:55:23 crc kubenswrapper[4998]: I0203 08:55:23.833827 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zzbsv\" (UniqueName: \"kubernetes.io/projected/79bf0736-7674-434e-9bd1-25f3950556d6-kube-api-access-zzbsv\") pod \"barbican-db-create-g9cxn\" (UID: \"79bf0736-7674-434e-9bd1-25f3950556d6\") " pod="openstack/barbican-db-create-g9cxn" Feb 03 08:55:23 crc kubenswrapper[4998]: I0203 08:55:23.935528 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79bf0736-7674-434e-9bd1-25f3950556d6-operator-scripts\") pod \"barbican-db-create-g9cxn\" (UID: \"79bf0736-7674-434e-9bd1-25f3950556d6\") " pod="openstack/barbican-db-create-g9cxn" Feb 03 08:55:23 crc kubenswrapper[4998]: I0203 08:55:23.936065 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7l8nm\" (UniqueName: \"kubernetes.io/projected/06811dbe-b72e-435d-b0a3-474209808036-kube-api-access-7l8nm\") pod \"barbican-e639-account-create-update-btlh6\" (UID: \"06811dbe-b72e-435d-b0a3-474209808036\") " pod="openstack/barbican-e639-account-create-update-btlh6" Feb 03 08:55:23 crc kubenswrapper[4998]: I0203 08:55:23.936307 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79bf0736-7674-434e-9bd1-25f3950556d6-operator-scripts\") pod \"barbican-db-create-g9cxn\" (UID: \"79bf0736-7674-434e-9bd1-25f3950556d6\") " pod="openstack/barbican-db-create-g9cxn" Feb 03 08:55:23 crc kubenswrapper[4998]: I0203 08:55:23.936315 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/06811dbe-b72e-435d-b0a3-474209808036-operator-scripts\") pod \"barbican-e639-account-create-update-btlh6\" (UID: \"06811dbe-b72e-435d-b0a3-474209808036\") " pod="openstack/barbican-e639-account-create-update-btlh6" Feb 03 08:55:23 crc kubenswrapper[4998]: I0203 08:55:23.936712 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zzbsv\" (UniqueName: \"kubernetes.io/projected/79bf0736-7674-434e-9bd1-25f3950556d6-kube-api-access-zzbsv\") pod \"barbican-db-create-g9cxn\" (UID: \"79bf0736-7674-434e-9bd1-25f3950556d6\") " pod="openstack/barbican-db-create-g9cxn" Feb 03 08:55:23 crc kubenswrapper[4998]: I0203 08:55:23.966040 4998 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-zzbsv\" (UniqueName: \"kubernetes.io/projected/79bf0736-7674-434e-9bd1-25f3950556d6-kube-api-access-zzbsv\") pod \"barbican-db-create-g9cxn\" (UID: \"79bf0736-7674-434e-9bd1-25f3950556d6\") " pod="openstack/barbican-db-create-g9cxn" Feb 03 08:55:24 crc kubenswrapper[4998]: I0203 08:55:24.038348 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7l8nm\" (UniqueName: \"kubernetes.io/projected/06811dbe-b72e-435d-b0a3-474209808036-kube-api-access-7l8nm\") pod \"barbican-e639-account-create-update-btlh6\" (UID: \"06811dbe-b72e-435d-b0a3-474209808036\") " pod="openstack/barbican-e639-account-create-update-btlh6" Feb 03 08:55:24 crc kubenswrapper[4998]: I0203 08:55:24.038453 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/06811dbe-b72e-435d-b0a3-474209808036-operator-scripts\") pod \"barbican-e639-account-create-update-btlh6\" (UID: \"06811dbe-b72e-435d-b0a3-474209808036\") " pod="openstack/barbican-e639-account-create-update-btlh6" Feb 03 08:55:24 crc kubenswrapper[4998]: I0203 08:55:24.039616 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/06811dbe-b72e-435d-b0a3-474209808036-operator-scripts\") pod \"barbican-e639-account-create-update-btlh6\" (UID: \"06811dbe-b72e-435d-b0a3-474209808036\") " pod="openstack/barbican-e639-account-create-update-btlh6" Feb 03 08:55:24 crc kubenswrapper[4998]: I0203 08:55:24.057699 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7l8nm\" (UniqueName: \"kubernetes.io/projected/06811dbe-b72e-435d-b0a3-474209808036-kube-api-access-7l8nm\") pod \"barbican-e639-account-create-update-btlh6\" (UID: \"06811dbe-b72e-435d-b0a3-474209808036\") " pod="openstack/barbican-e639-account-create-update-btlh6" Feb 03 08:55:24 crc kubenswrapper[4998]: I0203 08:55:24.128945 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-g9cxn" Feb 03 08:55:24 crc kubenswrapper[4998]: I0203 08:55:24.139248 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-e639-account-create-update-btlh6" Feb 03 08:55:24 crc kubenswrapper[4998]: I0203 08:55:24.455117 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-e639-account-create-update-btlh6"] Feb 03 08:55:24 crc kubenswrapper[4998]: I0203 08:55:24.580383 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-g9cxn"] Feb 03 08:55:24 crc kubenswrapper[4998]: W0203 08:55:24.582858 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod79bf0736_7674_434e_9bd1_25f3950556d6.slice/crio-edef0acee253ae7cac210baed402059d85b0b0db4d1ac34845fafa9c2f513d66 WatchSource:0}: Error finding container edef0acee253ae7cac210baed402059d85b0b0db4d1ac34845fafa9c2f513d66: Status 404 returned error can't find the container with id edef0acee253ae7cac210baed402059d85b0b0db4d1ac34845fafa9c2f513d66 Feb 03 08:55:25 crc kubenswrapper[4998]: I0203 08:55:25.307644 4998 generic.go:334] "Generic (PLEG): container finished" podID="06811dbe-b72e-435d-b0a3-474209808036" containerID="19be361f68d5fa33337eec7b703ddcdbfcc0d5801591421c78a19618b29382fb" exitCode=0 Feb 03 08:55:25 crc kubenswrapper[4998]: I0203 08:55:25.307695 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-e639-account-create-update-btlh6" event={"ID":"06811dbe-b72e-435d-b0a3-474209808036","Type":"ContainerDied","Data":"19be361f68d5fa33337eec7b703ddcdbfcc0d5801591421c78a19618b29382fb"} Feb 03 08:55:25 crc kubenswrapper[4998]: I0203 08:55:25.307743 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-e639-account-create-update-btlh6" event={"ID":"06811dbe-b72e-435d-b0a3-474209808036","Type":"ContainerStarted","Data":"ee151858174fd049ce81f16d2e2912021cd0cfff082b86f98298d029e63b137f"} Feb 03 08:55:25 crc kubenswrapper[4998]: I0203 08:55:25.309996 4998 generic.go:334] "Generic (PLEG): container finished" podID="79bf0736-7674-434e-9bd1-25f3950556d6" containerID="94d9071e95c57827561a5252c694b0ab5ed8f25f7224d4fa200cbf12551945aa" exitCode=0 Feb 03 08:55:25 crc kubenswrapper[4998]: I0203 08:55:25.310030 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-g9cxn" event={"ID":"79bf0736-7674-434e-9bd1-25f3950556d6","Type":"ContainerDied","Data":"94d9071e95c57827561a5252c694b0ab5ed8f25f7224d4fa200cbf12551945aa"} Feb 03 08:55:25 crc kubenswrapper[4998]: I0203 08:55:25.310056 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-g9cxn" event={"ID":"79bf0736-7674-434e-9bd1-25f3950556d6","Type":"ContainerStarted","Data":"edef0acee253ae7cac210baed402059d85b0b0db4d1ac34845fafa9c2f513d66"} Feb 03 08:55:26 crc kubenswrapper[4998]: I0203 08:55:26.656445 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-g9cxn" Feb 03 08:55:26 crc kubenswrapper[4998]: I0203 08:55:26.663607 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-e639-account-create-update-btlh6" Feb 03 08:55:26 crc kubenswrapper[4998]: I0203 08:55:26.686943 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79bf0736-7674-434e-9bd1-25f3950556d6-operator-scripts\") pod \"79bf0736-7674-434e-9bd1-25f3950556d6\" (UID: \"79bf0736-7674-434e-9bd1-25f3950556d6\") " Feb 03 08:55:26 crc kubenswrapper[4998]: I0203 08:55:26.688111 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/06811dbe-b72e-435d-b0a3-474209808036-operator-scripts\") pod \"06811dbe-b72e-435d-b0a3-474209808036\" (UID: \"06811dbe-b72e-435d-b0a3-474209808036\") " Feb 03 08:55:26 crc kubenswrapper[4998]: I0203 08:55:26.688179 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zzbsv\" (UniqueName: \"kubernetes.io/projected/79bf0736-7674-434e-9bd1-25f3950556d6-kube-api-access-zzbsv\") pod \"79bf0736-7674-434e-9bd1-25f3950556d6\" (UID: \"79bf0736-7674-434e-9bd1-25f3950556d6\") " Feb 03 08:55:26 crc kubenswrapper[4998]: I0203 08:55:26.688234 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7l8nm\" (UniqueName: \"kubernetes.io/projected/06811dbe-b72e-435d-b0a3-474209808036-kube-api-access-7l8nm\") pod \"06811dbe-b72e-435d-b0a3-474209808036\" (UID: \"06811dbe-b72e-435d-b0a3-474209808036\") " Feb 03 08:55:26 crc kubenswrapper[4998]: I0203 08:55:26.688350 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79bf0736-7674-434e-9bd1-25f3950556d6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "79bf0736-7674-434e-9bd1-25f3950556d6" (UID: "79bf0736-7674-434e-9bd1-25f3950556d6"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:55:26 crc kubenswrapper[4998]: I0203 08:55:26.688806 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/06811dbe-b72e-435d-b0a3-474209808036-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "06811dbe-b72e-435d-b0a3-474209808036" (UID: "06811dbe-b72e-435d-b0a3-474209808036"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:55:26 crc kubenswrapper[4998]: I0203 08:55:26.688864 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79bf0736-7674-434e-9bd1-25f3950556d6-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 08:55:26 crc kubenswrapper[4998]: I0203 08:55:26.694980 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06811dbe-b72e-435d-b0a3-474209808036-kube-api-access-7l8nm" (OuterVolumeSpecName: "kube-api-access-7l8nm") pod "06811dbe-b72e-435d-b0a3-474209808036" (UID: "06811dbe-b72e-435d-b0a3-474209808036"). InnerVolumeSpecName "kube-api-access-7l8nm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:55:26 crc kubenswrapper[4998]: I0203 08:55:26.695053 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79bf0736-7674-434e-9bd1-25f3950556d6-kube-api-access-zzbsv" (OuterVolumeSpecName: "kube-api-access-zzbsv") pod "79bf0736-7674-434e-9bd1-25f3950556d6" (UID: "79bf0736-7674-434e-9bd1-25f3950556d6"). 
InnerVolumeSpecName "kube-api-access-zzbsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:55:26 crc kubenswrapper[4998]: I0203 08:55:26.790601 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/06811dbe-b72e-435d-b0a3-474209808036-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 08:55:26 crc kubenswrapper[4998]: I0203 08:55:26.790720 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zzbsv\" (UniqueName: \"kubernetes.io/projected/79bf0736-7674-434e-9bd1-25f3950556d6-kube-api-access-zzbsv\") on node \"crc\" DevicePath \"\"" Feb 03 08:55:26 crc kubenswrapper[4998]: I0203 08:55:26.790734 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7l8nm\" (UniqueName: \"kubernetes.io/projected/06811dbe-b72e-435d-b0a3-474209808036-kube-api-access-7l8nm\") on node \"crc\" DevicePath \"\"" Feb 03 08:55:27 crc kubenswrapper[4998]: I0203 08:55:27.330513 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-g9cxn" event={"ID":"79bf0736-7674-434e-9bd1-25f3950556d6","Type":"ContainerDied","Data":"edef0acee253ae7cac210baed402059d85b0b0db4d1ac34845fafa9c2f513d66"} Feb 03 08:55:27 crc kubenswrapper[4998]: I0203 08:55:27.331063 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="edef0acee253ae7cac210baed402059d85b0b0db4d1ac34845fafa9c2f513d66" Feb 03 08:55:27 crc kubenswrapper[4998]: I0203 08:55:27.330570 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-g9cxn" Feb 03 08:55:27 crc kubenswrapper[4998]: I0203 08:55:27.332371 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-e639-account-create-update-btlh6" Feb 03 08:55:27 crc kubenswrapper[4998]: I0203 08:55:27.332379 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-e639-account-create-update-btlh6" event={"ID":"06811dbe-b72e-435d-b0a3-474209808036","Type":"ContainerDied","Data":"ee151858174fd049ce81f16d2e2912021cd0cfff082b86f98298d029e63b137f"} Feb 03 08:55:27 crc kubenswrapper[4998]: I0203 08:55:27.332461 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ee151858174fd049ce81f16d2e2912021cd0cfff082b86f98298d029e63b137f" Feb 03 08:55:29 crc kubenswrapper[4998]: I0203 08:55:29.065925 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-j6skg"] Feb 03 08:55:29 crc kubenswrapper[4998]: E0203 08:55:29.066518 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06811dbe-b72e-435d-b0a3-474209808036" containerName="mariadb-account-create-update" Feb 03 08:55:29 crc kubenswrapper[4998]: I0203 08:55:29.066531 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="06811dbe-b72e-435d-b0a3-474209808036" containerName="mariadb-account-create-update" Feb 03 08:55:29 crc kubenswrapper[4998]: E0203 08:55:29.066565 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79bf0736-7674-434e-9bd1-25f3950556d6" containerName="mariadb-database-create" Feb 03 08:55:29 crc kubenswrapper[4998]: I0203 08:55:29.066571 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="79bf0736-7674-434e-9bd1-25f3950556d6" containerName="mariadb-database-create" Feb 03 08:55:29 crc kubenswrapper[4998]: I0203 08:55:29.066717 4998 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="06811dbe-b72e-435d-b0a3-474209808036" containerName="mariadb-account-create-update" Feb 03 08:55:29 crc kubenswrapper[4998]: I0203 08:55:29.066730 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="79bf0736-7674-434e-9bd1-25f3950556d6" containerName="mariadb-database-create" Feb 03 08:55:29 crc kubenswrapper[4998]: I0203 08:55:29.067239 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-j6skg" Feb 03 08:55:29 crc kubenswrapper[4998]: I0203 08:55:29.070064 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-zkjzn" Feb 03 08:55:29 crc kubenswrapper[4998]: I0203 08:55:29.075700 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Feb 03 08:55:29 crc kubenswrapper[4998]: I0203 08:55:29.078227 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-j6skg"] Feb 03 08:55:29 crc kubenswrapper[4998]: I0203 08:55:29.134369 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5h7pz\" (UniqueName: \"kubernetes.io/projected/7a12fec4-4f0b-415b-8dbe-2a81626c82d6-kube-api-access-5h7pz\") pod \"barbican-db-sync-j6skg\" (UID: \"7a12fec4-4f0b-415b-8dbe-2a81626c82d6\") " pod="openstack/barbican-db-sync-j6skg" Feb 03 08:55:29 crc kubenswrapper[4998]: I0203 08:55:29.134543 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a12fec4-4f0b-415b-8dbe-2a81626c82d6-combined-ca-bundle\") pod \"barbican-db-sync-j6skg\" (UID: \"7a12fec4-4f0b-415b-8dbe-2a81626c82d6\") " pod="openstack/barbican-db-sync-j6skg" Feb 03 08:55:29 crc kubenswrapper[4998]: I0203 08:55:29.134612 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7a12fec4-4f0b-415b-8dbe-2a81626c82d6-db-sync-config-data\") pod \"barbican-db-sync-j6skg\" (UID: \"7a12fec4-4f0b-415b-8dbe-2a81626c82d6\") " pod="openstack/barbican-db-sync-j6skg" Feb 03 08:55:29 crc kubenswrapper[4998]: I0203 08:55:29.236216 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5h7pz\" (UniqueName: \"kubernetes.io/projected/7a12fec4-4f0b-415b-8dbe-2a81626c82d6-kube-api-access-5h7pz\") pod \"barbican-db-sync-j6skg\" (UID: \"7a12fec4-4f0b-415b-8dbe-2a81626c82d6\") " pod="openstack/barbican-db-sync-j6skg" Feb 03 08:55:29 crc kubenswrapper[4998]: I0203 08:55:29.236286 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a12fec4-4f0b-415b-8dbe-2a81626c82d6-combined-ca-bundle\") pod \"barbican-db-sync-j6skg\" (UID: \"7a12fec4-4f0b-415b-8dbe-2a81626c82d6\") " pod="openstack/barbican-db-sync-j6skg" Feb 03 08:55:29 crc kubenswrapper[4998]: I0203 08:55:29.236312 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7a12fec4-4f0b-415b-8dbe-2a81626c82d6-db-sync-config-data\") pod \"barbican-db-sync-j6skg\" (UID: \"7a12fec4-4f0b-415b-8dbe-2a81626c82d6\") " pod="openstack/barbican-db-sync-j6skg" Feb 03 08:55:29 crc kubenswrapper[4998]: I0203 08:55:29.240968 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: 
\"kubernetes.io/secret/7a12fec4-4f0b-415b-8dbe-2a81626c82d6-db-sync-config-data\") pod \"barbican-db-sync-j6skg\" (UID: \"7a12fec4-4f0b-415b-8dbe-2a81626c82d6\") " pod="openstack/barbican-db-sync-j6skg" Feb 03 08:55:29 crc kubenswrapper[4998]: I0203 08:55:29.242039 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a12fec4-4f0b-415b-8dbe-2a81626c82d6-combined-ca-bundle\") pod \"barbican-db-sync-j6skg\" (UID: \"7a12fec4-4f0b-415b-8dbe-2a81626c82d6\") " pod="openstack/barbican-db-sync-j6skg" Feb 03 08:55:29 crc kubenswrapper[4998]: I0203 08:55:29.262255 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5h7pz\" (UniqueName: \"kubernetes.io/projected/7a12fec4-4f0b-415b-8dbe-2a81626c82d6-kube-api-access-5h7pz\") pod \"barbican-db-sync-j6skg\" (UID: \"7a12fec4-4f0b-415b-8dbe-2a81626c82d6\") " pod="openstack/barbican-db-sync-j6skg" Feb 03 08:55:29 crc kubenswrapper[4998]: I0203 08:55:29.390519 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-j6skg" Feb 03 08:55:29 crc kubenswrapper[4998]: I0203 08:55:29.867148 4998 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 03 08:55:29 crc kubenswrapper[4998]: I0203 08:55:29.875471 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-j6skg"] Feb 03 08:55:30 crc kubenswrapper[4998]: I0203 08:55:30.357004 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-j6skg" event={"ID":"7a12fec4-4f0b-415b-8dbe-2a81626c82d6","Type":"ContainerStarted","Data":"e30c429b7eec0aac8b731d65bcb6baa366d8ecb035664be91c04e5545b3c5568"} Feb 03 08:55:35 crc kubenswrapper[4998]: I0203 08:55:35.417437 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-j6skg" event={"ID":"7a12fec4-4f0b-415b-8dbe-2a81626c82d6","Type":"ContainerStarted","Data":"e8de2d4f278eedd2fd9e28c87db243b68a732d3feadfc8fa81335572005d6d58"} Feb 03 08:55:35 crc kubenswrapper[4998]: I0203 08:55:35.449733 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-j6skg" podStartSLOduration=2.0770074689999998 podStartE2EDuration="6.449714618s" podCreationTimestamp="2026-02-03 08:55:29 +0000 UTC" firstStartedPulling="2026-02-03 08:55:29.866846636 +0000 UTC m=+7768.153540442" lastFinishedPulling="2026-02-03 08:55:34.239553785 +0000 UTC m=+7772.526247591" observedRunningTime="2026-02-03 08:55:35.440394843 +0000 UTC m=+7773.727088639" watchObservedRunningTime="2026-02-03 08:55:35.449714618 +0000 UTC m=+7773.736408424" Feb 03 08:55:37 crc kubenswrapper[4998]: I0203 08:55:37.438278 4998 generic.go:334] "Generic (PLEG): container finished" podID="7a12fec4-4f0b-415b-8dbe-2a81626c82d6" containerID="e8de2d4f278eedd2fd9e28c87db243b68a732d3feadfc8fa81335572005d6d58" exitCode=0 Feb 03 08:55:37 crc kubenswrapper[4998]: I0203 08:55:37.438426 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-j6skg" event={"ID":"7a12fec4-4f0b-415b-8dbe-2a81626c82d6","Type":"ContainerDied","Data":"e8de2d4f278eedd2fd9e28c87db243b68a732d3feadfc8fa81335572005d6d58"} Feb 03 08:55:38 crc kubenswrapper[4998]: I0203 08:55:38.748441 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-j6skg" Feb 03 08:55:38 crc kubenswrapper[4998]: I0203 08:55:38.801980 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5h7pz\" (UniqueName: \"kubernetes.io/projected/7a12fec4-4f0b-415b-8dbe-2a81626c82d6-kube-api-access-5h7pz\") pod \"7a12fec4-4f0b-415b-8dbe-2a81626c82d6\" (UID: \"7a12fec4-4f0b-415b-8dbe-2a81626c82d6\") " Feb 03 08:55:38 crc kubenswrapper[4998]: I0203 08:55:38.802599 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7a12fec4-4f0b-415b-8dbe-2a81626c82d6-db-sync-config-data\") pod \"7a12fec4-4f0b-415b-8dbe-2a81626c82d6\" (UID: \"7a12fec4-4f0b-415b-8dbe-2a81626c82d6\") " Feb 03 08:55:38 crc kubenswrapper[4998]: I0203 08:55:38.802652 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a12fec4-4f0b-415b-8dbe-2a81626c82d6-combined-ca-bundle\") pod \"7a12fec4-4f0b-415b-8dbe-2a81626c82d6\" (UID: \"7a12fec4-4f0b-415b-8dbe-2a81626c82d6\") " Feb 03 08:55:38 crc kubenswrapper[4998]: I0203 08:55:38.809053 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a12fec4-4f0b-415b-8dbe-2a81626c82d6-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "7a12fec4-4f0b-415b-8dbe-2a81626c82d6" (UID: "7a12fec4-4f0b-415b-8dbe-2a81626c82d6"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:55:38 crc kubenswrapper[4998]: I0203 08:55:38.809100 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a12fec4-4f0b-415b-8dbe-2a81626c82d6-kube-api-access-5h7pz" (OuterVolumeSpecName: "kube-api-access-5h7pz") pod "7a12fec4-4f0b-415b-8dbe-2a81626c82d6" (UID: "7a12fec4-4f0b-415b-8dbe-2a81626c82d6"). InnerVolumeSpecName "kube-api-access-5h7pz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:55:38 crc kubenswrapper[4998]: I0203 08:55:38.825280 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a12fec4-4f0b-415b-8dbe-2a81626c82d6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7a12fec4-4f0b-415b-8dbe-2a81626c82d6" (UID: "7a12fec4-4f0b-415b-8dbe-2a81626c82d6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:55:38 crc kubenswrapper[4998]: I0203 08:55:38.905477 4998 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7a12fec4-4f0b-415b-8dbe-2a81626c82d6-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 08:55:38 crc kubenswrapper[4998]: I0203 08:55:38.905510 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a12fec4-4f0b-415b-8dbe-2a81626c82d6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 08:55:38 crc kubenswrapper[4998]: I0203 08:55:38.905522 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5h7pz\" (UniqueName: \"kubernetes.io/projected/7a12fec4-4f0b-415b-8dbe-2a81626c82d6-kube-api-access-5h7pz\") on node \"crc\" DevicePath \"\"" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.452694 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-j6skg" event={"ID":"7a12fec4-4f0b-415b-8dbe-2a81626c82d6","Type":"ContainerDied","Data":"e30c429b7eec0aac8b731d65bcb6baa366d8ecb035664be91c04e5545b3c5568"} Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.452730 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e30c429b7eec0aac8b731d65bcb6baa366d8ecb035664be91c04e5545b3c5568" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.452753 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-j6skg" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.676028 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-d84f77cbd-f74wg"] Feb 03 08:55:39 crc kubenswrapper[4998]: E0203 08:55:39.676624 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a12fec4-4f0b-415b-8dbe-2a81626c82d6" containerName="barbican-db-sync" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.676651 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a12fec4-4f0b-415b-8dbe-2a81626c82d6" containerName="barbican-db-sync" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.676960 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a12fec4-4f0b-415b-8dbe-2a81626c82d6" containerName="barbican-db-sync" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.678169 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-d84f77cbd-f74wg" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.679382 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-649f778c97-ksh5g"] Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.680530 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-zkjzn" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.680688 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-649f778c97-ksh5g" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.685672 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.685896 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.686075 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.720954 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-649f778c97-ksh5g"] Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.721720 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgq4h\" (UniqueName: \"kubernetes.io/projected/5c31eb51-94cb-4eee-b8e2-b1ae44f370ad-kube-api-access-tgq4h\") pod \"barbican-worker-649f778c97-ksh5g\" (UID: \"5c31eb51-94cb-4eee-b8e2-b1ae44f370ad\") " pod="openstack/barbican-worker-649f778c97-ksh5g" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.721769 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c31eb51-94cb-4eee-b8e2-b1ae44f370ad-config-data\") pod \"barbican-worker-649f778c97-ksh5g\" (UID: \"5c31eb51-94cb-4eee-b8e2-b1ae44f370ad\") " pod="openstack/barbican-worker-649f778c97-ksh5g" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.721840 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5c31eb51-94cb-4eee-b8e2-b1ae44f370ad-config-data-custom\") pod \"barbican-worker-649f778c97-ksh5g\" (UID: \"5c31eb51-94cb-4eee-b8e2-b1ae44f370ad\") " pod="openstack/barbican-worker-649f778c97-ksh5g" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.721876 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c31eb51-94cb-4eee-b8e2-b1ae44f370ad-logs\") pod \"barbican-worker-649f778c97-ksh5g\" (UID: \"5c31eb51-94cb-4eee-b8e2-b1ae44f370ad\") " pod="openstack/barbican-worker-649f778c97-ksh5g" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.721955 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c31eb51-94cb-4eee-b8e2-b1ae44f370ad-combined-ca-bundle\") pod \"barbican-worker-649f778c97-ksh5g\" (UID: \"5c31eb51-94cb-4eee-b8e2-b1ae44f370ad\") " pod="openstack/barbican-worker-649f778c97-ksh5g" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.721995 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e9db618d-21b6-4555-9c37-d25b45669164-config-data-custom\") pod \"barbican-keystone-listener-d84f77cbd-f74wg\" (UID: \"e9db618d-21b6-4555-9c37-d25b45669164\") " pod="openstack/barbican-keystone-listener-d84f77cbd-f74wg" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.722021 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmnx6\" (UniqueName: 
\"kubernetes.io/projected/e9db618d-21b6-4555-9c37-d25b45669164-kube-api-access-xmnx6\") pod \"barbican-keystone-listener-d84f77cbd-f74wg\" (UID: \"e9db618d-21b6-4555-9c37-d25b45669164\") " pod="openstack/barbican-keystone-listener-d84f77cbd-f74wg" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.722090 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9db618d-21b6-4555-9c37-d25b45669164-config-data\") pod \"barbican-keystone-listener-d84f77cbd-f74wg\" (UID: \"e9db618d-21b6-4555-9c37-d25b45669164\") " pod="openstack/barbican-keystone-listener-d84f77cbd-f74wg" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.722113 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e9db618d-21b6-4555-9c37-d25b45669164-logs\") pod \"barbican-keystone-listener-d84f77cbd-f74wg\" (UID: \"e9db618d-21b6-4555-9c37-d25b45669164\") " pod="openstack/barbican-keystone-listener-d84f77cbd-f74wg" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.722153 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9db618d-21b6-4555-9c37-d25b45669164-combined-ca-bundle\") pod \"barbican-keystone-listener-d84f77cbd-f74wg\" (UID: \"e9db618d-21b6-4555-9c37-d25b45669164\") " pod="openstack/barbican-keystone-listener-d84f77cbd-f74wg" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.732551 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-d84f77cbd-f74wg"] Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.776477 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-55df84ddd5-j4nh8"] Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.777953 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55df84ddd5-j4nh8" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.819640 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55df84ddd5-j4nh8"] Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.824171 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c31eb51-94cb-4eee-b8e2-b1ae44f370ad-combined-ca-bundle\") pod \"barbican-worker-649f778c97-ksh5g\" (UID: \"5c31eb51-94cb-4eee-b8e2-b1ae44f370ad\") " pod="openstack/barbican-worker-649f778c97-ksh5g" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.824229 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e9db618d-21b6-4555-9c37-d25b45669164-config-data-custom\") pod \"barbican-keystone-listener-d84f77cbd-f74wg\" (UID: \"e9db618d-21b6-4555-9c37-d25b45669164\") " pod="openstack/barbican-keystone-listener-d84f77cbd-f74wg" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.824260 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmnx6\" (UniqueName: \"kubernetes.io/projected/e9db618d-21b6-4555-9c37-d25b45669164-kube-api-access-xmnx6\") pod \"barbican-keystone-listener-d84f77cbd-f74wg\" (UID: \"e9db618d-21b6-4555-9c37-d25b45669164\") " pod="openstack/barbican-keystone-listener-d84f77cbd-f74wg" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.824315 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a3c0768e-90fd-40da-90ff-67a8c5022d07-ovsdbserver-nb\") pod \"dnsmasq-dns-55df84ddd5-j4nh8\" (UID: \"a3c0768e-90fd-40da-90ff-67a8c5022d07\") " pod="openstack/dnsmasq-dns-55df84ddd5-j4nh8" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.824356 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9db618d-21b6-4555-9c37-d25b45669164-config-data\") pod \"barbican-keystone-listener-d84f77cbd-f74wg\" (UID: \"e9db618d-21b6-4555-9c37-d25b45669164\") " pod="openstack/barbican-keystone-listener-d84f77cbd-f74wg" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.824374 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e9db618d-21b6-4555-9c37-d25b45669164-logs\") pod \"barbican-keystone-listener-d84f77cbd-f74wg\" (UID: \"e9db618d-21b6-4555-9c37-d25b45669164\") " pod="openstack/barbican-keystone-listener-d84f77cbd-f74wg" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.824395 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9db618d-21b6-4555-9c37-d25b45669164-combined-ca-bundle\") pod \"barbican-keystone-listener-d84f77cbd-f74wg\" (UID: \"e9db618d-21b6-4555-9c37-d25b45669164\") " pod="openstack/barbican-keystone-listener-d84f77cbd-f74wg" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.824414 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgq4h\" (UniqueName: \"kubernetes.io/projected/5c31eb51-94cb-4eee-b8e2-b1ae44f370ad-kube-api-access-tgq4h\") pod \"barbican-worker-649f778c97-ksh5g\" (UID: \"5c31eb51-94cb-4eee-b8e2-b1ae44f370ad\") " pod="openstack/barbican-worker-649f778c97-ksh5g" Feb 03 08:55:39 crc 
kubenswrapper[4998]: I0203 08:55:39.824433 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a3c0768e-90fd-40da-90ff-67a8c5022d07-ovsdbserver-sb\") pod \"dnsmasq-dns-55df84ddd5-j4nh8\" (UID: \"a3c0768e-90fd-40da-90ff-67a8c5022d07\") " pod="openstack/dnsmasq-dns-55df84ddd5-j4nh8" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.824458 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c31eb51-94cb-4eee-b8e2-b1ae44f370ad-config-data\") pod \"barbican-worker-649f778c97-ksh5g\" (UID: \"5c31eb51-94cb-4eee-b8e2-b1ae44f370ad\") " pod="openstack/barbican-worker-649f778c97-ksh5g" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.824478 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5c31eb51-94cb-4eee-b8e2-b1ae44f370ad-config-data-custom\") pod \"barbican-worker-649f778c97-ksh5g\" (UID: \"5c31eb51-94cb-4eee-b8e2-b1ae44f370ad\") " pod="openstack/barbican-worker-649f778c97-ksh5g" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.824502 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a3c0768e-90fd-40da-90ff-67a8c5022d07-dns-svc\") pod \"dnsmasq-dns-55df84ddd5-j4nh8\" (UID: \"a3c0768e-90fd-40da-90ff-67a8c5022d07\") " pod="openstack/dnsmasq-dns-55df84ddd5-j4nh8" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.824525 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c31eb51-94cb-4eee-b8e2-b1ae44f370ad-logs\") pod \"barbican-worker-649f778c97-ksh5g\" (UID: \"5c31eb51-94cb-4eee-b8e2-b1ae44f370ad\") " pod="openstack/barbican-worker-649f778c97-ksh5g" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.824567 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qsvpp\" (UniqueName: \"kubernetes.io/projected/a3c0768e-90fd-40da-90ff-67a8c5022d07-kube-api-access-qsvpp\") pod \"dnsmasq-dns-55df84ddd5-j4nh8\" (UID: \"a3c0768e-90fd-40da-90ff-67a8c5022d07\") " pod="openstack/dnsmasq-dns-55df84ddd5-j4nh8" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.824599 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3c0768e-90fd-40da-90ff-67a8c5022d07-config\") pod \"dnsmasq-dns-55df84ddd5-j4nh8\" (UID: \"a3c0768e-90fd-40da-90ff-67a8c5022d07\") " pod="openstack/dnsmasq-dns-55df84ddd5-j4nh8" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.825516 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e9db618d-21b6-4555-9c37-d25b45669164-logs\") pod \"barbican-keystone-listener-d84f77cbd-f74wg\" (UID: \"e9db618d-21b6-4555-9c37-d25b45669164\") " pod="openstack/barbican-keystone-listener-d84f77cbd-f74wg" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.829712 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c31eb51-94cb-4eee-b8e2-b1ae44f370ad-logs\") pod \"barbican-worker-649f778c97-ksh5g\" (UID: \"5c31eb51-94cb-4eee-b8e2-b1ae44f370ad\") " pod="openstack/barbican-worker-649f778c97-ksh5g" Feb 03 08:55:39 crc 
kubenswrapper[4998]: I0203 08:55:39.830173 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e9db618d-21b6-4555-9c37-d25b45669164-config-data-custom\") pod \"barbican-keystone-listener-d84f77cbd-f74wg\" (UID: \"e9db618d-21b6-4555-9c37-d25b45669164\") " pod="openstack/barbican-keystone-listener-d84f77cbd-f74wg" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.831411 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9db618d-21b6-4555-9c37-d25b45669164-config-data\") pod \"barbican-keystone-listener-d84f77cbd-f74wg\" (UID: \"e9db618d-21b6-4555-9c37-d25b45669164\") " pod="openstack/barbican-keystone-listener-d84f77cbd-f74wg" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.831575 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5c31eb51-94cb-4eee-b8e2-b1ae44f370ad-config-data-custom\") pod \"barbican-worker-649f778c97-ksh5g\" (UID: \"5c31eb51-94cb-4eee-b8e2-b1ae44f370ad\") " pod="openstack/barbican-worker-649f778c97-ksh5g" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.833966 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c31eb51-94cb-4eee-b8e2-b1ae44f370ad-config-data\") pod \"barbican-worker-649f778c97-ksh5g\" (UID: \"5c31eb51-94cb-4eee-b8e2-b1ae44f370ad\") " pod="openstack/barbican-worker-649f778c97-ksh5g" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.836731 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9db618d-21b6-4555-9c37-d25b45669164-combined-ca-bundle\") pod \"barbican-keystone-listener-d84f77cbd-f74wg\" (UID: \"e9db618d-21b6-4555-9c37-d25b45669164\") " pod="openstack/barbican-keystone-listener-d84f77cbd-f74wg" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.841227 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c31eb51-94cb-4eee-b8e2-b1ae44f370ad-combined-ca-bundle\") pod \"barbican-worker-649f778c97-ksh5g\" (UID: \"5c31eb51-94cb-4eee-b8e2-b1ae44f370ad\") " pod="openstack/barbican-worker-649f778c97-ksh5g" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.851694 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmnx6\" (UniqueName: \"kubernetes.io/projected/e9db618d-21b6-4555-9c37-d25b45669164-kube-api-access-xmnx6\") pod \"barbican-keystone-listener-d84f77cbd-f74wg\" (UID: \"e9db618d-21b6-4555-9c37-d25b45669164\") " pod="openstack/barbican-keystone-listener-d84f77cbd-f74wg" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.856508 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgq4h\" (UniqueName: \"kubernetes.io/projected/5c31eb51-94cb-4eee-b8e2-b1ae44f370ad-kube-api-access-tgq4h\") pod \"barbican-worker-649f778c97-ksh5g\" (UID: \"5c31eb51-94cb-4eee-b8e2-b1ae44f370ad\") " pod="openstack/barbican-worker-649f778c97-ksh5g" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.927130 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a3c0768e-90fd-40da-90ff-67a8c5022d07-dns-svc\") pod \"dnsmasq-dns-55df84ddd5-j4nh8\" (UID: \"a3c0768e-90fd-40da-90ff-67a8c5022d07\") " 
pod="openstack/dnsmasq-dns-55df84ddd5-j4nh8" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.927205 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qsvpp\" (UniqueName: \"kubernetes.io/projected/a3c0768e-90fd-40da-90ff-67a8c5022d07-kube-api-access-qsvpp\") pod \"dnsmasq-dns-55df84ddd5-j4nh8\" (UID: \"a3c0768e-90fd-40da-90ff-67a8c5022d07\") " pod="openstack/dnsmasq-dns-55df84ddd5-j4nh8" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.927250 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3c0768e-90fd-40da-90ff-67a8c5022d07-config\") pod \"dnsmasq-dns-55df84ddd5-j4nh8\" (UID: \"a3c0768e-90fd-40da-90ff-67a8c5022d07\") " pod="openstack/dnsmasq-dns-55df84ddd5-j4nh8" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.927357 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a3c0768e-90fd-40da-90ff-67a8c5022d07-ovsdbserver-nb\") pod \"dnsmasq-dns-55df84ddd5-j4nh8\" (UID: \"a3c0768e-90fd-40da-90ff-67a8c5022d07\") " pod="openstack/dnsmasq-dns-55df84ddd5-j4nh8" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.927411 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a3c0768e-90fd-40da-90ff-67a8c5022d07-ovsdbserver-sb\") pod \"dnsmasq-dns-55df84ddd5-j4nh8\" (UID: \"a3c0768e-90fd-40da-90ff-67a8c5022d07\") " pod="openstack/dnsmasq-dns-55df84ddd5-j4nh8" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.928525 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a3c0768e-90fd-40da-90ff-67a8c5022d07-ovsdbserver-sb\") pod \"dnsmasq-dns-55df84ddd5-j4nh8\" (UID: \"a3c0768e-90fd-40da-90ff-67a8c5022d07\") " pod="openstack/dnsmasq-dns-55df84ddd5-j4nh8" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.929371 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a3c0768e-90fd-40da-90ff-67a8c5022d07-dns-svc\") pod \"dnsmasq-dns-55df84ddd5-j4nh8\" (UID: \"a3c0768e-90fd-40da-90ff-67a8c5022d07\") " pod="openstack/dnsmasq-dns-55df84ddd5-j4nh8" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.930238 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a3c0768e-90fd-40da-90ff-67a8c5022d07-ovsdbserver-nb\") pod \"dnsmasq-dns-55df84ddd5-j4nh8\" (UID: \"a3c0768e-90fd-40da-90ff-67a8c5022d07\") " pod="openstack/dnsmasq-dns-55df84ddd5-j4nh8" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.931396 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3c0768e-90fd-40da-90ff-67a8c5022d07-config\") pod \"dnsmasq-dns-55df84ddd5-j4nh8\" (UID: \"a3c0768e-90fd-40da-90ff-67a8c5022d07\") " pod="openstack/dnsmasq-dns-55df84ddd5-j4nh8" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.936416 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-6ff6f58c8-vtpc6"] Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.938328 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6ff6f58c8-vtpc6" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.940664 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.948266 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6ff6f58c8-vtpc6"] Feb 03 08:55:39 crc kubenswrapper[4998]: I0203 08:55:39.949069 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qsvpp\" (UniqueName: \"kubernetes.io/projected/a3c0768e-90fd-40da-90ff-67a8c5022d07-kube-api-access-qsvpp\") pod \"dnsmasq-dns-55df84ddd5-j4nh8\" (UID: \"a3c0768e-90fd-40da-90ff-67a8c5022d07\") " pod="openstack/dnsmasq-dns-55df84ddd5-j4nh8" Feb 03 08:55:40 crc kubenswrapper[4998]: I0203 08:55:40.004367 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-d84f77cbd-f74wg" Feb 03 08:55:40 crc kubenswrapper[4998]: I0203 08:55:40.020975 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-649f778c97-ksh5g" Feb 03 08:55:40 crc kubenswrapper[4998]: I0203 08:55:40.028831 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/551dd212-c750-41d2-8eab-7b2f25cd8d20-logs\") pod \"barbican-api-6ff6f58c8-vtpc6\" (UID: \"551dd212-c750-41d2-8eab-7b2f25cd8d20\") " pod="openstack/barbican-api-6ff6f58c8-vtpc6" Feb 03 08:55:40 crc kubenswrapper[4998]: I0203 08:55:40.028918 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/551dd212-c750-41d2-8eab-7b2f25cd8d20-config-data-custom\") pod \"barbican-api-6ff6f58c8-vtpc6\" (UID: \"551dd212-c750-41d2-8eab-7b2f25cd8d20\") " pod="openstack/barbican-api-6ff6f58c8-vtpc6" Feb 03 08:55:40 crc kubenswrapper[4998]: I0203 08:55:40.028972 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/551dd212-c750-41d2-8eab-7b2f25cd8d20-combined-ca-bundle\") pod \"barbican-api-6ff6f58c8-vtpc6\" (UID: \"551dd212-c750-41d2-8eab-7b2f25cd8d20\") " pod="openstack/barbican-api-6ff6f58c8-vtpc6" Feb 03 08:55:40 crc kubenswrapper[4998]: I0203 08:55:40.029008 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hlklj\" (UniqueName: \"kubernetes.io/projected/551dd212-c750-41d2-8eab-7b2f25cd8d20-kube-api-access-hlklj\") pod \"barbican-api-6ff6f58c8-vtpc6\" (UID: \"551dd212-c750-41d2-8eab-7b2f25cd8d20\") " pod="openstack/barbican-api-6ff6f58c8-vtpc6" Feb 03 08:55:40 crc kubenswrapper[4998]: I0203 08:55:40.029030 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/551dd212-c750-41d2-8eab-7b2f25cd8d20-config-data\") pod \"barbican-api-6ff6f58c8-vtpc6\" (UID: \"551dd212-c750-41d2-8eab-7b2f25cd8d20\") " pod="openstack/barbican-api-6ff6f58c8-vtpc6" Feb 03 08:55:40 crc kubenswrapper[4998]: I0203 08:55:40.099543 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55df84ddd5-j4nh8" Feb 03 08:55:40 crc kubenswrapper[4998]: I0203 08:55:40.130952 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/551dd212-c750-41d2-8eab-7b2f25cd8d20-combined-ca-bundle\") pod \"barbican-api-6ff6f58c8-vtpc6\" (UID: \"551dd212-c750-41d2-8eab-7b2f25cd8d20\") " pod="openstack/barbican-api-6ff6f58c8-vtpc6" Feb 03 08:55:40 crc kubenswrapper[4998]: I0203 08:55:40.131386 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hlklj\" (UniqueName: \"kubernetes.io/projected/551dd212-c750-41d2-8eab-7b2f25cd8d20-kube-api-access-hlklj\") pod \"barbican-api-6ff6f58c8-vtpc6\" (UID: \"551dd212-c750-41d2-8eab-7b2f25cd8d20\") " pod="openstack/barbican-api-6ff6f58c8-vtpc6" Feb 03 08:55:40 crc kubenswrapper[4998]: I0203 08:55:40.131456 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/551dd212-c750-41d2-8eab-7b2f25cd8d20-config-data\") pod \"barbican-api-6ff6f58c8-vtpc6\" (UID: \"551dd212-c750-41d2-8eab-7b2f25cd8d20\") " pod="openstack/barbican-api-6ff6f58c8-vtpc6" Feb 03 08:55:40 crc kubenswrapper[4998]: I0203 08:55:40.131518 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/551dd212-c750-41d2-8eab-7b2f25cd8d20-logs\") pod \"barbican-api-6ff6f58c8-vtpc6\" (UID: \"551dd212-c750-41d2-8eab-7b2f25cd8d20\") " pod="openstack/barbican-api-6ff6f58c8-vtpc6" Feb 03 08:55:40 crc kubenswrapper[4998]: I0203 08:55:40.131626 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/551dd212-c750-41d2-8eab-7b2f25cd8d20-config-data-custom\") pod \"barbican-api-6ff6f58c8-vtpc6\" (UID: \"551dd212-c750-41d2-8eab-7b2f25cd8d20\") " pod="openstack/barbican-api-6ff6f58c8-vtpc6" Feb 03 08:55:40 crc kubenswrapper[4998]: I0203 08:55:40.134217 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/551dd212-c750-41d2-8eab-7b2f25cd8d20-logs\") pod \"barbican-api-6ff6f58c8-vtpc6\" (UID: \"551dd212-c750-41d2-8eab-7b2f25cd8d20\") " pod="openstack/barbican-api-6ff6f58c8-vtpc6" Feb 03 08:55:40 crc kubenswrapper[4998]: I0203 08:55:40.139496 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/551dd212-c750-41d2-8eab-7b2f25cd8d20-config-data-custom\") pod \"barbican-api-6ff6f58c8-vtpc6\" (UID: \"551dd212-c750-41d2-8eab-7b2f25cd8d20\") " pod="openstack/barbican-api-6ff6f58c8-vtpc6" Feb 03 08:55:40 crc kubenswrapper[4998]: I0203 08:55:40.139995 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/551dd212-c750-41d2-8eab-7b2f25cd8d20-combined-ca-bundle\") pod \"barbican-api-6ff6f58c8-vtpc6\" (UID: \"551dd212-c750-41d2-8eab-7b2f25cd8d20\") " pod="openstack/barbican-api-6ff6f58c8-vtpc6" Feb 03 08:55:40 crc kubenswrapper[4998]: I0203 08:55:40.146390 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/551dd212-c750-41d2-8eab-7b2f25cd8d20-config-data\") pod \"barbican-api-6ff6f58c8-vtpc6\" (UID: \"551dd212-c750-41d2-8eab-7b2f25cd8d20\") " pod="openstack/barbican-api-6ff6f58c8-vtpc6" Feb 03 08:55:40 crc 
kubenswrapper[4998]: I0203 08:55:40.152454 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hlklj\" (UniqueName: \"kubernetes.io/projected/551dd212-c750-41d2-8eab-7b2f25cd8d20-kube-api-access-hlklj\") pod \"barbican-api-6ff6f58c8-vtpc6\" (UID: \"551dd212-c750-41d2-8eab-7b2f25cd8d20\") " pod="openstack/barbican-api-6ff6f58c8-vtpc6" Feb 03 08:55:40 crc kubenswrapper[4998]: I0203 08:55:40.309476 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6ff6f58c8-vtpc6" Feb 03 08:55:40 crc kubenswrapper[4998]: I0203 08:55:40.512857 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-649f778c97-ksh5g"] Feb 03 08:55:40 crc kubenswrapper[4998]: I0203 08:55:40.520125 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-d84f77cbd-f74wg"] Feb 03 08:55:40 crc kubenswrapper[4998]: I0203 08:55:40.666415 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55df84ddd5-j4nh8"] Feb 03 08:55:40 crc kubenswrapper[4998]: I0203 08:55:40.807403 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6ff6f58c8-vtpc6"] Feb 03 08:55:41 crc kubenswrapper[4998]: I0203 08:55:41.477656 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-d84f77cbd-f74wg" event={"ID":"e9db618d-21b6-4555-9c37-d25b45669164","Type":"ContainerStarted","Data":"9e39a77088a6ee4e4a2ed932f394cfe6b343c7675eb72f6b53ca8b69b30c8377"} Feb 03 08:55:41 crc kubenswrapper[4998]: I0203 08:55:41.482014 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-649f778c97-ksh5g" event={"ID":"5c31eb51-94cb-4eee-b8e2-b1ae44f370ad","Type":"ContainerStarted","Data":"8b01f0615ed1cebf877173241c32e77004147ec95e780dadfb568faed12357ed"} Feb 03 08:55:41 crc kubenswrapper[4998]: I0203 08:55:41.484636 4998 generic.go:334] "Generic (PLEG): container finished" podID="a3c0768e-90fd-40da-90ff-67a8c5022d07" containerID="ec05834b227957cc620089f1a6c675ccebead4c58326835d00ca5a118471bf9a" exitCode=0 Feb 03 08:55:41 crc kubenswrapper[4998]: I0203 08:55:41.484671 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55df84ddd5-j4nh8" event={"ID":"a3c0768e-90fd-40da-90ff-67a8c5022d07","Type":"ContainerDied","Data":"ec05834b227957cc620089f1a6c675ccebead4c58326835d00ca5a118471bf9a"} Feb 03 08:55:41 crc kubenswrapper[4998]: I0203 08:55:41.484708 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55df84ddd5-j4nh8" event={"ID":"a3c0768e-90fd-40da-90ff-67a8c5022d07","Type":"ContainerStarted","Data":"90c4ff50f252c3fc90da058c3ddd4ee44459ad0f8aa927e488caa21e46beb7e4"} Feb 03 08:55:41 crc kubenswrapper[4998]: I0203 08:55:41.487001 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6ff6f58c8-vtpc6" event={"ID":"551dd212-c750-41d2-8eab-7b2f25cd8d20","Type":"ContainerStarted","Data":"d1b3b6a79aff35bce10e3c0d05954eb205c0cd5d848add1df3931e7ea829693e"} Feb 03 08:55:41 crc kubenswrapper[4998]: I0203 08:55:41.487073 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6ff6f58c8-vtpc6" event={"ID":"551dd212-c750-41d2-8eab-7b2f25cd8d20","Type":"ContainerStarted","Data":"2bbeed0f4f66c4238c10aad4735ece18f7218230243e4dc1c5a91eb1b1ffada5"} Feb 03 08:55:42 crc kubenswrapper[4998]: I0203 08:55:42.500463 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/barbican-api-6ff6f58c8-vtpc6" event={"ID":"551dd212-c750-41d2-8eab-7b2f25cd8d20","Type":"ContainerStarted","Data":"c20e58783c2d2e72f8df3ffa8273d32a4adcb1578c39c42cd00891161499bf3e"} Feb 03 08:55:42 crc kubenswrapper[4998]: I0203 08:55:42.501132 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6ff6f58c8-vtpc6" Feb 03 08:55:42 crc kubenswrapper[4998]: I0203 08:55:42.501159 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6ff6f58c8-vtpc6" Feb 03 08:55:42 crc kubenswrapper[4998]: I0203 08:55:42.502481 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-d84f77cbd-f74wg" event={"ID":"e9db618d-21b6-4555-9c37-d25b45669164","Type":"ContainerStarted","Data":"d917d70e836fe7a62a988f33d64825113150afec93d6e30ceedd45d2c2463d77"} Feb 03 08:55:42 crc kubenswrapper[4998]: I0203 08:55:42.502521 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-d84f77cbd-f74wg" event={"ID":"e9db618d-21b6-4555-9c37-d25b45669164","Type":"ContainerStarted","Data":"d1a8b549d3d2fd330e3df0886a21ae93af2dbd9aea412e0d5f7191131e678e28"} Feb 03 08:55:42 crc kubenswrapper[4998]: I0203 08:55:42.504818 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-649f778c97-ksh5g" event={"ID":"5c31eb51-94cb-4eee-b8e2-b1ae44f370ad","Type":"ContainerStarted","Data":"9ecb9958d42d3f07e4430a3ae9588caa68d885e92c21629f54adf1f0d9567e1a"} Feb 03 08:55:42 crc kubenswrapper[4998]: I0203 08:55:42.504852 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-649f778c97-ksh5g" event={"ID":"5c31eb51-94cb-4eee-b8e2-b1ae44f370ad","Type":"ContainerStarted","Data":"88cc13b2cc7d5998499b80e9171bfb975a3fd01d86c31ba1e512cf48ff82b0d7"} Feb 03 08:55:42 crc kubenswrapper[4998]: I0203 08:55:42.506964 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55df84ddd5-j4nh8" event={"ID":"a3c0768e-90fd-40da-90ff-67a8c5022d07","Type":"ContainerStarted","Data":"91d9dcda05fc7ab18eb488cffb7ab884cc8641cbe38e2630c178c8d2edf65282"} Feb 03 08:55:42 crc kubenswrapper[4998]: I0203 08:55:42.507125 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-55df84ddd5-j4nh8" Feb 03 08:55:42 crc kubenswrapper[4998]: I0203 08:55:42.523522 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-6ff6f58c8-vtpc6" podStartSLOduration=3.523496508 podStartE2EDuration="3.523496508s" podCreationTimestamp="2026-02-03 08:55:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 08:55:42.520308398 +0000 UTC m=+7780.807002204" watchObservedRunningTime="2026-02-03 08:55:42.523496508 +0000 UTC m=+7780.810190324" Feb 03 08:55:42 crc kubenswrapper[4998]: I0203 08:55:42.544068 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-d84f77cbd-f74wg" podStartSLOduration=2.4208090159999998 podStartE2EDuration="3.544053882s" podCreationTimestamp="2026-02-03 08:55:39 +0000 UTC" firstStartedPulling="2026-02-03 08:55:40.533162606 +0000 UTC m=+7778.819856412" lastFinishedPulling="2026-02-03 08:55:41.656407472 +0000 UTC m=+7779.943101278" observedRunningTime="2026-02-03 08:55:42.540755368 +0000 UTC m=+7780.827449184" watchObservedRunningTime="2026-02-03 08:55:42.544053882 +0000 UTC 
m=+7780.830747688" Feb 03 08:55:42 crc kubenswrapper[4998]: I0203 08:55:42.569515 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-649f778c97-ksh5g" podStartSLOduration=2.446159996 podStartE2EDuration="3.569490944s" podCreationTimestamp="2026-02-03 08:55:39 +0000 UTC" firstStartedPulling="2026-02-03 08:55:40.521613448 +0000 UTC m=+7778.808307264" lastFinishedPulling="2026-02-03 08:55:41.644944406 +0000 UTC m=+7779.931638212" observedRunningTime="2026-02-03 08:55:42.558514512 +0000 UTC m=+7780.845208338" watchObservedRunningTime="2026-02-03 08:55:42.569490944 +0000 UTC m=+7780.856184760" Feb 03 08:55:42 crc kubenswrapper[4998]: I0203 08:55:42.589804 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-55df84ddd5-j4nh8" podStartSLOduration=3.589770219 podStartE2EDuration="3.589770219s" podCreationTimestamp="2026-02-03 08:55:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 08:55:42.575794543 +0000 UTC m=+7780.862488369" watchObservedRunningTime="2026-02-03 08:55:42.589770219 +0000 UTC m=+7780.876464025" Feb 03 08:55:43 crc kubenswrapper[4998]: I0203 08:55:43.406204 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-7cnkr"] Feb 03 08:55:43 crc kubenswrapper[4998]: I0203 08:55:43.408753 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7cnkr" Feb 03 08:55:43 crc kubenswrapper[4998]: I0203 08:55:43.420922 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7cnkr"] Feb 03 08:55:43 crc kubenswrapper[4998]: I0203 08:55:43.504041 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a00c49a7-4e93-4a5d-b594-41d4fcdc46c5-utilities\") pod \"community-operators-7cnkr\" (UID: \"a00c49a7-4e93-4a5d-b594-41d4fcdc46c5\") " pod="openshift-marketplace/community-operators-7cnkr" Feb 03 08:55:43 crc kubenswrapper[4998]: I0203 08:55:43.504377 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8wgr\" (UniqueName: \"kubernetes.io/projected/a00c49a7-4e93-4a5d-b594-41d4fcdc46c5-kube-api-access-t8wgr\") pod \"community-operators-7cnkr\" (UID: \"a00c49a7-4e93-4a5d-b594-41d4fcdc46c5\") " pod="openshift-marketplace/community-operators-7cnkr" Feb 03 08:55:43 crc kubenswrapper[4998]: I0203 08:55:43.504446 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a00c49a7-4e93-4a5d-b594-41d4fcdc46c5-catalog-content\") pod \"community-operators-7cnkr\" (UID: \"a00c49a7-4e93-4a5d-b594-41d4fcdc46c5\") " pod="openshift-marketplace/community-operators-7cnkr" Feb 03 08:55:43 crc kubenswrapper[4998]: I0203 08:55:43.607131 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8wgr\" (UniqueName: \"kubernetes.io/projected/a00c49a7-4e93-4a5d-b594-41d4fcdc46c5-kube-api-access-t8wgr\") pod \"community-operators-7cnkr\" (UID: \"a00c49a7-4e93-4a5d-b594-41d4fcdc46c5\") " pod="openshift-marketplace/community-operators-7cnkr" Feb 03 08:55:43 crc kubenswrapper[4998]: I0203 08:55:43.607525 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a00c49a7-4e93-4a5d-b594-41d4fcdc46c5-catalog-content\") pod \"community-operators-7cnkr\" (UID: \"a00c49a7-4e93-4a5d-b594-41d4fcdc46c5\") " pod="openshift-marketplace/community-operators-7cnkr" Feb 03 08:55:43 crc kubenswrapper[4998]: I0203 08:55:43.608043 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a00c49a7-4e93-4a5d-b594-41d4fcdc46c5-utilities\") pod \"community-operators-7cnkr\" (UID: \"a00c49a7-4e93-4a5d-b594-41d4fcdc46c5\") " pod="openshift-marketplace/community-operators-7cnkr" Feb 03 08:55:43 crc kubenswrapper[4998]: I0203 08:55:43.608152 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a00c49a7-4e93-4a5d-b594-41d4fcdc46c5-catalog-content\") pod \"community-operators-7cnkr\" (UID: \"a00c49a7-4e93-4a5d-b594-41d4fcdc46c5\") " pod="openshift-marketplace/community-operators-7cnkr" Feb 03 08:55:43 crc kubenswrapper[4998]: I0203 08:55:43.608505 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a00c49a7-4e93-4a5d-b594-41d4fcdc46c5-utilities\") pod \"community-operators-7cnkr\" (UID: \"a00c49a7-4e93-4a5d-b594-41d4fcdc46c5\") " pod="openshift-marketplace/community-operators-7cnkr" Feb 03 08:55:43 crc kubenswrapper[4998]: I0203 08:55:43.637578 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8wgr\" (UniqueName: \"kubernetes.io/projected/a00c49a7-4e93-4a5d-b594-41d4fcdc46c5-kube-api-access-t8wgr\") pod \"community-operators-7cnkr\" (UID: \"a00c49a7-4e93-4a5d-b594-41d4fcdc46c5\") " pod="openshift-marketplace/community-operators-7cnkr" Feb 03 08:55:43 crc kubenswrapper[4998]: I0203 08:55:43.733272 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7cnkr" Feb 03 08:55:44 crc kubenswrapper[4998]: I0203 08:55:44.208698 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7cnkr"] Feb 03 08:55:44 crc kubenswrapper[4998]: I0203 08:55:44.523810 4998 generic.go:334] "Generic (PLEG): container finished" podID="a00c49a7-4e93-4a5d-b594-41d4fcdc46c5" containerID="1f1e48233bbb9f48b333f2205aa751353c325d1b82e75c123e8fa3ac4048d095" exitCode=0 Feb 03 08:55:44 crc kubenswrapper[4998]: I0203 08:55:44.523947 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7cnkr" event={"ID":"a00c49a7-4e93-4a5d-b594-41d4fcdc46c5","Type":"ContainerDied","Data":"1f1e48233bbb9f48b333f2205aa751353c325d1b82e75c123e8fa3ac4048d095"} Feb 03 08:55:44 crc kubenswrapper[4998]: I0203 08:55:44.524000 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7cnkr" event={"ID":"a00c49a7-4e93-4a5d-b594-41d4fcdc46c5","Type":"ContainerStarted","Data":"ab40cd62865527da574d927c7fd958a12321ecb93e36e6662fd5c4bc787cacfe"} Feb 03 08:55:46 crc kubenswrapper[4998]: I0203 08:55:46.546087 4998 generic.go:334] "Generic (PLEG): container finished" podID="a00c49a7-4e93-4a5d-b594-41d4fcdc46c5" containerID="52852da2970ef587fb6596fc851406c4b41904b66c95b507c2405184c40f6aed" exitCode=0 Feb 03 08:55:46 crc kubenswrapper[4998]: I0203 08:55:46.546179 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7cnkr" event={"ID":"a00c49a7-4e93-4a5d-b594-41d4fcdc46c5","Type":"ContainerDied","Data":"52852da2970ef587fb6596fc851406c4b41904b66c95b507c2405184c40f6aed"} Feb 03 08:55:47 crc kubenswrapper[4998]: I0203 08:55:47.579330 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7cnkr" event={"ID":"a00c49a7-4e93-4a5d-b594-41d4fcdc46c5","Type":"ContainerStarted","Data":"bd8a05904b034108ea6b31653b36f1719bf1122fc9ff27c1f0d5b65c0ef59416"} Feb 03 08:55:47 crc kubenswrapper[4998]: I0203 08:55:47.607279 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-7cnkr" podStartSLOduration=2.114746064 podStartE2EDuration="4.607259126s" podCreationTimestamp="2026-02-03 08:55:43 +0000 UTC" firstStartedPulling="2026-02-03 08:55:44.526684375 +0000 UTC m=+7782.813378201" lastFinishedPulling="2026-02-03 08:55:47.019197457 +0000 UTC m=+7785.305891263" observedRunningTime="2026-02-03 08:55:47.603034976 +0000 UTC m=+7785.889728822" watchObservedRunningTime="2026-02-03 08:55:47.607259126 +0000 UTC m=+7785.893952932" Feb 03 08:55:50 crc kubenswrapper[4998]: I0203 08:55:50.101776 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-55df84ddd5-j4nh8" Feb 03 08:55:50 crc kubenswrapper[4998]: I0203 08:55:50.176166 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7587496b69-crmkf"] Feb 03 08:55:50 crc kubenswrapper[4998]: I0203 08:55:50.176986 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7587496b69-crmkf" podUID="f08be859-9c65-4eae-8359-3107b76fc9df" containerName="dnsmasq-dns" containerID="cri-o://306e2f3df0997d678740f1a88e02da066580796a1f91cf0f158f87318ef5794c" gracePeriod=10 Feb 03 08:55:51 crc kubenswrapper[4998]: I0203 08:55:51.136049 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7587496b69-crmkf" Feb 03 08:55:51 crc kubenswrapper[4998]: I0203 08:55:51.244601 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f08be859-9c65-4eae-8359-3107b76fc9df-dns-svc\") pod \"f08be859-9c65-4eae-8359-3107b76fc9df\" (UID: \"f08be859-9c65-4eae-8359-3107b76fc9df\") " Feb 03 08:55:51 crc kubenswrapper[4998]: I0203 08:55:51.244689 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f08be859-9c65-4eae-8359-3107b76fc9df-config\") pod \"f08be859-9c65-4eae-8359-3107b76fc9df\" (UID: \"f08be859-9c65-4eae-8359-3107b76fc9df\") " Feb 03 08:55:51 crc kubenswrapper[4998]: I0203 08:55:51.244811 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f08be859-9c65-4eae-8359-3107b76fc9df-ovsdbserver-nb\") pod \"f08be859-9c65-4eae-8359-3107b76fc9df\" (UID: \"f08be859-9c65-4eae-8359-3107b76fc9df\") " Feb 03 08:55:51 crc kubenswrapper[4998]: I0203 08:55:51.244886 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f08be859-9c65-4eae-8359-3107b76fc9df-ovsdbserver-sb\") pod \"f08be859-9c65-4eae-8359-3107b76fc9df\" (UID: \"f08be859-9c65-4eae-8359-3107b76fc9df\") " Feb 03 08:55:51 crc kubenswrapper[4998]: I0203 08:55:51.244974 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7npc4\" (UniqueName: \"kubernetes.io/projected/f08be859-9c65-4eae-8359-3107b76fc9df-kube-api-access-7npc4\") pod \"f08be859-9c65-4eae-8359-3107b76fc9df\" (UID: \"f08be859-9c65-4eae-8359-3107b76fc9df\") " Feb 03 08:55:51 crc kubenswrapper[4998]: I0203 08:55:51.251119 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f08be859-9c65-4eae-8359-3107b76fc9df-kube-api-access-7npc4" (OuterVolumeSpecName: "kube-api-access-7npc4") pod "f08be859-9c65-4eae-8359-3107b76fc9df" (UID: "f08be859-9c65-4eae-8359-3107b76fc9df"). InnerVolumeSpecName "kube-api-access-7npc4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:55:51 crc kubenswrapper[4998]: I0203 08:55:51.285528 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f08be859-9c65-4eae-8359-3107b76fc9df-config" (OuterVolumeSpecName: "config") pod "f08be859-9c65-4eae-8359-3107b76fc9df" (UID: "f08be859-9c65-4eae-8359-3107b76fc9df"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:55:51 crc kubenswrapper[4998]: I0203 08:55:51.286421 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f08be859-9c65-4eae-8359-3107b76fc9df-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f08be859-9c65-4eae-8359-3107b76fc9df" (UID: "f08be859-9c65-4eae-8359-3107b76fc9df"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:55:51 crc kubenswrapper[4998]: I0203 08:55:51.298764 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f08be859-9c65-4eae-8359-3107b76fc9df-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f08be859-9c65-4eae-8359-3107b76fc9df" (UID: "f08be859-9c65-4eae-8359-3107b76fc9df"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:55:51 crc kubenswrapper[4998]: I0203 08:55:51.306189 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f08be859-9c65-4eae-8359-3107b76fc9df-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f08be859-9c65-4eae-8359-3107b76fc9df" (UID: "f08be859-9c65-4eae-8359-3107b76fc9df"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:55:51 crc kubenswrapper[4998]: I0203 08:55:51.348033 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f08be859-9c65-4eae-8359-3107b76fc9df-config\") on node \"crc\" DevicePath \"\"" Feb 03 08:55:51 crc kubenswrapper[4998]: I0203 08:55:51.348605 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f08be859-9c65-4eae-8359-3107b76fc9df-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 03 08:55:51 crc kubenswrapper[4998]: I0203 08:55:51.348738 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f08be859-9c65-4eae-8359-3107b76fc9df-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 03 08:55:51 crc kubenswrapper[4998]: I0203 08:55:51.348847 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7npc4\" (UniqueName: \"kubernetes.io/projected/f08be859-9c65-4eae-8359-3107b76fc9df-kube-api-access-7npc4\") on node \"crc\" DevicePath \"\"" Feb 03 08:55:51 crc kubenswrapper[4998]: I0203 08:55:51.348933 4998 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f08be859-9c65-4eae-8359-3107b76fc9df-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 08:55:51 crc kubenswrapper[4998]: I0203 08:55:51.616631 4998 generic.go:334] "Generic (PLEG): container finished" podID="f08be859-9c65-4eae-8359-3107b76fc9df" containerID="306e2f3df0997d678740f1a88e02da066580796a1f91cf0f158f87318ef5794c" exitCode=0 Feb 03 08:55:51 crc kubenswrapper[4998]: I0203 08:55:51.616724 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7587496b69-crmkf" event={"ID":"f08be859-9c65-4eae-8359-3107b76fc9df","Type":"ContainerDied","Data":"306e2f3df0997d678740f1a88e02da066580796a1f91cf0f158f87318ef5794c"} Feb 03 08:55:51 crc kubenswrapper[4998]: I0203 08:55:51.616776 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7587496b69-crmkf" event={"ID":"f08be859-9c65-4eae-8359-3107b76fc9df","Type":"ContainerDied","Data":"281499c922dd231a2229aad79c3e627997a785c5a00aecdc7149c4373438f6c2"} Feb 03 08:55:51 crc kubenswrapper[4998]: I0203 08:55:51.616862 4998 scope.go:117] "RemoveContainer" containerID="306e2f3df0997d678740f1a88e02da066580796a1f91cf0f158f87318ef5794c" Feb 03 08:55:51 crc kubenswrapper[4998]: I0203 08:55:51.617158 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7587496b69-crmkf" Feb 03 08:55:51 crc kubenswrapper[4998]: I0203 08:55:51.642982 4998 scope.go:117] "RemoveContainer" containerID="b8aa189fd4a2d943e852b93405ac4290d52dd510a095ed5323126bfd8e51e473" Feb 03 08:55:51 crc kubenswrapper[4998]: I0203 08:55:51.668373 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7587496b69-crmkf"] Feb 03 08:55:51 crc kubenswrapper[4998]: I0203 08:55:51.677601 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7587496b69-crmkf"] Feb 03 08:55:51 crc kubenswrapper[4998]: I0203 08:55:51.691435 4998 scope.go:117] "RemoveContainer" containerID="306e2f3df0997d678740f1a88e02da066580796a1f91cf0f158f87318ef5794c" Feb 03 08:55:51 crc kubenswrapper[4998]: E0203 08:55:51.691951 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"306e2f3df0997d678740f1a88e02da066580796a1f91cf0f158f87318ef5794c\": container with ID starting with 306e2f3df0997d678740f1a88e02da066580796a1f91cf0f158f87318ef5794c not found: ID does not exist" containerID="306e2f3df0997d678740f1a88e02da066580796a1f91cf0f158f87318ef5794c" Feb 03 08:55:51 crc kubenswrapper[4998]: I0203 08:55:51.692026 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"306e2f3df0997d678740f1a88e02da066580796a1f91cf0f158f87318ef5794c"} err="failed to get container status \"306e2f3df0997d678740f1a88e02da066580796a1f91cf0f158f87318ef5794c\": rpc error: code = NotFound desc = could not find container \"306e2f3df0997d678740f1a88e02da066580796a1f91cf0f158f87318ef5794c\": container with ID starting with 306e2f3df0997d678740f1a88e02da066580796a1f91cf0f158f87318ef5794c not found: ID does not exist" Feb 03 08:55:51 crc kubenswrapper[4998]: I0203 08:55:51.692068 4998 scope.go:117] "RemoveContainer" containerID="b8aa189fd4a2d943e852b93405ac4290d52dd510a095ed5323126bfd8e51e473" Feb 03 08:55:51 crc kubenswrapper[4998]: E0203 08:55:51.692472 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b8aa189fd4a2d943e852b93405ac4290d52dd510a095ed5323126bfd8e51e473\": container with ID starting with b8aa189fd4a2d943e852b93405ac4290d52dd510a095ed5323126bfd8e51e473 not found: ID does not exist" containerID="b8aa189fd4a2d943e852b93405ac4290d52dd510a095ed5323126bfd8e51e473" Feb 03 08:55:51 crc kubenswrapper[4998]: I0203 08:55:51.692513 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b8aa189fd4a2d943e852b93405ac4290d52dd510a095ed5323126bfd8e51e473"} err="failed to get container status \"b8aa189fd4a2d943e852b93405ac4290d52dd510a095ed5323126bfd8e51e473\": rpc error: code = NotFound desc = could not find container \"b8aa189fd4a2d943e852b93405ac4290d52dd510a095ed5323126bfd8e51e473\": container with ID starting with b8aa189fd4a2d943e852b93405ac4290d52dd510a095ed5323126bfd8e51e473 not found: ID does not exist" Feb 03 08:55:51 crc kubenswrapper[4998]: I0203 08:55:51.835457 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6ff6f58c8-vtpc6" Feb 03 08:55:51 crc kubenswrapper[4998]: I0203 08:55:51.899417 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6ff6f58c8-vtpc6" Feb 03 08:55:52 crc kubenswrapper[4998]: I0203 08:55:52.448145 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="f08be859-9c65-4eae-8359-3107b76fc9df" path="/var/lib/kubelet/pods/f08be859-9c65-4eae-8359-3107b76fc9df/volumes" Feb 03 08:55:53 crc kubenswrapper[4998]: I0203 08:55:53.734375 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-7cnkr" Feb 03 08:55:53 crc kubenswrapper[4998]: I0203 08:55:53.737440 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-7cnkr" Feb 03 08:55:53 crc kubenswrapper[4998]: I0203 08:55:53.776541 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-7cnkr" Feb 03 08:55:54 crc kubenswrapper[4998]: I0203 08:55:54.703640 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-7cnkr" Feb 03 08:55:54 crc kubenswrapper[4998]: I0203 08:55:54.755076 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7cnkr"] Feb 03 08:55:56 crc kubenswrapper[4998]: I0203 08:55:56.680898 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-7cnkr" podUID="a00c49a7-4e93-4a5d-b594-41d4fcdc46c5" containerName="registry-server" containerID="cri-o://bd8a05904b034108ea6b31653b36f1719bf1122fc9ff27c1f0d5b65c0ef59416" gracePeriod=2 Feb 03 08:55:57 crc kubenswrapper[4998]: I0203 08:55:57.092698 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7cnkr" Feb 03 08:55:57 crc kubenswrapper[4998]: I0203 08:55:57.262858 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a00c49a7-4e93-4a5d-b594-41d4fcdc46c5-catalog-content\") pod \"a00c49a7-4e93-4a5d-b594-41d4fcdc46c5\" (UID: \"a00c49a7-4e93-4a5d-b594-41d4fcdc46c5\") " Feb 03 08:55:57 crc kubenswrapper[4998]: I0203 08:55:57.262927 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a00c49a7-4e93-4a5d-b594-41d4fcdc46c5-utilities\") pod \"a00c49a7-4e93-4a5d-b594-41d4fcdc46c5\" (UID: \"a00c49a7-4e93-4a5d-b594-41d4fcdc46c5\") " Feb 03 08:55:57 crc kubenswrapper[4998]: I0203 08:55:57.263037 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t8wgr\" (UniqueName: \"kubernetes.io/projected/a00c49a7-4e93-4a5d-b594-41d4fcdc46c5-kube-api-access-t8wgr\") pod \"a00c49a7-4e93-4a5d-b594-41d4fcdc46c5\" (UID: \"a00c49a7-4e93-4a5d-b594-41d4fcdc46c5\") " Feb 03 08:55:57 crc kubenswrapper[4998]: I0203 08:55:57.263951 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a00c49a7-4e93-4a5d-b594-41d4fcdc46c5-utilities" (OuterVolumeSpecName: "utilities") pod "a00c49a7-4e93-4a5d-b594-41d4fcdc46c5" (UID: "a00c49a7-4e93-4a5d-b594-41d4fcdc46c5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:55:57 crc kubenswrapper[4998]: I0203 08:55:57.268853 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a00c49a7-4e93-4a5d-b594-41d4fcdc46c5-kube-api-access-t8wgr" (OuterVolumeSpecName: "kube-api-access-t8wgr") pod "a00c49a7-4e93-4a5d-b594-41d4fcdc46c5" (UID: "a00c49a7-4e93-4a5d-b594-41d4fcdc46c5"). InnerVolumeSpecName "kube-api-access-t8wgr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:55:57 crc kubenswrapper[4998]: I0203 08:55:57.360683 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a00c49a7-4e93-4a5d-b594-41d4fcdc46c5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a00c49a7-4e93-4a5d-b594-41d4fcdc46c5" (UID: "a00c49a7-4e93-4a5d-b594-41d4fcdc46c5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:55:57 crc kubenswrapper[4998]: I0203 08:55:57.364878 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a00c49a7-4e93-4a5d-b594-41d4fcdc46c5-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 08:55:57 crc kubenswrapper[4998]: I0203 08:55:57.364915 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a00c49a7-4e93-4a5d-b594-41d4fcdc46c5-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 08:55:57 crc kubenswrapper[4998]: I0203 08:55:57.364925 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t8wgr\" (UniqueName: \"kubernetes.io/projected/a00c49a7-4e93-4a5d-b594-41d4fcdc46c5-kube-api-access-t8wgr\") on node \"crc\" DevicePath \"\"" Feb 03 08:55:57 crc kubenswrapper[4998]: I0203 08:55:57.689613 4998 generic.go:334] "Generic (PLEG): container finished" podID="a00c49a7-4e93-4a5d-b594-41d4fcdc46c5" containerID="bd8a05904b034108ea6b31653b36f1719bf1122fc9ff27c1f0d5b65c0ef59416" exitCode=0 Feb 03 08:55:57 crc kubenswrapper[4998]: I0203 08:55:57.689652 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7cnkr" event={"ID":"a00c49a7-4e93-4a5d-b594-41d4fcdc46c5","Type":"ContainerDied","Data":"bd8a05904b034108ea6b31653b36f1719bf1122fc9ff27c1f0d5b65c0ef59416"} Feb 03 08:55:57 crc kubenswrapper[4998]: I0203 08:55:57.689677 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7cnkr" event={"ID":"a00c49a7-4e93-4a5d-b594-41d4fcdc46c5","Type":"ContainerDied","Data":"ab40cd62865527da574d927c7fd958a12321ecb93e36e6662fd5c4bc787cacfe"} Feb 03 08:55:57 crc kubenswrapper[4998]: I0203 08:55:57.689692 4998 scope.go:117] "RemoveContainer" containerID="bd8a05904b034108ea6b31653b36f1719bf1122fc9ff27c1f0d5b65c0ef59416" Feb 03 08:55:57 crc kubenswrapper[4998]: I0203 08:55:57.689689 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7cnkr" Feb 03 08:55:57 crc kubenswrapper[4998]: I0203 08:55:57.730241 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7cnkr"] Feb 03 08:55:57 crc kubenswrapper[4998]: I0203 08:55:57.736410 4998 scope.go:117] "RemoveContainer" containerID="52852da2970ef587fb6596fc851406c4b41904b66c95b507c2405184c40f6aed" Feb 03 08:55:57 crc kubenswrapper[4998]: I0203 08:55:57.739678 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-7cnkr"] Feb 03 08:55:57 crc kubenswrapper[4998]: I0203 08:55:57.757712 4998 scope.go:117] "RemoveContainer" containerID="1f1e48233bbb9f48b333f2205aa751353c325d1b82e75c123e8fa3ac4048d095" Feb 03 08:55:57 crc kubenswrapper[4998]: I0203 08:55:57.796072 4998 scope.go:117] "RemoveContainer" containerID="bd8a05904b034108ea6b31653b36f1719bf1122fc9ff27c1f0d5b65c0ef59416" Feb 03 08:55:57 crc kubenswrapper[4998]: E0203 08:55:57.796542 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd8a05904b034108ea6b31653b36f1719bf1122fc9ff27c1f0d5b65c0ef59416\": container with ID starting with bd8a05904b034108ea6b31653b36f1719bf1122fc9ff27c1f0d5b65c0ef59416 not found: ID does not exist" containerID="bd8a05904b034108ea6b31653b36f1719bf1122fc9ff27c1f0d5b65c0ef59416" Feb 03 08:55:57 crc kubenswrapper[4998]: I0203 08:55:57.796590 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd8a05904b034108ea6b31653b36f1719bf1122fc9ff27c1f0d5b65c0ef59416"} err="failed to get container status \"bd8a05904b034108ea6b31653b36f1719bf1122fc9ff27c1f0d5b65c0ef59416\": rpc error: code = NotFound desc = could not find container \"bd8a05904b034108ea6b31653b36f1719bf1122fc9ff27c1f0d5b65c0ef59416\": container with ID starting with bd8a05904b034108ea6b31653b36f1719bf1122fc9ff27c1f0d5b65c0ef59416 not found: ID does not exist" Feb 03 08:55:57 crc kubenswrapper[4998]: I0203 08:55:57.796647 4998 scope.go:117] "RemoveContainer" containerID="52852da2970ef587fb6596fc851406c4b41904b66c95b507c2405184c40f6aed" Feb 03 08:55:57 crc kubenswrapper[4998]: E0203 08:55:57.797263 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"52852da2970ef587fb6596fc851406c4b41904b66c95b507c2405184c40f6aed\": container with ID starting with 52852da2970ef587fb6596fc851406c4b41904b66c95b507c2405184c40f6aed not found: ID does not exist" containerID="52852da2970ef587fb6596fc851406c4b41904b66c95b507c2405184c40f6aed" Feb 03 08:55:57 crc kubenswrapper[4998]: I0203 08:55:57.797307 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52852da2970ef587fb6596fc851406c4b41904b66c95b507c2405184c40f6aed"} err="failed to get container status \"52852da2970ef587fb6596fc851406c4b41904b66c95b507c2405184c40f6aed\": rpc error: code = NotFound desc = could not find container \"52852da2970ef587fb6596fc851406c4b41904b66c95b507c2405184c40f6aed\": container with ID starting with 52852da2970ef587fb6596fc851406c4b41904b66c95b507c2405184c40f6aed not found: ID does not exist" Feb 03 08:55:57 crc kubenswrapper[4998]: I0203 08:55:57.797339 4998 scope.go:117] "RemoveContainer" containerID="1f1e48233bbb9f48b333f2205aa751353c325d1b82e75c123e8fa3ac4048d095" Feb 03 08:55:57 crc kubenswrapper[4998]: E0203 08:55:57.797664 4998 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"1f1e48233bbb9f48b333f2205aa751353c325d1b82e75c123e8fa3ac4048d095\": container with ID starting with 1f1e48233bbb9f48b333f2205aa751353c325d1b82e75c123e8fa3ac4048d095 not found: ID does not exist" containerID="1f1e48233bbb9f48b333f2205aa751353c325d1b82e75c123e8fa3ac4048d095" Feb 03 08:55:57 crc kubenswrapper[4998]: I0203 08:55:57.797720 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f1e48233bbb9f48b333f2205aa751353c325d1b82e75c123e8fa3ac4048d095"} err="failed to get container status \"1f1e48233bbb9f48b333f2205aa751353c325d1b82e75c123e8fa3ac4048d095\": rpc error: code = NotFound desc = could not find container \"1f1e48233bbb9f48b333f2205aa751353c325d1b82e75c123e8fa3ac4048d095\": container with ID starting with 1f1e48233bbb9f48b333f2205aa751353c325d1b82e75c123e8fa3ac4048d095 not found: ID does not exist" Feb 03 08:55:58 crc kubenswrapper[4998]: I0203 08:55:58.359445 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-qsjnb"] Feb 03 08:55:58 crc kubenswrapper[4998]: E0203 08:55:58.359927 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a00c49a7-4e93-4a5d-b594-41d4fcdc46c5" containerName="extract-content" Feb 03 08:55:58 crc kubenswrapper[4998]: I0203 08:55:58.359948 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="a00c49a7-4e93-4a5d-b594-41d4fcdc46c5" containerName="extract-content" Feb 03 08:55:58 crc kubenswrapper[4998]: E0203 08:55:58.359974 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a00c49a7-4e93-4a5d-b594-41d4fcdc46c5" containerName="registry-server" Feb 03 08:55:58 crc kubenswrapper[4998]: I0203 08:55:58.359981 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="a00c49a7-4e93-4a5d-b594-41d4fcdc46c5" containerName="registry-server" Feb 03 08:55:58 crc kubenswrapper[4998]: E0203 08:55:58.360007 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f08be859-9c65-4eae-8359-3107b76fc9df" containerName="dnsmasq-dns" Feb 03 08:55:58 crc kubenswrapper[4998]: I0203 08:55:58.360014 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f08be859-9c65-4eae-8359-3107b76fc9df" containerName="dnsmasq-dns" Feb 03 08:55:58 crc kubenswrapper[4998]: E0203 08:55:58.360028 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a00c49a7-4e93-4a5d-b594-41d4fcdc46c5" containerName="extract-utilities" Feb 03 08:55:58 crc kubenswrapper[4998]: I0203 08:55:58.360036 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="a00c49a7-4e93-4a5d-b594-41d4fcdc46c5" containerName="extract-utilities" Feb 03 08:55:58 crc kubenswrapper[4998]: E0203 08:55:58.360052 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f08be859-9c65-4eae-8359-3107b76fc9df" containerName="init" Feb 03 08:55:58 crc kubenswrapper[4998]: I0203 08:55:58.360061 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f08be859-9c65-4eae-8359-3107b76fc9df" containerName="init" Feb 03 08:55:58 crc kubenswrapper[4998]: I0203 08:55:58.360260 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="a00c49a7-4e93-4a5d-b594-41d4fcdc46c5" containerName="registry-server" Feb 03 08:55:58 crc kubenswrapper[4998]: I0203 08:55:58.360289 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="f08be859-9c65-4eae-8359-3107b76fc9df" containerName="dnsmasq-dns" Feb 03 08:55:58 crc kubenswrapper[4998]: I0203 08:55:58.361817 4998 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openstack/neutron-db-create-qsjnb" Feb 03 08:55:58 crc kubenswrapper[4998]: I0203 08:55:58.370013 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-qsjnb"] Feb 03 08:55:58 crc kubenswrapper[4998]: I0203 08:55:58.455508 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a00c49a7-4e93-4a5d-b594-41d4fcdc46c5" path="/var/lib/kubelet/pods/a00c49a7-4e93-4a5d-b594-41d4fcdc46c5/volumes" Feb 03 08:55:58 crc kubenswrapper[4998]: I0203 08:55:58.462651 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-e543-account-create-update-g8nc2"] Feb 03 08:55:58 crc kubenswrapper[4998]: I0203 08:55:58.464102 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-e543-account-create-update-g8nc2" Feb 03 08:55:58 crc kubenswrapper[4998]: I0203 08:55:58.466078 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Feb 03 08:55:58 crc kubenswrapper[4998]: I0203 08:55:58.470132 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-e543-account-create-update-g8nc2"] Feb 03 08:55:58 crc kubenswrapper[4998]: I0203 08:55:58.507765 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f86d75f0-cebd-4d4c-84b6-7350ff9ecd52-operator-scripts\") pod \"neutron-e543-account-create-update-g8nc2\" (UID: \"f86d75f0-cebd-4d4c-84b6-7350ff9ecd52\") " pod="openstack/neutron-e543-account-create-update-g8nc2" Feb 03 08:55:58 crc kubenswrapper[4998]: I0203 08:55:58.507886 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7g59v\" (UniqueName: \"kubernetes.io/projected/f86d75f0-cebd-4d4c-84b6-7350ff9ecd52-kube-api-access-7g59v\") pod \"neutron-e543-account-create-update-g8nc2\" (UID: \"f86d75f0-cebd-4d4c-84b6-7350ff9ecd52\") " pod="openstack/neutron-e543-account-create-update-g8nc2" Feb 03 08:55:58 crc kubenswrapper[4998]: I0203 08:55:58.507944 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fae72d4e-5c7f-4a3a-b59f-bc6e42338e97-operator-scripts\") pod \"neutron-db-create-qsjnb\" (UID: \"fae72d4e-5c7f-4a3a-b59f-bc6e42338e97\") " pod="openstack/neutron-db-create-qsjnb" Feb 03 08:55:58 crc kubenswrapper[4998]: I0203 08:55:58.508798 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btgwg\" (UniqueName: \"kubernetes.io/projected/fae72d4e-5c7f-4a3a-b59f-bc6e42338e97-kube-api-access-btgwg\") pod \"neutron-db-create-qsjnb\" (UID: \"fae72d4e-5c7f-4a3a-b59f-bc6e42338e97\") " pod="openstack/neutron-db-create-qsjnb" Feb 03 08:55:58 crc kubenswrapper[4998]: I0203 08:55:58.609742 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f86d75f0-cebd-4d4c-84b6-7350ff9ecd52-operator-scripts\") pod \"neutron-e543-account-create-update-g8nc2\" (UID: \"f86d75f0-cebd-4d4c-84b6-7350ff9ecd52\") " pod="openstack/neutron-e543-account-create-update-g8nc2" Feb 03 08:55:58 crc kubenswrapper[4998]: I0203 08:55:58.609841 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7g59v\" (UniqueName: 
\"kubernetes.io/projected/f86d75f0-cebd-4d4c-84b6-7350ff9ecd52-kube-api-access-7g59v\") pod \"neutron-e543-account-create-update-g8nc2\" (UID: \"f86d75f0-cebd-4d4c-84b6-7350ff9ecd52\") " pod="openstack/neutron-e543-account-create-update-g8nc2" Feb 03 08:55:58 crc kubenswrapper[4998]: I0203 08:55:58.609887 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fae72d4e-5c7f-4a3a-b59f-bc6e42338e97-operator-scripts\") pod \"neutron-db-create-qsjnb\" (UID: \"fae72d4e-5c7f-4a3a-b59f-bc6e42338e97\") " pod="openstack/neutron-db-create-qsjnb" Feb 03 08:55:58 crc kubenswrapper[4998]: I0203 08:55:58.609944 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btgwg\" (UniqueName: \"kubernetes.io/projected/fae72d4e-5c7f-4a3a-b59f-bc6e42338e97-kube-api-access-btgwg\") pod \"neutron-db-create-qsjnb\" (UID: \"fae72d4e-5c7f-4a3a-b59f-bc6e42338e97\") " pod="openstack/neutron-db-create-qsjnb" Feb 03 08:55:58 crc kubenswrapper[4998]: I0203 08:55:58.611527 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f86d75f0-cebd-4d4c-84b6-7350ff9ecd52-operator-scripts\") pod \"neutron-e543-account-create-update-g8nc2\" (UID: \"f86d75f0-cebd-4d4c-84b6-7350ff9ecd52\") " pod="openstack/neutron-e543-account-create-update-g8nc2" Feb 03 08:55:58 crc kubenswrapper[4998]: I0203 08:55:58.616154 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fae72d4e-5c7f-4a3a-b59f-bc6e42338e97-operator-scripts\") pod \"neutron-db-create-qsjnb\" (UID: \"fae72d4e-5c7f-4a3a-b59f-bc6e42338e97\") " pod="openstack/neutron-db-create-qsjnb" Feb 03 08:55:58 crc kubenswrapper[4998]: I0203 08:55:58.629176 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btgwg\" (UniqueName: \"kubernetes.io/projected/fae72d4e-5c7f-4a3a-b59f-bc6e42338e97-kube-api-access-btgwg\") pod \"neutron-db-create-qsjnb\" (UID: \"fae72d4e-5c7f-4a3a-b59f-bc6e42338e97\") " pod="openstack/neutron-db-create-qsjnb" Feb 03 08:55:58 crc kubenswrapper[4998]: I0203 08:55:58.634525 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7g59v\" (UniqueName: \"kubernetes.io/projected/f86d75f0-cebd-4d4c-84b6-7350ff9ecd52-kube-api-access-7g59v\") pod \"neutron-e543-account-create-update-g8nc2\" (UID: \"f86d75f0-cebd-4d4c-84b6-7350ff9ecd52\") " pod="openstack/neutron-e543-account-create-update-g8nc2" Feb 03 08:55:58 crc kubenswrapper[4998]: I0203 08:55:58.720643 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-qsjnb" Feb 03 08:55:58 crc kubenswrapper[4998]: I0203 08:55:58.785670 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-e543-account-create-update-g8nc2" Feb 03 08:55:59 crc kubenswrapper[4998]: W0203 08:55:59.761150 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfae72d4e_5c7f_4a3a_b59f_bc6e42338e97.slice/crio-4fa8f1af3abccf6aca14247fecdb46b055958930e61dff9f7ca9b4b53be218d6 WatchSource:0}: Error finding container 4fa8f1af3abccf6aca14247fecdb46b055958930e61dff9f7ca9b4b53be218d6: Status 404 returned error can't find the container with id 4fa8f1af3abccf6aca14247fecdb46b055958930e61dff9f7ca9b4b53be218d6 Feb 03 08:55:59 crc kubenswrapper[4998]: I0203 08:55:59.765928 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-qsjnb"] Feb 03 08:55:59 crc kubenswrapper[4998]: W0203 08:55:59.770092 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf86d75f0_cebd_4d4c_84b6_7350ff9ecd52.slice/crio-f19391b133ee744fc3328cf8b43d8dca770765c65b0f0164cf49d7c77f500090 WatchSource:0}: Error finding container f19391b133ee744fc3328cf8b43d8dca770765c65b0f0164cf49d7c77f500090: Status 404 returned error can't find the container with id f19391b133ee744fc3328cf8b43d8dca770765c65b0f0164cf49d7c77f500090 Feb 03 08:55:59 crc kubenswrapper[4998]: I0203 08:55:59.780635 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-e543-account-create-update-g8nc2"] Feb 03 08:56:00 crc kubenswrapper[4998]: I0203 08:56:00.722490 4998 generic.go:334] "Generic (PLEG): container finished" podID="fae72d4e-5c7f-4a3a-b59f-bc6e42338e97" containerID="8f9639a18e4113fbeb742ebf36081e3f4173a8bab979f62fb26e09c6141b158c" exitCode=0 Feb 03 08:56:00 crc kubenswrapper[4998]: I0203 08:56:00.722568 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-qsjnb" event={"ID":"fae72d4e-5c7f-4a3a-b59f-bc6e42338e97","Type":"ContainerDied","Data":"8f9639a18e4113fbeb742ebf36081e3f4173a8bab979f62fb26e09c6141b158c"} Feb 03 08:56:00 crc kubenswrapper[4998]: I0203 08:56:00.723000 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-qsjnb" event={"ID":"fae72d4e-5c7f-4a3a-b59f-bc6e42338e97","Type":"ContainerStarted","Data":"4fa8f1af3abccf6aca14247fecdb46b055958930e61dff9f7ca9b4b53be218d6"} Feb 03 08:56:00 crc kubenswrapper[4998]: I0203 08:56:00.726153 4998 generic.go:334] "Generic (PLEG): container finished" podID="f86d75f0-cebd-4d4c-84b6-7350ff9ecd52" containerID="308c95c905f9b61d093bef44cbdabd7f755e47b6921740866a7ece4f7927ad4a" exitCode=0 Feb 03 08:56:00 crc kubenswrapper[4998]: I0203 08:56:00.726227 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-e543-account-create-update-g8nc2" event={"ID":"f86d75f0-cebd-4d4c-84b6-7350ff9ecd52","Type":"ContainerDied","Data":"308c95c905f9b61d093bef44cbdabd7f755e47b6921740866a7ece4f7927ad4a"} Feb 03 08:56:00 crc kubenswrapper[4998]: I0203 08:56:00.726269 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-e543-account-create-update-g8nc2" event={"ID":"f86d75f0-cebd-4d4c-84b6-7350ff9ecd52","Type":"ContainerStarted","Data":"f19391b133ee744fc3328cf8b43d8dca770765c65b0f0164cf49d7c77f500090"} Feb 03 08:56:02 crc kubenswrapper[4998]: I0203 08:56:02.244070 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-e543-account-create-update-g8nc2" Feb 03 08:56:02 crc kubenswrapper[4998]: I0203 08:56:02.264282 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-qsjnb" Feb 03 08:56:02 crc kubenswrapper[4998]: I0203 08:56:02.381433 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-btgwg\" (UniqueName: \"kubernetes.io/projected/fae72d4e-5c7f-4a3a-b59f-bc6e42338e97-kube-api-access-btgwg\") pod \"fae72d4e-5c7f-4a3a-b59f-bc6e42338e97\" (UID: \"fae72d4e-5c7f-4a3a-b59f-bc6e42338e97\") " Feb 03 08:56:02 crc kubenswrapper[4998]: I0203 08:56:02.381499 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7g59v\" (UniqueName: \"kubernetes.io/projected/f86d75f0-cebd-4d4c-84b6-7350ff9ecd52-kube-api-access-7g59v\") pod \"f86d75f0-cebd-4d4c-84b6-7350ff9ecd52\" (UID: \"f86d75f0-cebd-4d4c-84b6-7350ff9ecd52\") " Feb 03 08:56:02 crc kubenswrapper[4998]: I0203 08:56:02.381697 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f86d75f0-cebd-4d4c-84b6-7350ff9ecd52-operator-scripts\") pod \"f86d75f0-cebd-4d4c-84b6-7350ff9ecd52\" (UID: \"f86d75f0-cebd-4d4c-84b6-7350ff9ecd52\") " Feb 03 08:56:02 crc kubenswrapper[4998]: I0203 08:56:02.381760 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fae72d4e-5c7f-4a3a-b59f-bc6e42338e97-operator-scripts\") pod \"fae72d4e-5c7f-4a3a-b59f-bc6e42338e97\" (UID: \"fae72d4e-5c7f-4a3a-b59f-bc6e42338e97\") " Feb 03 08:56:02 crc kubenswrapper[4998]: I0203 08:56:02.382242 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fae72d4e-5c7f-4a3a-b59f-bc6e42338e97-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fae72d4e-5c7f-4a3a-b59f-bc6e42338e97" (UID: "fae72d4e-5c7f-4a3a-b59f-bc6e42338e97"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:56:02 crc kubenswrapper[4998]: I0203 08:56:02.382636 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f86d75f0-cebd-4d4c-84b6-7350ff9ecd52-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f86d75f0-cebd-4d4c-84b6-7350ff9ecd52" (UID: "f86d75f0-cebd-4d4c-84b6-7350ff9ecd52"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:56:02 crc kubenswrapper[4998]: I0203 08:56:02.386870 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fae72d4e-5c7f-4a3a-b59f-bc6e42338e97-kube-api-access-btgwg" (OuterVolumeSpecName: "kube-api-access-btgwg") pod "fae72d4e-5c7f-4a3a-b59f-bc6e42338e97" (UID: "fae72d4e-5c7f-4a3a-b59f-bc6e42338e97"). InnerVolumeSpecName "kube-api-access-btgwg". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:56:02 crc kubenswrapper[4998]: I0203 08:56:02.387088 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f86d75f0-cebd-4d4c-84b6-7350ff9ecd52-kube-api-access-7g59v" (OuterVolumeSpecName: "kube-api-access-7g59v") pod "f86d75f0-cebd-4d4c-84b6-7350ff9ecd52" (UID: "f86d75f0-cebd-4d4c-84b6-7350ff9ecd52"). InnerVolumeSpecName "kube-api-access-7g59v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:56:02 crc kubenswrapper[4998]: I0203 08:56:02.484292 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f86d75f0-cebd-4d4c-84b6-7350ff9ecd52-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 08:56:02 crc kubenswrapper[4998]: I0203 08:56:02.484345 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fae72d4e-5c7f-4a3a-b59f-bc6e42338e97-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 08:56:02 crc kubenswrapper[4998]: I0203 08:56:02.484366 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-btgwg\" (UniqueName: \"kubernetes.io/projected/fae72d4e-5c7f-4a3a-b59f-bc6e42338e97-kube-api-access-btgwg\") on node \"crc\" DevicePath \"\"" Feb 03 08:56:02 crc kubenswrapper[4998]: I0203 08:56:02.484384 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7g59v\" (UniqueName: \"kubernetes.io/projected/f86d75f0-cebd-4d4c-84b6-7350ff9ecd52-kube-api-access-7g59v\") on node \"crc\" DevicePath \"\"" Feb 03 08:56:02 crc kubenswrapper[4998]: I0203 08:56:02.750173 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-qsjnb" Feb 03 08:56:02 crc kubenswrapper[4998]: I0203 08:56:02.750168 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-qsjnb" event={"ID":"fae72d4e-5c7f-4a3a-b59f-bc6e42338e97","Type":"ContainerDied","Data":"4fa8f1af3abccf6aca14247fecdb46b055958930e61dff9f7ca9b4b53be218d6"} Feb 03 08:56:02 crc kubenswrapper[4998]: I0203 08:56:02.750831 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4fa8f1af3abccf6aca14247fecdb46b055958930e61dff9f7ca9b4b53be218d6" Feb 03 08:56:02 crc kubenswrapper[4998]: I0203 08:56:02.760984 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-e543-account-create-update-g8nc2" event={"ID":"f86d75f0-cebd-4d4c-84b6-7350ff9ecd52","Type":"ContainerDied","Data":"f19391b133ee744fc3328cf8b43d8dca770765c65b0f0164cf49d7c77f500090"} Feb 03 08:56:02 crc kubenswrapper[4998]: I0203 08:56:02.761882 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f19391b133ee744fc3328cf8b43d8dca770765c65b0f0164cf49d7c77f500090" Feb 03 08:56:02 crc kubenswrapper[4998]: I0203 08:56:02.761154 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-e543-account-create-update-g8nc2" Feb 03 08:56:03 crc kubenswrapper[4998]: I0203 08:56:03.658762 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-gz7j2"] Feb 03 08:56:03 crc kubenswrapper[4998]: E0203 08:56:03.659140 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fae72d4e-5c7f-4a3a-b59f-bc6e42338e97" containerName="mariadb-database-create" Feb 03 08:56:03 crc kubenswrapper[4998]: I0203 08:56:03.659156 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="fae72d4e-5c7f-4a3a-b59f-bc6e42338e97" containerName="mariadb-database-create" Feb 03 08:56:03 crc kubenswrapper[4998]: E0203 08:56:03.659177 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f86d75f0-cebd-4d4c-84b6-7350ff9ecd52" containerName="mariadb-account-create-update" Feb 03 08:56:03 crc kubenswrapper[4998]: I0203 08:56:03.659184 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f86d75f0-cebd-4d4c-84b6-7350ff9ecd52" containerName="mariadb-account-create-update" Feb 03 08:56:03 crc kubenswrapper[4998]: I0203 08:56:03.659355 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="f86d75f0-cebd-4d4c-84b6-7350ff9ecd52" containerName="mariadb-account-create-update" Feb 03 08:56:03 crc kubenswrapper[4998]: I0203 08:56:03.659372 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="fae72d4e-5c7f-4a3a-b59f-bc6e42338e97" containerName="mariadb-database-create" Feb 03 08:56:03 crc kubenswrapper[4998]: I0203 08:56:03.660020 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-gz7j2" Feb 03 08:56:03 crc kubenswrapper[4998]: I0203 08:56:03.664014 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Feb 03 08:56:03 crc kubenswrapper[4998]: I0203 08:56:03.664265 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Feb 03 08:56:03 crc kubenswrapper[4998]: I0203 08:56:03.664657 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-tlfvn" Feb 03 08:56:03 crc kubenswrapper[4998]: I0203 08:56:03.673431 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-gz7j2"] Feb 03 08:56:03 crc kubenswrapper[4998]: I0203 08:56:03.709615 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9jw9\" (UniqueName: \"kubernetes.io/projected/3f414436-f176-40ab-b8e6-6115625b5c66-kube-api-access-c9jw9\") pod \"neutron-db-sync-gz7j2\" (UID: \"3f414436-f176-40ab-b8e6-6115625b5c66\") " pod="openstack/neutron-db-sync-gz7j2" Feb 03 08:56:03 crc kubenswrapper[4998]: I0203 08:56:03.709716 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3f414436-f176-40ab-b8e6-6115625b5c66-config\") pod \"neutron-db-sync-gz7j2\" (UID: \"3f414436-f176-40ab-b8e6-6115625b5c66\") " pod="openstack/neutron-db-sync-gz7j2" Feb 03 08:56:03 crc kubenswrapper[4998]: I0203 08:56:03.709830 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f414436-f176-40ab-b8e6-6115625b5c66-combined-ca-bundle\") pod \"neutron-db-sync-gz7j2\" (UID: \"3f414436-f176-40ab-b8e6-6115625b5c66\") " pod="openstack/neutron-db-sync-gz7j2" Feb 03 08:56:03 crc 
kubenswrapper[4998]: I0203 08:56:03.810402 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3f414436-f176-40ab-b8e6-6115625b5c66-config\") pod \"neutron-db-sync-gz7j2\" (UID: \"3f414436-f176-40ab-b8e6-6115625b5c66\") " pod="openstack/neutron-db-sync-gz7j2"
Feb 03 08:56:03 crc kubenswrapper[4998]: I0203 08:56:03.810468 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f414436-f176-40ab-b8e6-6115625b5c66-combined-ca-bundle\") pod \"neutron-db-sync-gz7j2\" (UID: \"3f414436-f176-40ab-b8e6-6115625b5c66\") " pod="openstack/neutron-db-sync-gz7j2"
Feb 03 08:56:03 crc kubenswrapper[4998]: I0203 08:56:03.810546 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9jw9\" (UniqueName: \"kubernetes.io/projected/3f414436-f176-40ab-b8e6-6115625b5c66-kube-api-access-c9jw9\") pod \"neutron-db-sync-gz7j2\" (UID: \"3f414436-f176-40ab-b8e6-6115625b5c66\") " pod="openstack/neutron-db-sync-gz7j2"
Feb 03 08:56:03 crc kubenswrapper[4998]: I0203 08:56:03.815511 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f414436-f176-40ab-b8e6-6115625b5c66-combined-ca-bundle\") pod \"neutron-db-sync-gz7j2\" (UID: \"3f414436-f176-40ab-b8e6-6115625b5c66\") " pod="openstack/neutron-db-sync-gz7j2"
Feb 03 08:56:03 crc kubenswrapper[4998]: I0203 08:56:03.815708 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/3f414436-f176-40ab-b8e6-6115625b5c66-config\") pod \"neutron-db-sync-gz7j2\" (UID: \"3f414436-f176-40ab-b8e6-6115625b5c66\") " pod="openstack/neutron-db-sync-gz7j2"
Feb 03 08:56:03 crc kubenswrapper[4998]: I0203 08:56:03.832403 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9jw9\" (UniqueName: \"kubernetes.io/projected/3f414436-f176-40ab-b8e6-6115625b5c66-kube-api-access-c9jw9\") pod \"neutron-db-sync-gz7j2\" (UID: \"3f414436-f176-40ab-b8e6-6115625b5c66\") " pod="openstack/neutron-db-sync-gz7j2"
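
Each volume for neutron-db-sync-gz7j2 passes through the same three logged phases: the reconciler first verifies the volume is attached (reconciler_common.go:245, earlier above), then starts the mount (reconciler_common.go:218), and operation_generator.go:637 confirms MountVolume.SetUp. A stdlib-Python sketch (hypothetical helper; assumes one entry per line and the escaped \" quoting klog uses for nested strings) that tabulates how far each (pod, volume) pair has progressed:

    #!/usr/bin/env python3
    """Sketch: follow each volume through the mount state machine logged above."""
    import re
    import sys

    # Three phases per volume; inner quotes in the log are escaped (\") because
    # klog quotes the whole structured message.
    PHASES = [
        ("attach-verified", re.compile(r'operationExecutor\.VerifyControllerAttachedVolume started for volume \\"([^"\\]+)\\"')),
        ("mount-started",   re.compile(r'operationExecutor\.MountVolume started for volume \\"([^"\\]+)\\"')),
        ("setup-succeeded", re.compile(r'MountVolume\.SetUp succeeded for volume \\"([^"\\]+)\\"')),
    ]
    POD = re.compile(r'pod="([^"]+)"')  # the unescaped trailing pod= field

    progress = {}  # (pod, volume) -> last phase seen
    for line in sys.stdin:
        pod = POD.search(line)
        for phase, rx in PHASES:
            m = rx.search(line)
            if m and pod:
                progress[(pod.group(1), m.group(1))] = phase

    for (pod, volume), phase in sorted(progress.items()):
        print(f"{pod:45s} {volume:25s} {phase}")

A volume stuck at mount-started with no setup-succeeded line is the usual signature of a missing Secret or ConfigMap.
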
Need to start a new one" pod="openstack/neutron-db-sync-gz7j2" Feb 03 08:56:04 crc kubenswrapper[4998]: I0203 08:56:04.469668 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-gz7j2"] Feb 03 08:56:04 crc kubenswrapper[4998]: I0203 08:56:04.778797 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-gz7j2" event={"ID":"3f414436-f176-40ab-b8e6-6115625b5c66","Type":"ContainerStarted","Data":"a614b89c08dbceab2098023883c419fd3232ff80bb1465a266e95fa0f586a82c"} Feb 03 08:56:04 crc kubenswrapper[4998]: I0203 08:56:04.778852 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-gz7j2" event={"ID":"3f414436-f176-40ab-b8e6-6115625b5c66","Type":"ContainerStarted","Data":"2064c2db83abc172dc8021902dee200e0205bb436472ca9c672e12144618add0"} Feb 03 08:56:04 crc kubenswrapper[4998]: I0203 08:56:04.793128 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-gz7j2" podStartSLOduration=1.793108057 podStartE2EDuration="1.793108057s" podCreationTimestamp="2026-02-03 08:56:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 08:56:04.791293946 +0000 UTC m=+7803.077987772" watchObservedRunningTime="2026-02-03 08:56:04.793108057 +0000 UTC m=+7803.079801873" Feb 03 08:56:08 crc kubenswrapper[4998]: I0203 08:56:08.811227 4998 generic.go:334] "Generic (PLEG): container finished" podID="3f414436-f176-40ab-b8e6-6115625b5c66" containerID="a614b89c08dbceab2098023883c419fd3232ff80bb1465a266e95fa0f586a82c" exitCode=0 Feb 03 08:56:08 crc kubenswrapper[4998]: I0203 08:56:08.811359 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-gz7j2" event={"ID":"3f414436-f176-40ab-b8e6-6115625b5c66","Type":"ContainerDied","Data":"a614b89c08dbceab2098023883c419fd3232ff80bb1465a266e95fa0f586a82c"} Feb 03 08:56:10 crc kubenswrapper[4998]: I0203 08:56:10.135705 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-gz7j2" Feb 03 08:56:10 crc kubenswrapper[4998]: I0203 08:56:10.245543 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c9jw9\" (UniqueName: \"kubernetes.io/projected/3f414436-f176-40ab-b8e6-6115625b5c66-kube-api-access-c9jw9\") pod \"3f414436-f176-40ab-b8e6-6115625b5c66\" (UID: \"3f414436-f176-40ab-b8e6-6115625b5c66\") " Feb 03 08:56:10 crc kubenswrapper[4998]: I0203 08:56:10.246030 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f414436-f176-40ab-b8e6-6115625b5c66-combined-ca-bundle\") pod \"3f414436-f176-40ab-b8e6-6115625b5c66\" (UID: \"3f414436-f176-40ab-b8e6-6115625b5c66\") " Feb 03 08:56:10 crc kubenswrapper[4998]: I0203 08:56:10.246081 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3f414436-f176-40ab-b8e6-6115625b5c66-config\") pod \"3f414436-f176-40ab-b8e6-6115625b5c66\" (UID: \"3f414436-f176-40ab-b8e6-6115625b5c66\") " Feb 03 08:56:10 crc kubenswrapper[4998]: I0203 08:56:10.254076 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f414436-f176-40ab-b8e6-6115625b5c66-kube-api-access-c9jw9" (OuterVolumeSpecName: "kube-api-access-c9jw9") pod "3f414436-f176-40ab-b8e6-6115625b5c66" (UID: "3f414436-f176-40ab-b8e6-6115625b5c66"). InnerVolumeSpecName "kube-api-access-c9jw9". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:56:10 crc kubenswrapper[4998]: I0203 08:56:10.271003 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f414436-f176-40ab-b8e6-6115625b5c66-config" (OuterVolumeSpecName: "config") pod "3f414436-f176-40ab-b8e6-6115625b5c66" (UID: "3f414436-f176-40ab-b8e6-6115625b5c66"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:56:10 crc kubenswrapper[4998]: I0203 08:56:10.273154 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f414436-f176-40ab-b8e6-6115625b5c66-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3f414436-f176-40ab-b8e6-6115625b5c66" (UID: "3f414436-f176-40ab-b8e6-6115625b5c66"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:56:10 crc kubenswrapper[4998]: I0203 08:56:10.348857 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/3f414436-f176-40ab-b8e6-6115625b5c66-config\") on node \"crc\" DevicePath \"\"" Feb 03 08:56:10 crc kubenswrapper[4998]: I0203 08:56:10.349214 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c9jw9\" (UniqueName: \"kubernetes.io/projected/3f414436-f176-40ab-b8e6-6115625b5c66-kube-api-access-c9jw9\") on node \"crc\" DevicePath \"\"" Feb 03 08:56:10 crc kubenswrapper[4998]: I0203 08:56:10.349232 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f414436-f176-40ab-b8e6-6115625b5c66-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 08:56:10 crc kubenswrapper[4998]: I0203 08:56:10.830099 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-gz7j2" event={"ID":"3f414436-f176-40ab-b8e6-6115625b5c66","Type":"ContainerDied","Data":"2064c2db83abc172dc8021902dee200e0205bb436472ca9c672e12144618add0"} Feb 03 08:56:10 crc kubenswrapper[4998]: I0203 08:56:10.830156 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2064c2db83abc172dc8021902dee200e0205bb436472ca9c672e12144618add0" Feb 03 08:56:10 crc kubenswrapper[4998]: I0203 08:56:10.830162 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-gz7j2" Feb 03 08:56:10 crc kubenswrapper[4998]: I0203 08:56:10.978122 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5959fc5b85-b2wpj"] Feb 03 08:56:10 crc kubenswrapper[4998]: E0203 08:56:10.978517 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f414436-f176-40ab-b8e6-6115625b5c66" containerName="neutron-db-sync" Feb 03 08:56:10 crc kubenswrapper[4998]: I0203 08:56:10.978538 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f414436-f176-40ab-b8e6-6115625b5c66" containerName="neutron-db-sync" Feb 03 08:56:10 crc kubenswrapper[4998]: I0203 08:56:10.978748 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f414436-f176-40ab-b8e6-6115625b5c66" containerName="neutron-db-sync" Feb 03 08:56:10 crc kubenswrapper[4998]: I0203 08:56:10.979848 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5959fc5b85-b2wpj" Feb 03 08:56:11 crc kubenswrapper[4998]: I0203 08:56:11.003706 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5959fc5b85-b2wpj"] Feb 03 08:56:11 crc kubenswrapper[4998]: I0203 08:56:11.119426 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-b8865847c-ztg5q"] Feb 03 08:56:11 crc kubenswrapper[4998]: I0203 08:56:11.120981 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-b8865847c-ztg5q" Feb 03 08:56:11 crc kubenswrapper[4998]: I0203 08:56:11.124490 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Feb 03 08:56:11 crc kubenswrapper[4998]: I0203 08:56:11.124744 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Feb 03 08:56:11 crc kubenswrapper[4998]: I0203 08:56:11.124763 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-tlfvn" Feb 03 08:56:11 crc kubenswrapper[4998]: I0203 08:56:11.135324 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-b8865847c-ztg5q"] Feb 03 08:56:11 crc kubenswrapper[4998]: I0203 08:56:11.166686 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdcng\" (UniqueName: \"kubernetes.io/projected/d14b3eea-7878-4cbd-9663-4568cb0d5cd4-kube-api-access-jdcng\") pod \"dnsmasq-dns-5959fc5b85-b2wpj\" (UID: \"d14b3eea-7878-4cbd-9663-4568cb0d5cd4\") " pod="openstack/dnsmasq-dns-5959fc5b85-b2wpj" Feb 03 08:56:11 crc kubenswrapper[4998]: I0203 08:56:11.167763 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d14b3eea-7878-4cbd-9663-4568cb0d5cd4-config\") pod \"dnsmasq-dns-5959fc5b85-b2wpj\" (UID: \"d14b3eea-7878-4cbd-9663-4568cb0d5cd4\") " pod="openstack/dnsmasq-dns-5959fc5b85-b2wpj" Feb 03 08:56:11 crc kubenswrapper[4998]: I0203 08:56:11.169171 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d14b3eea-7878-4cbd-9663-4568cb0d5cd4-ovsdbserver-sb\") pod \"dnsmasq-dns-5959fc5b85-b2wpj\" (UID: \"d14b3eea-7878-4cbd-9663-4568cb0d5cd4\") " pod="openstack/dnsmasq-dns-5959fc5b85-b2wpj" Feb 03 08:56:11 crc kubenswrapper[4998]: I0203 08:56:11.169489 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d14b3eea-7878-4cbd-9663-4568cb0d5cd4-ovsdbserver-nb\") pod \"dnsmasq-dns-5959fc5b85-b2wpj\" (UID: \"d14b3eea-7878-4cbd-9663-4568cb0d5cd4\") " pod="openstack/dnsmasq-dns-5959fc5b85-b2wpj" Feb 03 08:56:11 crc kubenswrapper[4998]: I0203 08:56:11.169947 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d14b3eea-7878-4cbd-9663-4568cb0d5cd4-dns-svc\") pod \"dnsmasq-dns-5959fc5b85-b2wpj\" (UID: \"d14b3eea-7878-4cbd-9663-4568cb0d5cd4\") " pod="openstack/dnsmasq-dns-5959fc5b85-b2wpj" Feb 03 08:56:11 crc kubenswrapper[4998]: I0203 08:56:11.272189 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/df70d98e-43b5-4a3f-98b8-57c2b577e1c8-config\") pod \"neutron-b8865847c-ztg5q\" (UID: \"df70d98e-43b5-4a3f-98b8-57c2b577e1c8\") " pod="openstack/neutron-b8865847c-ztg5q" Feb 03 08:56:11 crc kubenswrapper[4998]: I0203 08:56:11.272497 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d14b3eea-7878-4cbd-9663-4568cb0d5cd4-ovsdbserver-sb\") pod \"dnsmasq-dns-5959fc5b85-b2wpj\" (UID: \"d14b3eea-7878-4cbd-9663-4568cb0d5cd4\") " pod="openstack/dnsmasq-dns-5959fc5b85-b2wpj" Feb 03 08:56:11 crc 
kubenswrapper[4998]: I0203 08:56:11.272545 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df70d98e-43b5-4a3f-98b8-57c2b577e1c8-combined-ca-bundle\") pod \"neutron-b8865847c-ztg5q\" (UID: \"df70d98e-43b5-4a3f-98b8-57c2b577e1c8\") " pod="openstack/neutron-b8865847c-ztg5q" Feb 03 08:56:11 crc kubenswrapper[4998]: I0203 08:56:11.272573 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/df70d98e-43b5-4a3f-98b8-57c2b577e1c8-httpd-config\") pod \"neutron-b8865847c-ztg5q\" (UID: \"df70d98e-43b5-4a3f-98b8-57c2b577e1c8\") " pod="openstack/neutron-b8865847c-ztg5q" Feb 03 08:56:11 crc kubenswrapper[4998]: I0203 08:56:11.272592 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d14b3eea-7878-4cbd-9663-4568cb0d5cd4-ovsdbserver-nb\") pod \"dnsmasq-dns-5959fc5b85-b2wpj\" (UID: \"d14b3eea-7878-4cbd-9663-4568cb0d5cd4\") " pod="openstack/dnsmasq-dns-5959fc5b85-b2wpj" Feb 03 08:56:11 crc kubenswrapper[4998]: I0203 08:56:11.272618 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d14b3eea-7878-4cbd-9663-4568cb0d5cd4-dns-svc\") pod \"dnsmasq-dns-5959fc5b85-b2wpj\" (UID: \"d14b3eea-7878-4cbd-9663-4568cb0d5cd4\") " pod="openstack/dnsmasq-dns-5959fc5b85-b2wpj" Feb 03 08:56:11 crc kubenswrapper[4998]: I0203 08:56:11.272669 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdcng\" (UniqueName: \"kubernetes.io/projected/d14b3eea-7878-4cbd-9663-4568cb0d5cd4-kube-api-access-jdcng\") pod \"dnsmasq-dns-5959fc5b85-b2wpj\" (UID: \"d14b3eea-7878-4cbd-9663-4568cb0d5cd4\") " pod="openstack/dnsmasq-dns-5959fc5b85-b2wpj" Feb 03 08:56:11 crc kubenswrapper[4998]: I0203 08:56:11.272701 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d14b3eea-7878-4cbd-9663-4568cb0d5cd4-config\") pod \"dnsmasq-dns-5959fc5b85-b2wpj\" (UID: \"d14b3eea-7878-4cbd-9663-4568cb0d5cd4\") " pod="openstack/dnsmasq-dns-5959fc5b85-b2wpj" Feb 03 08:56:11 crc kubenswrapper[4998]: I0203 08:56:11.272718 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pn7rq\" (UniqueName: \"kubernetes.io/projected/df70d98e-43b5-4a3f-98b8-57c2b577e1c8-kube-api-access-pn7rq\") pod \"neutron-b8865847c-ztg5q\" (UID: \"df70d98e-43b5-4a3f-98b8-57c2b577e1c8\") " pod="openstack/neutron-b8865847c-ztg5q" Feb 03 08:56:11 crc kubenswrapper[4998]: I0203 08:56:11.273656 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d14b3eea-7878-4cbd-9663-4568cb0d5cd4-ovsdbserver-sb\") pod \"dnsmasq-dns-5959fc5b85-b2wpj\" (UID: \"d14b3eea-7878-4cbd-9663-4568cb0d5cd4\") " pod="openstack/dnsmasq-dns-5959fc5b85-b2wpj" Feb 03 08:56:11 crc kubenswrapper[4998]: I0203 08:56:11.274275 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d14b3eea-7878-4cbd-9663-4568cb0d5cd4-ovsdbserver-nb\") pod \"dnsmasq-dns-5959fc5b85-b2wpj\" (UID: \"d14b3eea-7878-4cbd-9663-4568cb0d5cd4\") " pod="openstack/dnsmasq-dns-5959fc5b85-b2wpj" Feb 03 08:56:11 crc kubenswrapper[4998]: I0203 08:56:11.275965 
4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d14b3eea-7878-4cbd-9663-4568cb0d5cd4-dns-svc\") pod \"dnsmasq-dns-5959fc5b85-b2wpj\" (UID: \"d14b3eea-7878-4cbd-9663-4568cb0d5cd4\") " pod="openstack/dnsmasq-dns-5959fc5b85-b2wpj" Feb 03 08:56:11 crc kubenswrapper[4998]: I0203 08:56:11.276175 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d14b3eea-7878-4cbd-9663-4568cb0d5cd4-config\") pod \"dnsmasq-dns-5959fc5b85-b2wpj\" (UID: \"d14b3eea-7878-4cbd-9663-4568cb0d5cd4\") " pod="openstack/dnsmasq-dns-5959fc5b85-b2wpj" Feb 03 08:56:11 crc kubenswrapper[4998]: I0203 08:56:11.299430 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jdcng\" (UniqueName: \"kubernetes.io/projected/d14b3eea-7878-4cbd-9663-4568cb0d5cd4-kube-api-access-jdcng\") pod \"dnsmasq-dns-5959fc5b85-b2wpj\" (UID: \"d14b3eea-7878-4cbd-9663-4568cb0d5cd4\") " pod="openstack/dnsmasq-dns-5959fc5b85-b2wpj" Feb 03 08:56:11 crc kubenswrapper[4998]: I0203 08:56:11.304134 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5959fc5b85-b2wpj" Feb 03 08:56:11 crc kubenswrapper[4998]: I0203 08:56:11.373821 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/df70d98e-43b5-4a3f-98b8-57c2b577e1c8-config\") pod \"neutron-b8865847c-ztg5q\" (UID: \"df70d98e-43b5-4a3f-98b8-57c2b577e1c8\") " pod="openstack/neutron-b8865847c-ztg5q" Feb 03 08:56:11 crc kubenswrapper[4998]: I0203 08:56:11.373891 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df70d98e-43b5-4a3f-98b8-57c2b577e1c8-combined-ca-bundle\") pod \"neutron-b8865847c-ztg5q\" (UID: \"df70d98e-43b5-4a3f-98b8-57c2b577e1c8\") " pod="openstack/neutron-b8865847c-ztg5q" Feb 03 08:56:11 crc kubenswrapper[4998]: I0203 08:56:11.373950 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/df70d98e-43b5-4a3f-98b8-57c2b577e1c8-httpd-config\") pod \"neutron-b8865847c-ztg5q\" (UID: \"df70d98e-43b5-4a3f-98b8-57c2b577e1c8\") " pod="openstack/neutron-b8865847c-ztg5q" Feb 03 08:56:11 crc kubenswrapper[4998]: I0203 08:56:11.374035 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pn7rq\" (UniqueName: \"kubernetes.io/projected/df70d98e-43b5-4a3f-98b8-57c2b577e1c8-kube-api-access-pn7rq\") pod \"neutron-b8865847c-ztg5q\" (UID: \"df70d98e-43b5-4a3f-98b8-57c2b577e1c8\") " pod="openstack/neutron-b8865847c-ztg5q" Feb 03 08:56:11 crc kubenswrapper[4998]: I0203 08:56:11.378214 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/df70d98e-43b5-4a3f-98b8-57c2b577e1c8-config\") pod \"neutron-b8865847c-ztg5q\" (UID: \"df70d98e-43b5-4a3f-98b8-57c2b577e1c8\") " pod="openstack/neutron-b8865847c-ztg5q" Feb 03 08:56:11 crc kubenswrapper[4998]: I0203 08:56:11.379042 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/df70d98e-43b5-4a3f-98b8-57c2b577e1c8-httpd-config\") pod \"neutron-b8865847c-ztg5q\" (UID: \"df70d98e-43b5-4a3f-98b8-57c2b577e1c8\") " pod="openstack/neutron-b8865847c-ztg5q" Feb 03 08:56:11 crc kubenswrapper[4998]: I0203 08:56:11.379736 4998 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df70d98e-43b5-4a3f-98b8-57c2b577e1c8-combined-ca-bundle\") pod \"neutron-b8865847c-ztg5q\" (UID: \"df70d98e-43b5-4a3f-98b8-57c2b577e1c8\") " pod="openstack/neutron-b8865847c-ztg5q" Feb 03 08:56:11 crc kubenswrapper[4998]: I0203 08:56:11.394060 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pn7rq\" (UniqueName: \"kubernetes.io/projected/df70d98e-43b5-4a3f-98b8-57c2b577e1c8-kube-api-access-pn7rq\") pod \"neutron-b8865847c-ztg5q\" (UID: \"df70d98e-43b5-4a3f-98b8-57c2b577e1c8\") " pod="openstack/neutron-b8865847c-ztg5q" Feb 03 08:56:11 crc kubenswrapper[4998]: I0203 08:56:11.450242 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-b8865847c-ztg5q" Feb 03 08:56:11 crc kubenswrapper[4998]: I0203 08:56:11.834268 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5959fc5b85-b2wpj"] Feb 03 08:56:11 crc kubenswrapper[4998]: I0203 08:56:11.841513 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5959fc5b85-b2wpj" event={"ID":"d14b3eea-7878-4cbd-9663-4568cb0d5cd4","Type":"ContainerStarted","Data":"d70a19ca76d054e9abee15c73f6861426537afce83284cfad43c5126cbb8c5bd"} Feb 03 08:56:12 crc kubenswrapper[4998]: I0203 08:56:12.055048 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-b8865847c-ztg5q"] Feb 03 08:56:12 crc kubenswrapper[4998]: W0203 08:56:12.124405 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddf70d98e_43b5_4a3f_98b8_57c2b577e1c8.slice/crio-e373b1668650020f36a625cfc34f278f0177f8614e883f4fe55f241cc8c23fe8 WatchSource:0}: Error finding container e373b1668650020f36a625cfc34f278f0177f8614e883f4fe55f241cc8c23fe8: Status 404 returned error can't find the container with id e373b1668650020f36a625cfc34f278f0177f8614e883f4fe55f241cc8c23fe8 Feb 03 08:56:12 crc kubenswrapper[4998]: I0203 08:56:12.851750 4998 generic.go:334] "Generic (PLEG): container finished" podID="d14b3eea-7878-4cbd-9663-4568cb0d5cd4" containerID="3abf4b1b2e84f0f6daa995823d1feba99221a962f1185075b57fc4bf6570062b" exitCode=0 Feb 03 08:56:12 crc kubenswrapper[4998]: I0203 08:56:12.851853 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5959fc5b85-b2wpj" event={"ID":"d14b3eea-7878-4cbd-9663-4568cb0d5cd4","Type":"ContainerDied","Data":"3abf4b1b2e84f0f6daa995823d1feba99221a962f1185075b57fc4bf6570062b"} Feb 03 08:56:12 crc kubenswrapper[4998]: I0203 08:56:12.854278 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-b8865847c-ztg5q" event={"ID":"df70d98e-43b5-4a3f-98b8-57c2b577e1c8","Type":"ContainerStarted","Data":"d7215d37bfb26c3b7d064b5bfeb517bad7db454cf7e282e9e6a2165672bb0a8a"} Feb 03 08:56:12 crc kubenswrapper[4998]: I0203 08:56:12.854312 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-b8865847c-ztg5q" event={"ID":"df70d98e-43b5-4a3f-98b8-57c2b577e1c8","Type":"ContainerStarted","Data":"5443d12522bc6039c9b8ba5cef1b88efba83de24d09b45e91aaddcc419787a4f"} Feb 03 08:56:12 crc kubenswrapper[4998]: I0203 08:56:12.854325 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-b8865847c-ztg5q" event={"ID":"df70d98e-43b5-4a3f-98b8-57c2b577e1c8","Type":"ContainerStarted","Data":"e373b1668650020f36a625cfc34f278f0177f8614e883f4fe55f241cc8c23fe8"} Feb 
03 08:56:12 crc kubenswrapper[4998]: I0203 08:56:12.854425 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-b8865847c-ztg5q" Feb 03 08:56:12 crc kubenswrapper[4998]: I0203 08:56:12.907392 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-b8865847c-ztg5q" podStartSLOduration=1.907351475 podStartE2EDuration="1.907351475s" podCreationTimestamp="2026-02-03 08:56:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 08:56:12.893187663 +0000 UTC m=+7811.179881489" watchObservedRunningTime="2026-02-03 08:56:12.907351475 +0000 UTC m=+7811.194045281" Feb 03 08:56:13 crc kubenswrapper[4998]: I0203 08:56:13.861747 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5959fc5b85-b2wpj" event={"ID":"d14b3eea-7878-4cbd-9663-4568cb0d5cd4","Type":"ContainerStarted","Data":"c4819ee580f4f8ec8a8c97d07530eeb59d1e21b0ff2fc115bad5fadfe06beaf7"} Feb 03 08:56:13 crc kubenswrapper[4998]: I0203 08:56:13.890625 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5959fc5b85-b2wpj" podStartSLOduration=3.890608388 podStartE2EDuration="3.890608388s" podCreationTimestamp="2026-02-03 08:56:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 08:56:13.881129059 +0000 UTC m=+7812.167822895" watchObservedRunningTime="2026-02-03 08:56:13.890608388 +0000 UTC m=+7812.177302194" Feb 03 08:56:14 crc kubenswrapper[4998]: I0203 08:56:14.870722 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5959fc5b85-b2wpj" Feb 03 08:56:21 crc kubenswrapper[4998]: I0203 08:56:21.305600 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5959fc5b85-b2wpj" Feb 03 08:56:21 crc kubenswrapper[4998]: I0203 08:56:21.423910 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55df84ddd5-j4nh8"] Feb 03 08:56:21 crc kubenswrapper[4998]: I0203 08:56:21.424379 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-55df84ddd5-j4nh8" podUID="a3c0768e-90fd-40da-90ff-67a8c5022d07" containerName="dnsmasq-dns" containerID="cri-o://91d9dcda05fc7ab18eb488cffb7ab884cc8641cbe38e2630c178c8d2edf65282" gracePeriod=10 Feb 03 08:56:21 crc kubenswrapper[4998]: I0203 08:56:21.974311 4998 generic.go:334] "Generic (PLEG): container finished" podID="a3c0768e-90fd-40da-90ff-67a8c5022d07" containerID="91d9dcda05fc7ab18eb488cffb7ab884cc8641cbe38e2630c178c8d2edf65282" exitCode=0 Feb 03 08:56:21 crc kubenswrapper[4998]: I0203 08:56:21.974566 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55df84ddd5-j4nh8" event={"ID":"a3c0768e-90fd-40da-90ff-67a8c5022d07","Type":"ContainerDied","Data":"91d9dcda05fc7ab18eb488cffb7ab884cc8641cbe38e2630c178c8d2edf65282"} Feb 03 08:56:22 crc kubenswrapper[4998]: I0203 08:56:22.040195 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55df84ddd5-j4nh8" Feb 03 08:56:22 crc kubenswrapper[4998]: I0203 08:56:22.180054 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3c0768e-90fd-40da-90ff-67a8c5022d07-config\") pod \"a3c0768e-90fd-40da-90ff-67a8c5022d07\" (UID: \"a3c0768e-90fd-40da-90ff-67a8c5022d07\") " Feb 03 08:56:22 crc kubenswrapper[4998]: I0203 08:56:22.180150 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qsvpp\" (UniqueName: \"kubernetes.io/projected/a3c0768e-90fd-40da-90ff-67a8c5022d07-kube-api-access-qsvpp\") pod \"a3c0768e-90fd-40da-90ff-67a8c5022d07\" (UID: \"a3c0768e-90fd-40da-90ff-67a8c5022d07\") " Feb 03 08:56:22 crc kubenswrapper[4998]: I0203 08:56:22.180216 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a3c0768e-90fd-40da-90ff-67a8c5022d07-ovsdbserver-sb\") pod \"a3c0768e-90fd-40da-90ff-67a8c5022d07\" (UID: \"a3c0768e-90fd-40da-90ff-67a8c5022d07\") " Feb 03 08:56:22 crc kubenswrapper[4998]: I0203 08:56:22.180248 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a3c0768e-90fd-40da-90ff-67a8c5022d07-dns-svc\") pod \"a3c0768e-90fd-40da-90ff-67a8c5022d07\" (UID: \"a3c0768e-90fd-40da-90ff-67a8c5022d07\") " Feb 03 08:56:22 crc kubenswrapper[4998]: I0203 08:56:22.180303 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a3c0768e-90fd-40da-90ff-67a8c5022d07-ovsdbserver-nb\") pod \"a3c0768e-90fd-40da-90ff-67a8c5022d07\" (UID: \"a3c0768e-90fd-40da-90ff-67a8c5022d07\") " Feb 03 08:56:22 crc kubenswrapper[4998]: I0203 08:56:22.191986 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3c0768e-90fd-40da-90ff-67a8c5022d07-kube-api-access-qsvpp" (OuterVolumeSpecName: "kube-api-access-qsvpp") pod "a3c0768e-90fd-40da-90ff-67a8c5022d07" (UID: "a3c0768e-90fd-40da-90ff-67a8c5022d07"). InnerVolumeSpecName "kube-api-access-qsvpp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:56:22 crc kubenswrapper[4998]: I0203 08:56:22.233414 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3c0768e-90fd-40da-90ff-67a8c5022d07-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a3c0768e-90fd-40da-90ff-67a8c5022d07" (UID: "a3c0768e-90fd-40da-90ff-67a8c5022d07"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:56:22 crc kubenswrapper[4998]: I0203 08:56:22.238382 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3c0768e-90fd-40da-90ff-67a8c5022d07-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a3c0768e-90fd-40da-90ff-67a8c5022d07" (UID: "a3c0768e-90fd-40da-90ff-67a8c5022d07"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:56:22 crc kubenswrapper[4998]: I0203 08:56:22.249312 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3c0768e-90fd-40da-90ff-67a8c5022d07-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a3c0768e-90fd-40da-90ff-67a8c5022d07" (UID: "a3c0768e-90fd-40da-90ff-67a8c5022d07"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:56:22 crc kubenswrapper[4998]: I0203 08:56:22.251850 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3c0768e-90fd-40da-90ff-67a8c5022d07-config" (OuterVolumeSpecName: "config") pod "a3c0768e-90fd-40da-90ff-67a8c5022d07" (UID: "a3c0768e-90fd-40da-90ff-67a8c5022d07"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:56:22 crc kubenswrapper[4998]: I0203 08:56:22.283640 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3c0768e-90fd-40da-90ff-67a8c5022d07-config\") on node \"crc\" DevicePath \"\"" Feb 03 08:56:22 crc kubenswrapper[4998]: I0203 08:56:22.283685 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qsvpp\" (UniqueName: \"kubernetes.io/projected/a3c0768e-90fd-40da-90ff-67a8c5022d07-kube-api-access-qsvpp\") on node \"crc\" DevicePath \"\"" Feb 03 08:56:22 crc kubenswrapper[4998]: I0203 08:56:22.283700 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a3c0768e-90fd-40da-90ff-67a8c5022d07-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 03 08:56:22 crc kubenswrapper[4998]: I0203 08:56:22.283712 4998 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a3c0768e-90fd-40da-90ff-67a8c5022d07-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 08:56:22 crc kubenswrapper[4998]: I0203 08:56:22.283724 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a3c0768e-90fd-40da-90ff-67a8c5022d07-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 03 08:56:22 crc kubenswrapper[4998]: I0203 08:56:22.990073 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55df84ddd5-j4nh8" event={"ID":"a3c0768e-90fd-40da-90ff-67a8c5022d07","Type":"ContainerDied","Data":"90c4ff50f252c3fc90da058c3ddd4ee44459ad0f8aa927e488caa21e46beb7e4"} Feb 03 08:56:22 crc kubenswrapper[4998]: I0203 08:56:22.990356 4998 scope.go:117] "RemoveContainer" containerID="91d9dcda05fc7ab18eb488cffb7ab884cc8641cbe38e2630c178c8d2edf65282" Feb 03 08:56:22 crc kubenswrapper[4998]: I0203 08:56:22.990521 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55df84ddd5-j4nh8" Feb 03 08:56:23 crc kubenswrapper[4998]: I0203 08:56:23.016113 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55df84ddd5-j4nh8"] Feb 03 08:56:23 crc kubenswrapper[4998]: I0203 08:56:23.018082 4998 scope.go:117] "RemoveContainer" containerID="ec05834b227957cc620089f1a6c675ccebead4c58326835d00ca5a118471bf9a" Feb 03 08:56:23 crc kubenswrapper[4998]: I0203 08:56:23.034543 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-55df84ddd5-j4nh8"] Feb 03 08:56:24 crc kubenswrapper[4998]: I0203 08:56:24.441110 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3c0768e-90fd-40da-90ff-67a8c5022d07" path="/var/lib/kubelet/pods/a3c0768e-90fd-40da-90ff-67a8c5022d07/volumes" Feb 03 08:56:41 crc kubenswrapper[4998]: I0203 08:56:41.467355 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-b8865847c-ztg5q" Feb 03 08:56:42 crc kubenswrapper[4998]: I0203 08:56:42.755044 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 08:56:42 crc kubenswrapper[4998]: I0203 08:56:42.755377 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.034276 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6464cf648f-j45ff"] Feb 03 08:56:50 crc kubenswrapper[4998]: E0203 08:56:50.035171 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3c0768e-90fd-40da-90ff-67a8c5022d07" containerName="dnsmasq-dns" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.035185 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3c0768e-90fd-40da-90ff-67a8c5022d07" containerName="dnsmasq-dns" Feb 03 08:56:50 crc kubenswrapper[4998]: E0203 08:56:50.035204 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3c0768e-90fd-40da-90ff-67a8c5022d07" containerName="init" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.035210 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3c0768e-90fd-40da-90ff-67a8c5022d07" containerName="init" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.035399 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3c0768e-90fd-40da-90ff-67a8c5022d07" containerName="dnsmasq-dns" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.036347 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6464cf648f-j45ff" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.043884 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-b9slq"] Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.044961 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-b9slq" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.050120 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.052302 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.058940 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.058964 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-h9b54" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.059179 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.060489 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6464cf648f-j45ff"] Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.079100 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-b9slq"] Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.201984 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/45377673-bf8c-4cfe-af9a-15c68705994f-dns-svc\") pod \"dnsmasq-dns-6464cf648f-j45ff\" (UID: \"45377673-bf8c-4cfe-af9a-15c68705994f\") " pod="openstack/dnsmasq-dns-6464cf648f-j45ff" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.202060 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/70c13234-a910-458e-9013-94c7269a2984-scripts\") pod \"swift-ring-rebalance-b9slq\" (UID: \"70c13234-a910-458e-9013-94c7269a2984\") " pod="openstack/swift-ring-rebalance-b9slq" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.202122 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70c13234-a910-458e-9013-94c7269a2984-combined-ca-bundle\") pod \"swift-ring-rebalance-b9slq\" (UID: \"70c13234-a910-458e-9013-94c7269a2984\") " pod="openstack/swift-ring-rebalance-b9slq" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.202153 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/70c13234-a910-458e-9013-94c7269a2984-ring-data-devices\") pod \"swift-ring-rebalance-b9slq\" (UID: \"70c13234-a910-458e-9013-94c7269a2984\") " pod="openstack/swift-ring-rebalance-b9slq" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.202176 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8cr4\" (UniqueName: \"kubernetes.io/projected/70c13234-a910-458e-9013-94c7269a2984-kube-api-access-g8cr4\") pod \"swift-ring-rebalance-b9slq\" (UID: \"70c13234-a910-458e-9013-94c7269a2984\") " pod="openstack/swift-ring-rebalance-b9slq" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.202383 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/45377673-bf8c-4cfe-af9a-15c68705994f-ovsdbserver-nb\") pod 
\"dnsmasq-dns-6464cf648f-j45ff\" (UID: \"45377673-bf8c-4cfe-af9a-15c68705994f\") " pod="openstack/dnsmasq-dns-6464cf648f-j45ff" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.202455 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/70c13234-a910-458e-9013-94c7269a2984-etc-swift\") pod \"swift-ring-rebalance-b9slq\" (UID: \"70c13234-a910-458e-9013-94c7269a2984\") " pod="openstack/swift-ring-rebalance-b9slq" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.202837 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/70c13234-a910-458e-9013-94c7269a2984-dispersionconf\") pod \"swift-ring-rebalance-b9slq\" (UID: \"70c13234-a910-458e-9013-94c7269a2984\") " pod="openstack/swift-ring-rebalance-b9slq" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.202883 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/45377673-bf8c-4cfe-af9a-15c68705994f-config\") pod \"dnsmasq-dns-6464cf648f-j45ff\" (UID: \"45377673-bf8c-4cfe-af9a-15c68705994f\") " pod="openstack/dnsmasq-dns-6464cf648f-j45ff" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.202910 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qjcx\" (UniqueName: \"kubernetes.io/projected/45377673-bf8c-4cfe-af9a-15c68705994f-kube-api-access-4qjcx\") pod \"dnsmasq-dns-6464cf648f-j45ff\" (UID: \"45377673-bf8c-4cfe-af9a-15c68705994f\") " pod="openstack/dnsmasq-dns-6464cf648f-j45ff" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.203102 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/70c13234-a910-458e-9013-94c7269a2984-swiftconf\") pod \"swift-ring-rebalance-b9slq\" (UID: \"70c13234-a910-458e-9013-94c7269a2984\") " pod="openstack/swift-ring-rebalance-b9slq" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.203144 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/45377673-bf8c-4cfe-af9a-15c68705994f-ovsdbserver-sb\") pod \"dnsmasq-dns-6464cf648f-j45ff\" (UID: \"45377673-bf8c-4cfe-af9a-15c68705994f\") " pod="openstack/dnsmasq-dns-6464cf648f-j45ff" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.304440 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/70c13234-a910-458e-9013-94c7269a2984-dispersionconf\") pod \"swift-ring-rebalance-b9slq\" (UID: \"70c13234-a910-458e-9013-94c7269a2984\") " pod="openstack/swift-ring-rebalance-b9slq" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.304832 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qjcx\" (UniqueName: \"kubernetes.io/projected/45377673-bf8c-4cfe-af9a-15c68705994f-kube-api-access-4qjcx\") pod \"dnsmasq-dns-6464cf648f-j45ff\" (UID: \"45377673-bf8c-4cfe-af9a-15c68705994f\") " pod="openstack/dnsmasq-dns-6464cf648f-j45ff" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.304856 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/45377673-bf8c-4cfe-af9a-15c68705994f-config\") pod \"dnsmasq-dns-6464cf648f-j45ff\" (UID: \"45377673-bf8c-4cfe-af9a-15c68705994f\") " pod="openstack/dnsmasq-dns-6464cf648f-j45ff" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.304913 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/70c13234-a910-458e-9013-94c7269a2984-swiftconf\") pod \"swift-ring-rebalance-b9slq\" (UID: \"70c13234-a910-458e-9013-94c7269a2984\") " pod="openstack/swift-ring-rebalance-b9slq" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.304935 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/45377673-bf8c-4cfe-af9a-15c68705994f-ovsdbserver-sb\") pod \"dnsmasq-dns-6464cf648f-j45ff\" (UID: \"45377673-bf8c-4cfe-af9a-15c68705994f\") " pod="openstack/dnsmasq-dns-6464cf648f-j45ff" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.304958 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/45377673-bf8c-4cfe-af9a-15c68705994f-dns-svc\") pod \"dnsmasq-dns-6464cf648f-j45ff\" (UID: \"45377673-bf8c-4cfe-af9a-15c68705994f\") " pod="openstack/dnsmasq-dns-6464cf648f-j45ff" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.304991 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/70c13234-a910-458e-9013-94c7269a2984-scripts\") pod \"swift-ring-rebalance-b9slq\" (UID: \"70c13234-a910-458e-9013-94c7269a2984\") " pod="openstack/swift-ring-rebalance-b9slq" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.305036 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70c13234-a910-458e-9013-94c7269a2984-combined-ca-bundle\") pod \"swift-ring-rebalance-b9slq\" (UID: \"70c13234-a910-458e-9013-94c7269a2984\") " pod="openstack/swift-ring-rebalance-b9slq" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.305054 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/70c13234-a910-458e-9013-94c7269a2984-ring-data-devices\") pod \"swift-ring-rebalance-b9slq\" (UID: \"70c13234-a910-458e-9013-94c7269a2984\") " pod="openstack/swift-ring-rebalance-b9slq" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.305070 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8cr4\" (UniqueName: \"kubernetes.io/projected/70c13234-a910-458e-9013-94c7269a2984-kube-api-access-g8cr4\") pod \"swift-ring-rebalance-b9slq\" (UID: \"70c13234-a910-458e-9013-94c7269a2984\") " pod="openstack/swift-ring-rebalance-b9slq" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.305111 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/45377673-bf8c-4cfe-af9a-15c68705994f-ovsdbserver-nb\") pod \"dnsmasq-dns-6464cf648f-j45ff\" (UID: \"45377673-bf8c-4cfe-af9a-15c68705994f\") " pod="openstack/dnsmasq-dns-6464cf648f-j45ff" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.305126 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/70c13234-a910-458e-9013-94c7269a2984-etc-swift\") pod 
\"swift-ring-rebalance-b9slq\" (UID: \"70c13234-a910-458e-9013-94c7269a2984\") " pod="openstack/swift-ring-rebalance-b9slq" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.305743 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/45377673-bf8c-4cfe-af9a-15c68705994f-config\") pod \"dnsmasq-dns-6464cf648f-j45ff\" (UID: \"45377673-bf8c-4cfe-af9a-15c68705994f\") " pod="openstack/dnsmasq-dns-6464cf648f-j45ff" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.306322 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/70c13234-a910-458e-9013-94c7269a2984-etc-swift\") pod \"swift-ring-rebalance-b9slq\" (UID: \"70c13234-a910-458e-9013-94c7269a2984\") " pod="openstack/swift-ring-rebalance-b9slq" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.306590 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/70c13234-a910-458e-9013-94c7269a2984-scripts\") pod \"swift-ring-rebalance-b9slq\" (UID: \"70c13234-a910-458e-9013-94c7269a2984\") " pod="openstack/swift-ring-rebalance-b9slq" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.312505 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/70c13234-a910-458e-9013-94c7269a2984-dispersionconf\") pod \"swift-ring-rebalance-b9slq\" (UID: \"70c13234-a910-458e-9013-94c7269a2984\") " pod="openstack/swift-ring-rebalance-b9slq" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.312820 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/45377673-bf8c-4cfe-af9a-15c68705994f-ovsdbserver-sb\") pod \"dnsmasq-dns-6464cf648f-j45ff\" (UID: \"45377673-bf8c-4cfe-af9a-15c68705994f\") " pod="openstack/dnsmasq-dns-6464cf648f-j45ff" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.313911 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70c13234-a910-458e-9013-94c7269a2984-combined-ca-bundle\") pod \"swift-ring-rebalance-b9slq\" (UID: \"70c13234-a910-458e-9013-94c7269a2984\") " pod="openstack/swift-ring-rebalance-b9slq" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.313911 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/70c13234-a910-458e-9013-94c7269a2984-swiftconf\") pod \"swift-ring-rebalance-b9slq\" (UID: \"70c13234-a910-458e-9013-94c7269a2984\") " pod="openstack/swift-ring-rebalance-b9slq" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.316374 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/70c13234-a910-458e-9013-94c7269a2984-ring-data-devices\") pod \"swift-ring-rebalance-b9slq\" (UID: \"70c13234-a910-458e-9013-94c7269a2984\") " pod="openstack/swift-ring-rebalance-b9slq" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.316595 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/45377673-bf8c-4cfe-af9a-15c68705994f-dns-svc\") pod \"dnsmasq-dns-6464cf648f-j45ff\" (UID: \"45377673-bf8c-4cfe-af9a-15c68705994f\") " pod="openstack/dnsmasq-dns-6464cf648f-j45ff" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.317018 4998 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/45377673-bf8c-4cfe-af9a-15c68705994f-ovsdbserver-nb\") pod \"dnsmasq-dns-6464cf648f-j45ff\" (UID: \"45377673-bf8c-4cfe-af9a-15c68705994f\") " pod="openstack/dnsmasq-dns-6464cf648f-j45ff" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.326588 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qjcx\" (UniqueName: \"kubernetes.io/projected/45377673-bf8c-4cfe-af9a-15c68705994f-kube-api-access-4qjcx\") pod \"dnsmasq-dns-6464cf648f-j45ff\" (UID: \"45377673-bf8c-4cfe-af9a-15c68705994f\") " pod="openstack/dnsmasq-dns-6464cf648f-j45ff" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.331940 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8cr4\" (UniqueName: \"kubernetes.io/projected/70c13234-a910-458e-9013-94c7269a2984-kube-api-access-g8cr4\") pod \"swift-ring-rebalance-b9slq\" (UID: \"70c13234-a910-458e-9013-94c7269a2984\") " pod="openstack/swift-ring-rebalance-b9slq" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.375333 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6464cf648f-j45ff" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.384105 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-b9slq" Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.842538 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6464cf648f-j45ff"] Feb 03 08:56:50 crc kubenswrapper[4998]: I0203 08:56:50.924275 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-b9slq"] Feb 03 08:56:51 crc kubenswrapper[4998]: I0203 08:56:51.256140 4998 generic.go:334] "Generic (PLEG): container finished" podID="45377673-bf8c-4cfe-af9a-15c68705994f" containerID="a32a340594b881eb8501a2057ca5cc99672e5a3fbe5aa644451602cc27282ea5" exitCode=0 Feb 03 08:56:51 crc kubenswrapper[4998]: I0203 08:56:51.256351 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6464cf648f-j45ff" event={"ID":"45377673-bf8c-4cfe-af9a-15c68705994f","Type":"ContainerDied","Data":"a32a340594b881eb8501a2057ca5cc99672e5a3fbe5aa644451602cc27282ea5"} Feb 03 08:56:51 crc kubenswrapper[4998]: I0203 08:56:51.256545 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6464cf648f-j45ff" event={"ID":"45377673-bf8c-4cfe-af9a-15c68705994f","Type":"ContainerStarted","Data":"8665024ab03f85f2f564146333e10139af2ed09d89f839a4066ce8c5444975ad"} Feb 03 08:56:51 crc kubenswrapper[4998]: I0203 08:56:51.259217 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-b9slq" event={"ID":"70c13234-a910-458e-9013-94c7269a2984","Type":"ContainerStarted","Data":"edf05937506fab0e94e96c051b7a0e7f7234f34307e9c116665c7d42063790a9"} Feb 03 08:56:52 crc kubenswrapper[4998]: I0203 08:56:52.276666 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6464cf648f-j45ff" event={"ID":"45377673-bf8c-4cfe-af9a-15c68705994f","Type":"ContainerStarted","Data":"b7bed04c1ea177b81630aab76718c0f2907612178db1ed57b1f24cf90460f077"} Feb 03 08:56:52 crc kubenswrapper[4998]: I0203 08:56:52.278396 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6464cf648f-j45ff" Feb 03 08:56:52 crc kubenswrapper[4998]: I0203 08:56:52.305663 4998 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6464cf648f-j45ff" podStartSLOduration=3.305643919 podStartE2EDuration="3.305643919s" podCreationTimestamp="2026-02-03 08:56:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 08:56:52.301318687 +0000 UTC m=+7850.588012513" watchObservedRunningTime="2026-02-03 08:56:52.305643919 +0000 UTC m=+7850.592337725" Feb 03 08:56:52 crc kubenswrapper[4998]: I0203 08:56:52.542594 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-768dd68f4d-g2qrv"] Feb 03 08:56:52 crc kubenswrapper[4998]: I0203 08:56:52.545559 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-768dd68f4d-g2qrv" Feb 03 08:56:52 crc kubenswrapper[4998]: I0203 08:56:52.548648 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Feb 03 08:56:52 crc kubenswrapper[4998]: I0203 08:56:52.587617 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-768dd68f4d-g2qrv"] Feb 03 08:56:52 crc kubenswrapper[4998]: I0203 08:56:52.653849 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f64949be-8e32-451e-a626-98930fa6b3a2-combined-ca-bundle\") pod \"swift-proxy-768dd68f4d-g2qrv\" (UID: \"f64949be-8e32-451e-a626-98930fa6b3a2\") " pod="openstack/swift-proxy-768dd68f4d-g2qrv" Feb 03 08:56:52 crc kubenswrapper[4998]: I0203 08:56:52.653905 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f64949be-8e32-451e-a626-98930fa6b3a2-etc-swift\") pod \"swift-proxy-768dd68f4d-g2qrv\" (UID: \"f64949be-8e32-451e-a626-98930fa6b3a2\") " pod="openstack/swift-proxy-768dd68f4d-g2qrv" Feb 03 08:56:52 crc kubenswrapper[4998]: I0203 08:56:52.654007 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f64949be-8e32-451e-a626-98930fa6b3a2-config-data\") pod \"swift-proxy-768dd68f4d-g2qrv\" (UID: \"f64949be-8e32-451e-a626-98930fa6b3a2\") " pod="openstack/swift-proxy-768dd68f4d-g2qrv" Feb 03 08:56:52 crc kubenswrapper[4998]: I0203 08:56:52.654063 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6l6vt\" (UniqueName: \"kubernetes.io/projected/f64949be-8e32-451e-a626-98930fa6b3a2-kube-api-access-6l6vt\") pod \"swift-proxy-768dd68f4d-g2qrv\" (UID: \"f64949be-8e32-451e-a626-98930fa6b3a2\") " pod="openstack/swift-proxy-768dd68f4d-g2qrv" Feb 03 08:56:52 crc kubenswrapper[4998]: I0203 08:56:52.654098 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f64949be-8e32-451e-a626-98930fa6b3a2-run-httpd\") pod \"swift-proxy-768dd68f4d-g2qrv\" (UID: \"f64949be-8e32-451e-a626-98930fa6b3a2\") " pod="openstack/swift-proxy-768dd68f4d-g2qrv" Feb 03 08:56:52 crc kubenswrapper[4998]: I0203 08:56:52.654123 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f64949be-8e32-451e-a626-98930fa6b3a2-log-httpd\") pod \"swift-proxy-768dd68f4d-g2qrv\" (UID: 
\"f64949be-8e32-451e-a626-98930fa6b3a2\") " pod="openstack/swift-proxy-768dd68f4d-g2qrv" Feb 03 08:56:52 crc kubenswrapper[4998]: I0203 08:56:52.755852 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f64949be-8e32-451e-a626-98930fa6b3a2-combined-ca-bundle\") pod \"swift-proxy-768dd68f4d-g2qrv\" (UID: \"f64949be-8e32-451e-a626-98930fa6b3a2\") " pod="openstack/swift-proxy-768dd68f4d-g2qrv" Feb 03 08:56:52 crc kubenswrapper[4998]: I0203 08:56:52.755920 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f64949be-8e32-451e-a626-98930fa6b3a2-etc-swift\") pod \"swift-proxy-768dd68f4d-g2qrv\" (UID: \"f64949be-8e32-451e-a626-98930fa6b3a2\") " pod="openstack/swift-proxy-768dd68f4d-g2qrv" Feb 03 08:56:52 crc kubenswrapper[4998]: I0203 08:56:52.756030 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f64949be-8e32-451e-a626-98930fa6b3a2-config-data\") pod \"swift-proxy-768dd68f4d-g2qrv\" (UID: \"f64949be-8e32-451e-a626-98930fa6b3a2\") " pod="openstack/swift-proxy-768dd68f4d-g2qrv" Feb 03 08:56:52 crc kubenswrapper[4998]: I0203 08:56:52.756079 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6l6vt\" (UniqueName: \"kubernetes.io/projected/f64949be-8e32-451e-a626-98930fa6b3a2-kube-api-access-6l6vt\") pod \"swift-proxy-768dd68f4d-g2qrv\" (UID: \"f64949be-8e32-451e-a626-98930fa6b3a2\") " pod="openstack/swift-proxy-768dd68f4d-g2qrv" Feb 03 08:56:52 crc kubenswrapper[4998]: I0203 08:56:52.756123 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f64949be-8e32-451e-a626-98930fa6b3a2-run-httpd\") pod \"swift-proxy-768dd68f4d-g2qrv\" (UID: \"f64949be-8e32-451e-a626-98930fa6b3a2\") " pod="openstack/swift-proxy-768dd68f4d-g2qrv" Feb 03 08:56:52 crc kubenswrapper[4998]: I0203 08:56:52.756157 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f64949be-8e32-451e-a626-98930fa6b3a2-log-httpd\") pod \"swift-proxy-768dd68f4d-g2qrv\" (UID: \"f64949be-8e32-451e-a626-98930fa6b3a2\") " pod="openstack/swift-proxy-768dd68f4d-g2qrv" Feb 03 08:56:52 crc kubenswrapper[4998]: I0203 08:56:52.757410 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f64949be-8e32-451e-a626-98930fa6b3a2-log-httpd\") pod \"swift-proxy-768dd68f4d-g2qrv\" (UID: \"f64949be-8e32-451e-a626-98930fa6b3a2\") " pod="openstack/swift-proxy-768dd68f4d-g2qrv" Feb 03 08:56:52 crc kubenswrapper[4998]: I0203 08:56:52.759044 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f64949be-8e32-451e-a626-98930fa6b3a2-run-httpd\") pod \"swift-proxy-768dd68f4d-g2qrv\" (UID: \"f64949be-8e32-451e-a626-98930fa6b3a2\") " pod="openstack/swift-proxy-768dd68f4d-g2qrv" Feb 03 08:56:52 crc kubenswrapper[4998]: I0203 08:56:52.762027 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f64949be-8e32-451e-a626-98930fa6b3a2-combined-ca-bundle\") pod \"swift-proxy-768dd68f4d-g2qrv\" (UID: \"f64949be-8e32-451e-a626-98930fa6b3a2\") " pod="openstack/swift-proxy-768dd68f4d-g2qrv" Feb 03 08:56:52 crc 
kubenswrapper[4998]: I0203 08:56:52.762182 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f64949be-8e32-451e-a626-98930fa6b3a2-config-data\") pod \"swift-proxy-768dd68f4d-g2qrv\" (UID: \"f64949be-8e32-451e-a626-98930fa6b3a2\") " pod="openstack/swift-proxy-768dd68f4d-g2qrv" Feb 03 08:56:52 crc kubenswrapper[4998]: I0203 08:56:52.762410 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f64949be-8e32-451e-a626-98930fa6b3a2-etc-swift\") pod \"swift-proxy-768dd68f4d-g2qrv\" (UID: \"f64949be-8e32-451e-a626-98930fa6b3a2\") " pod="openstack/swift-proxy-768dd68f4d-g2qrv" Feb 03 08:56:52 crc kubenswrapper[4998]: I0203 08:56:52.774644 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6l6vt\" (UniqueName: \"kubernetes.io/projected/f64949be-8e32-451e-a626-98930fa6b3a2-kube-api-access-6l6vt\") pod \"swift-proxy-768dd68f4d-g2qrv\" (UID: \"f64949be-8e32-451e-a626-98930fa6b3a2\") " pod="openstack/swift-proxy-768dd68f4d-g2qrv" Feb 03 08:56:52 crc kubenswrapper[4998]: I0203 08:56:52.887464 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-768dd68f4d-g2qrv" Feb 03 08:56:53 crc kubenswrapper[4998]: I0203 08:56:53.614126 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-768dd68f4d-g2qrv"] Feb 03 08:56:54 crc kubenswrapper[4998]: W0203 08:56:54.315163 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf64949be_8e32_451e_a626_98930fa6b3a2.slice/crio-5080afc5cab380ab402824f29215420b7fa16175b3a4e0020c78f98654df1782 WatchSource:0}: Error finding container 5080afc5cab380ab402824f29215420b7fa16175b3a4e0020c78f98654df1782: Status 404 returned error can't find the container with id 5080afc5cab380ab402824f29215420b7fa16175b3a4e0020c78f98654df1782 Feb 03 08:56:55 crc kubenswrapper[4998]: I0203 08:56:55.299703 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-768dd68f4d-g2qrv" event={"ID":"f64949be-8e32-451e-a626-98930fa6b3a2","Type":"ContainerStarted","Data":"5080afc5cab380ab402824f29215420b7fa16175b3a4e0020c78f98654df1782"} Feb 03 08:56:56 crc kubenswrapper[4998]: I0203 08:56:56.309328 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-768dd68f4d-g2qrv" event={"ID":"f64949be-8e32-451e-a626-98930fa6b3a2","Type":"ContainerStarted","Data":"49d9dbb0da1e727b5f8a6fbc8497ce4ba686dc04fc133831d7299f3174ddf761"} Feb 03 08:56:56 crc kubenswrapper[4998]: I0203 08:56:56.311316 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-b9slq" event={"ID":"70c13234-a910-458e-9013-94c7269a2984","Type":"ContainerStarted","Data":"e5381ef1c527e2b79c23ca3aae775ea3d5e3e97e5b345f52d60a3ca969ca3bc2"} Feb 03 08:56:56 crc kubenswrapper[4998]: I0203 08:56:56.332846 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-b9slq" podStartSLOduration=2.389135797 podStartE2EDuration="7.332828613s" podCreationTimestamp="2026-02-03 08:56:49 +0000 UTC" firstStartedPulling="2026-02-03 08:56:50.938216294 +0000 UTC m=+7849.224910100" lastFinishedPulling="2026-02-03 08:56:55.88190911 +0000 UTC m=+7854.168602916" observedRunningTime="2026-02-03 08:56:56.325496165 +0000 UTC m=+7854.612189971" watchObservedRunningTime="2026-02-03 08:56:56.332828613 +0000 UTC 
m=+7854.619522409" Feb 03 08:56:57 crc kubenswrapper[4998]: I0203 08:56:57.321861 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-768dd68f4d-g2qrv" event={"ID":"f64949be-8e32-451e-a626-98930fa6b3a2","Type":"ContainerStarted","Data":"7185809b2533a6190fd5cf5d1ee6d2ec3518c9455b36f2e4d3e947a8eb5e3479"} Feb 03 08:56:57 crc kubenswrapper[4998]: I0203 08:56:57.322464 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-768dd68f4d-g2qrv" Feb 03 08:56:57 crc kubenswrapper[4998]: I0203 08:56:57.322489 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-768dd68f4d-g2qrv" Feb 03 08:56:57 crc kubenswrapper[4998]: I0203 08:56:57.354695 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-768dd68f4d-g2qrv" podStartSLOduration=3.746898071 podStartE2EDuration="5.354670084s" podCreationTimestamp="2026-02-03 08:56:52 +0000 UTC" firstStartedPulling="2026-02-03 08:56:54.318061783 +0000 UTC m=+7852.604755589" lastFinishedPulling="2026-02-03 08:56:55.925833796 +0000 UTC m=+7854.212527602" observedRunningTime="2026-02-03 08:56:57.353078408 +0000 UTC m=+7855.639772234" watchObservedRunningTime="2026-02-03 08:56:57.354670084 +0000 UTC m=+7855.641363890" Feb 03 08:57:00 crc kubenswrapper[4998]: I0203 08:57:00.378037 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6464cf648f-j45ff" Feb 03 08:57:00 crc kubenswrapper[4998]: I0203 08:57:00.454555 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5959fc5b85-b2wpj"] Feb 03 08:57:00 crc kubenswrapper[4998]: I0203 08:57:00.454894 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5959fc5b85-b2wpj" podUID="d14b3eea-7878-4cbd-9663-4568cb0d5cd4" containerName="dnsmasq-dns" containerID="cri-o://c4819ee580f4f8ec8a8c97d07530eeb59d1e21b0ff2fc115bad5fadfe06beaf7" gracePeriod=10 Feb 03 08:57:00 crc kubenswrapper[4998]: E0203 08:57:00.605977 4998 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod70c13234_a910_458e_9013_94c7269a2984.slice/crio-e5381ef1c527e2b79c23ca3aae775ea3d5e3e97e5b345f52d60a3ca969ca3bc2.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd14b3eea_7878_4cbd_9663_4568cb0d5cd4.slice/crio-conmon-c4819ee580f4f8ec8a8c97d07530eeb59d1e21b0ff2fc115bad5fadfe06beaf7.scope\": RecentStats: unable to find data in memory cache]" Feb 03 08:57:01 crc kubenswrapper[4998]: I0203 08:57:01.001572 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5959fc5b85-b2wpj" Feb 03 08:57:01 crc kubenswrapper[4998]: I0203 08:57:01.021907 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jdcng\" (UniqueName: \"kubernetes.io/projected/d14b3eea-7878-4cbd-9663-4568cb0d5cd4-kube-api-access-jdcng\") pod \"d14b3eea-7878-4cbd-9663-4568cb0d5cd4\" (UID: \"d14b3eea-7878-4cbd-9663-4568cb0d5cd4\") " Feb 03 08:57:01 crc kubenswrapper[4998]: I0203 08:57:01.021990 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d14b3eea-7878-4cbd-9663-4568cb0d5cd4-ovsdbserver-nb\") pod \"d14b3eea-7878-4cbd-9663-4568cb0d5cd4\" (UID: \"d14b3eea-7878-4cbd-9663-4568cb0d5cd4\") " Feb 03 08:57:01 crc kubenswrapper[4998]: I0203 08:57:01.022013 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d14b3eea-7878-4cbd-9663-4568cb0d5cd4-dns-svc\") pod \"d14b3eea-7878-4cbd-9663-4568cb0d5cd4\" (UID: \"d14b3eea-7878-4cbd-9663-4568cb0d5cd4\") " Feb 03 08:57:01 crc kubenswrapper[4998]: I0203 08:57:01.022140 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d14b3eea-7878-4cbd-9663-4568cb0d5cd4-ovsdbserver-sb\") pod \"d14b3eea-7878-4cbd-9663-4568cb0d5cd4\" (UID: \"d14b3eea-7878-4cbd-9663-4568cb0d5cd4\") " Feb 03 08:57:01 crc kubenswrapper[4998]: I0203 08:57:01.022179 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d14b3eea-7878-4cbd-9663-4568cb0d5cd4-config\") pod \"d14b3eea-7878-4cbd-9663-4568cb0d5cd4\" (UID: \"d14b3eea-7878-4cbd-9663-4568cb0d5cd4\") " Feb 03 08:57:01 crc kubenswrapper[4998]: I0203 08:57:01.027865 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d14b3eea-7878-4cbd-9663-4568cb0d5cd4-kube-api-access-jdcng" (OuterVolumeSpecName: "kube-api-access-jdcng") pod "d14b3eea-7878-4cbd-9663-4568cb0d5cd4" (UID: "d14b3eea-7878-4cbd-9663-4568cb0d5cd4"). InnerVolumeSpecName "kube-api-access-jdcng". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:57:01 crc kubenswrapper[4998]: I0203 08:57:01.111016 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d14b3eea-7878-4cbd-9663-4568cb0d5cd4-config" (OuterVolumeSpecName: "config") pod "d14b3eea-7878-4cbd-9663-4568cb0d5cd4" (UID: "d14b3eea-7878-4cbd-9663-4568cb0d5cd4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:57:01 crc kubenswrapper[4998]: I0203 08:57:01.114231 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d14b3eea-7878-4cbd-9663-4568cb0d5cd4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d14b3eea-7878-4cbd-9663-4568cb0d5cd4" (UID: "d14b3eea-7878-4cbd-9663-4568cb0d5cd4"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:57:01 crc kubenswrapper[4998]: I0203 08:57:01.118305 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d14b3eea-7878-4cbd-9663-4568cb0d5cd4-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d14b3eea-7878-4cbd-9663-4568cb0d5cd4" (UID: "d14b3eea-7878-4cbd-9663-4568cb0d5cd4"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:57:01 crc kubenswrapper[4998]: I0203 08:57:01.120436 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d14b3eea-7878-4cbd-9663-4568cb0d5cd4-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d14b3eea-7878-4cbd-9663-4568cb0d5cd4" (UID: "d14b3eea-7878-4cbd-9663-4568cb0d5cd4"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:57:01 crc kubenswrapper[4998]: I0203 08:57:01.124078 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d14b3eea-7878-4cbd-9663-4568cb0d5cd4-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 03 08:57:01 crc kubenswrapper[4998]: I0203 08:57:01.124184 4998 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d14b3eea-7878-4cbd-9663-4568cb0d5cd4-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 08:57:01 crc kubenswrapper[4998]: I0203 08:57:01.124255 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d14b3eea-7878-4cbd-9663-4568cb0d5cd4-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 03 08:57:01 crc kubenswrapper[4998]: I0203 08:57:01.124316 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d14b3eea-7878-4cbd-9663-4568cb0d5cd4-config\") on node \"crc\" DevicePath \"\"" Feb 03 08:57:01 crc kubenswrapper[4998]: I0203 08:57:01.124368 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jdcng\" (UniqueName: \"kubernetes.io/projected/d14b3eea-7878-4cbd-9663-4568cb0d5cd4-kube-api-access-jdcng\") on node \"crc\" DevicePath \"\"" Feb 03 08:57:01 crc kubenswrapper[4998]: I0203 08:57:01.356016 4998 generic.go:334] "Generic (PLEG): container finished" podID="70c13234-a910-458e-9013-94c7269a2984" containerID="e5381ef1c527e2b79c23ca3aae775ea3d5e3e97e5b345f52d60a3ca969ca3bc2" exitCode=0 Feb 03 08:57:01 crc kubenswrapper[4998]: I0203 08:57:01.356064 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-b9slq" event={"ID":"70c13234-a910-458e-9013-94c7269a2984","Type":"ContainerDied","Data":"e5381ef1c527e2b79c23ca3aae775ea3d5e3e97e5b345f52d60a3ca969ca3bc2"} Feb 03 08:57:01 crc kubenswrapper[4998]: I0203 08:57:01.359056 4998 generic.go:334] "Generic (PLEG): container finished" podID="d14b3eea-7878-4cbd-9663-4568cb0d5cd4" containerID="c4819ee580f4f8ec8a8c97d07530eeb59d1e21b0ff2fc115bad5fadfe06beaf7" exitCode=0 Feb 03 08:57:01 crc kubenswrapper[4998]: I0203 08:57:01.359082 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5959fc5b85-b2wpj" event={"ID":"d14b3eea-7878-4cbd-9663-4568cb0d5cd4","Type":"ContainerDied","Data":"c4819ee580f4f8ec8a8c97d07530eeb59d1e21b0ff2fc115bad5fadfe06beaf7"} Feb 03 08:57:01 crc kubenswrapper[4998]: I0203 08:57:01.359107 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5959fc5b85-b2wpj" event={"ID":"d14b3eea-7878-4cbd-9663-4568cb0d5cd4","Type":"ContainerDied","Data":"d70a19ca76d054e9abee15c73f6861426537afce83284cfad43c5126cbb8c5bd"} Feb 03 08:57:01 crc kubenswrapper[4998]: I0203 08:57:01.359122 4998 scope.go:117] "RemoveContainer" containerID="c4819ee580f4f8ec8a8c97d07530eeb59d1e21b0ff2fc115bad5fadfe06beaf7" Feb 03 08:57:01 crc kubenswrapper[4998]: I0203 08:57:01.359228 4998 util.go:48] "No ready sandbox for pod 
can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5959fc5b85-b2wpj" Feb 03 08:57:01 crc kubenswrapper[4998]: I0203 08:57:01.422458 4998 scope.go:117] "RemoveContainer" containerID="3abf4b1b2e84f0f6daa995823d1feba99221a962f1185075b57fc4bf6570062b" Feb 03 08:57:01 crc kubenswrapper[4998]: I0203 08:57:01.429139 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5959fc5b85-b2wpj"] Feb 03 08:57:01 crc kubenswrapper[4998]: I0203 08:57:01.438240 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5959fc5b85-b2wpj"] Feb 03 08:57:01 crc kubenswrapper[4998]: I0203 08:57:01.443467 4998 scope.go:117] "RemoveContainer" containerID="c4819ee580f4f8ec8a8c97d07530eeb59d1e21b0ff2fc115bad5fadfe06beaf7" Feb 03 08:57:01 crc kubenswrapper[4998]: E0203 08:57:01.443884 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c4819ee580f4f8ec8a8c97d07530eeb59d1e21b0ff2fc115bad5fadfe06beaf7\": container with ID starting with c4819ee580f4f8ec8a8c97d07530eeb59d1e21b0ff2fc115bad5fadfe06beaf7 not found: ID does not exist" containerID="c4819ee580f4f8ec8a8c97d07530eeb59d1e21b0ff2fc115bad5fadfe06beaf7" Feb 03 08:57:01 crc kubenswrapper[4998]: I0203 08:57:01.443939 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c4819ee580f4f8ec8a8c97d07530eeb59d1e21b0ff2fc115bad5fadfe06beaf7"} err="failed to get container status \"c4819ee580f4f8ec8a8c97d07530eeb59d1e21b0ff2fc115bad5fadfe06beaf7\": rpc error: code = NotFound desc = could not find container \"c4819ee580f4f8ec8a8c97d07530eeb59d1e21b0ff2fc115bad5fadfe06beaf7\": container with ID starting with c4819ee580f4f8ec8a8c97d07530eeb59d1e21b0ff2fc115bad5fadfe06beaf7 not found: ID does not exist" Feb 03 08:57:01 crc kubenswrapper[4998]: I0203 08:57:01.443971 4998 scope.go:117] "RemoveContainer" containerID="3abf4b1b2e84f0f6daa995823d1feba99221a962f1185075b57fc4bf6570062b" Feb 03 08:57:01 crc kubenswrapper[4998]: E0203 08:57:01.444436 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3abf4b1b2e84f0f6daa995823d1feba99221a962f1185075b57fc4bf6570062b\": container with ID starting with 3abf4b1b2e84f0f6daa995823d1feba99221a962f1185075b57fc4bf6570062b not found: ID does not exist" containerID="3abf4b1b2e84f0f6daa995823d1feba99221a962f1185075b57fc4bf6570062b" Feb 03 08:57:01 crc kubenswrapper[4998]: I0203 08:57:01.444486 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3abf4b1b2e84f0f6daa995823d1feba99221a962f1185075b57fc4bf6570062b"} err="failed to get container status \"3abf4b1b2e84f0f6daa995823d1feba99221a962f1185075b57fc4bf6570062b\": rpc error: code = NotFound desc = could not find container \"3abf4b1b2e84f0f6daa995823d1feba99221a962f1185075b57fc4bf6570062b\": container with ID starting with 3abf4b1b2e84f0f6daa995823d1feba99221a962f1185075b57fc4bf6570062b not found: ID does not exist" Feb 03 08:57:02 crc kubenswrapper[4998]: I0203 08:57:02.438946 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d14b3eea-7878-4cbd-9663-4568cb0d5cd4" path="/var/lib/kubelet/pods/d14b3eea-7878-4cbd-9663-4568cb0d5cd4/volumes" Feb 03 08:57:02 crc kubenswrapper[4998]: I0203 08:57:02.724156 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-b9slq" Feb 03 08:57:02 crc kubenswrapper[4998]: I0203 08:57:02.765350 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g8cr4\" (UniqueName: \"kubernetes.io/projected/70c13234-a910-458e-9013-94c7269a2984-kube-api-access-g8cr4\") pod \"70c13234-a910-458e-9013-94c7269a2984\" (UID: \"70c13234-a910-458e-9013-94c7269a2984\") " Feb 03 08:57:02 crc kubenswrapper[4998]: I0203 08:57:02.765498 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/70c13234-a910-458e-9013-94c7269a2984-ring-data-devices\") pod \"70c13234-a910-458e-9013-94c7269a2984\" (UID: \"70c13234-a910-458e-9013-94c7269a2984\") " Feb 03 08:57:02 crc kubenswrapper[4998]: I0203 08:57:02.765521 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/70c13234-a910-458e-9013-94c7269a2984-etc-swift\") pod \"70c13234-a910-458e-9013-94c7269a2984\" (UID: \"70c13234-a910-458e-9013-94c7269a2984\") " Feb 03 08:57:02 crc kubenswrapper[4998]: I0203 08:57:02.765547 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/70c13234-a910-458e-9013-94c7269a2984-dispersionconf\") pod \"70c13234-a910-458e-9013-94c7269a2984\" (UID: \"70c13234-a910-458e-9013-94c7269a2984\") " Feb 03 08:57:02 crc kubenswrapper[4998]: I0203 08:57:02.765578 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/70c13234-a910-458e-9013-94c7269a2984-swiftconf\") pod \"70c13234-a910-458e-9013-94c7269a2984\" (UID: \"70c13234-a910-458e-9013-94c7269a2984\") " Feb 03 08:57:02 crc kubenswrapper[4998]: I0203 08:57:02.765616 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/70c13234-a910-458e-9013-94c7269a2984-scripts\") pod \"70c13234-a910-458e-9013-94c7269a2984\" (UID: \"70c13234-a910-458e-9013-94c7269a2984\") " Feb 03 08:57:02 crc kubenswrapper[4998]: I0203 08:57:02.765703 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70c13234-a910-458e-9013-94c7269a2984-combined-ca-bundle\") pod \"70c13234-a910-458e-9013-94c7269a2984\" (UID: \"70c13234-a910-458e-9013-94c7269a2984\") " Feb 03 08:57:02 crc kubenswrapper[4998]: I0203 08:57:02.766013 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/70c13234-a910-458e-9013-94c7269a2984-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "70c13234-a910-458e-9013-94c7269a2984" (UID: "70c13234-a910-458e-9013-94c7269a2984"). InnerVolumeSpecName "ring-data-devices". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:57:02 crc kubenswrapper[4998]: I0203 08:57:02.766145 4998 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/70c13234-a910-458e-9013-94c7269a2984-ring-data-devices\") on node \"crc\" DevicePath \"\"" Feb 03 08:57:02 crc kubenswrapper[4998]: I0203 08:57:02.766747 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70c13234-a910-458e-9013-94c7269a2984-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "70c13234-a910-458e-9013-94c7269a2984" (UID: "70c13234-a910-458e-9013-94c7269a2984"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:57:02 crc kubenswrapper[4998]: I0203 08:57:02.771233 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70c13234-a910-458e-9013-94c7269a2984-kube-api-access-g8cr4" (OuterVolumeSpecName: "kube-api-access-g8cr4") pod "70c13234-a910-458e-9013-94c7269a2984" (UID: "70c13234-a910-458e-9013-94c7269a2984"). InnerVolumeSpecName "kube-api-access-g8cr4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:57:02 crc kubenswrapper[4998]: I0203 08:57:02.775078 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70c13234-a910-458e-9013-94c7269a2984-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "70c13234-a910-458e-9013-94c7269a2984" (UID: "70c13234-a910-458e-9013-94c7269a2984"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:57:02 crc kubenswrapper[4998]: I0203 08:57:02.794886 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70c13234-a910-458e-9013-94c7269a2984-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "70c13234-a910-458e-9013-94c7269a2984" (UID: "70c13234-a910-458e-9013-94c7269a2984"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:57:02 crc kubenswrapper[4998]: I0203 08:57:02.796768 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/70c13234-a910-458e-9013-94c7269a2984-scripts" (OuterVolumeSpecName: "scripts") pod "70c13234-a910-458e-9013-94c7269a2984" (UID: "70c13234-a910-458e-9013-94c7269a2984"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:57:02 crc kubenswrapper[4998]: I0203 08:57:02.799578 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70c13234-a910-458e-9013-94c7269a2984-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "70c13234-a910-458e-9013-94c7269a2984" (UID: "70c13234-a910-458e-9013-94c7269a2984"). InnerVolumeSpecName "swiftconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:57:02 crc kubenswrapper[4998]: I0203 08:57:02.867881 4998 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/70c13234-a910-458e-9013-94c7269a2984-etc-swift\") on node \"crc\" DevicePath \"\"" Feb 03 08:57:02 crc kubenswrapper[4998]: I0203 08:57:02.867911 4998 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/70c13234-a910-458e-9013-94c7269a2984-dispersionconf\") on node \"crc\" DevicePath \"\"" Feb 03 08:57:02 crc kubenswrapper[4998]: I0203 08:57:02.867920 4998 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/70c13234-a910-458e-9013-94c7269a2984-swiftconf\") on node \"crc\" DevicePath \"\"" Feb 03 08:57:02 crc kubenswrapper[4998]: I0203 08:57:02.867928 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/70c13234-a910-458e-9013-94c7269a2984-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 08:57:02 crc kubenswrapper[4998]: I0203 08:57:02.867936 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70c13234-a910-458e-9013-94c7269a2984-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 08:57:02 crc kubenswrapper[4998]: I0203 08:57:02.867945 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g8cr4\" (UniqueName: \"kubernetes.io/projected/70c13234-a910-458e-9013-94c7269a2984-kube-api-access-g8cr4\") on node \"crc\" DevicePath \"\"" Feb 03 08:57:02 crc kubenswrapper[4998]: I0203 08:57:02.890301 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-768dd68f4d-g2qrv" Feb 03 08:57:02 crc kubenswrapper[4998]: I0203 08:57:02.896313 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-768dd68f4d-g2qrv" Feb 03 08:57:03 crc kubenswrapper[4998]: I0203 08:57:03.379595 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-b9slq" event={"ID":"70c13234-a910-458e-9013-94c7269a2984","Type":"ContainerDied","Data":"edf05937506fab0e94e96c051b7a0e7f7234f34307e9c116665c7d42063790a9"} Feb 03 08:57:03 crc kubenswrapper[4998]: I0203 08:57:03.380214 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="edf05937506fab0e94e96c051b7a0e7f7234f34307e9c116665c7d42063790a9" Feb 03 08:57:03 crc kubenswrapper[4998]: I0203 08:57:03.380363 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-b9slq" Feb 03 08:57:07 crc kubenswrapper[4998]: E0203 08:57:07.583123 4998 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.129:55326->38.102.83.129:45201: write tcp 38.102.83.129:55326->38.102.83.129:45201: write: connection reset by peer Feb 03 08:57:08 crc kubenswrapper[4998]: I0203 08:57:08.942384 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-g6h6c"] Feb 03 08:57:08 crc kubenswrapper[4998]: E0203 08:57:08.943095 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d14b3eea-7878-4cbd-9663-4568cb0d5cd4" containerName="init" Feb 03 08:57:08 crc kubenswrapper[4998]: I0203 08:57:08.943110 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="d14b3eea-7878-4cbd-9663-4568cb0d5cd4" containerName="init" Feb 03 08:57:08 crc kubenswrapper[4998]: E0203 08:57:08.943126 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d14b3eea-7878-4cbd-9663-4568cb0d5cd4" containerName="dnsmasq-dns" Feb 03 08:57:08 crc kubenswrapper[4998]: I0203 08:57:08.943132 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="d14b3eea-7878-4cbd-9663-4568cb0d5cd4" containerName="dnsmasq-dns" Feb 03 08:57:08 crc kubenswrapper[4998]: E0203 08:57:08.943153 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70c13234-a910-458e-9013-94c7269a2984" containerName="swift-ring-rebalance" Feb 03 08:57:08 crc kubenswrapper[4998]: I0203 08:57:08.943159 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="70c13234-a910-458e-9013-94c7269a2984" containerName="swift-ring-rebalance" Feb 03 08:57:08 crc kubenswrapper[4998]: I0203 08:57:08.943348 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="d14b3eea-7878-4cbd-9663-4568cb0d5cd4" containerName="dnsmasq-dns" Feb 03 08:57:08 crc kubenswrapper[4998]: I0203 08:57:08.943378 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="70c13234-a910-458e-9013-94c7269a2984" containerName="swift-ring-rebalance" Feb 03 08:57:08 crc kubenswrapper[4998]: I0203 08:57:08.943969 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-g6h6c" Feb 03 08:57:08 crc kubenswrapper[4998]: I0203 08:57:08.954465 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-g6h6c"] Feb 03 08:57:08 crc kubenswrapper[4998]: I0203 08:57:08.979444 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3989a09a-1771-44a0-b2e4-32cf6683215b-operator-scripts\") pod \"cinder-db-create-g6h6c\" (UID: \"3989a09a-1771-44a0-b2e4-32cf6683215b\") " pod="openstack/cinder-db-create-g6h6c" Feb 03 08:57:08 crc kubenswrapper[4998]: I0203 08:57:08.979597 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vq29\" (UniqueName: \"kubernetes.io/projected/3989a09a-1771-44a0-b2e4-32cf6683215b-kube-api-access-4vq29\") pod \"cinder-db-create-g6h6c\" (UID: \"3989a09a-1771-44a0-b2e4-32cf6683215b\") " pod="openstack/cinder-db-create-g6h6c" Feb 03 08:57:09 crc kubenswrapper[4998]: I0203 08:57:09.047212 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-71df-account-create-update-cgzrb"] Feb 03 08:57:09 crc kubenswrapper[4998]: I0203 08:57:09.048625 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-71df-account-create-update-cgzrb" Feb 03 08:57:09 crc kubenswrapper[4998]: I0203 08:57:09.051078 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Feb 03 08:57:09 crc kubenswrapper[4998]: I0203 08:57:09.057659 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-71df-account-create-update-cgzrb"] Feb 03 08:57:09 crc kubenswrapper[4998]: I0203 08:57:09.080981 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/88e180ca-ddca-470a-9777-152fd462fc8b-operator-scripts\") pod \"cinder-71df-account-create-update-cgzrb\" (UID: \"88e180ca-ddca-470a-9777-152fd462fc8b\") " pod="openstack/cinder-71df-account-create-update-cgzrb" Feb 03 08:57:09 crc kubenswrapper[4998]: I0203 08:57:09.081085 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3989a09a-1771-44a0-b2e4-32cf6683215b-operator-scripts\") pod \"cinder-db-create-g6h6c\" (UID: \"3989a09a-1771-44a0-b2e4-32cf6683215b\") " pod="openstack/cinder-db-create-g6h6c" Feb 03 08:57:09 crc kubenswrapper[4998]: I0203 08:57:09.081180 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bj8bp\" (UniqueName: \"kubernetes.io/projected/88e180ca-ddca-470a-9777-152fd462fc8b-kube-api-access-bj8bp\") pod \"cinder-71df-account-create-update-cgzrb\" (UID: \"88e180ca-ddca-470a-9777-152fd462fc8b\") " pod="openstack/cinder-71df-account-create-update-cgzrb" Feb 03 08:57:09 crc kubenswrapper[4998]: I0203 08:57:09.081260 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vq29\" (UniqueName: \"kubernetes.io/projected/3989a09a-1771-44a0-b2e4-32cf6683215b-kube-api-access-4vq29\") pod \"cinder-db-create-g6h6c\" (UID: \"3989a09a-1771-44a0-b2e4-32cf6683215b\") " pod="openstack/cinder-db-create-g6h6c" Feb 03 08:57:09 crc kubenswrapper[4998]: I0203 08:57:09.082077 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3989a09a-1771-44a0-b2e4-32cf6683215b-operator-scripts\") pod \"cinder-db-create-g6h6c\" (UID: \"3989a09a-1771-44a0-b2e4-32cf6683215b\") " pod="openstack/cinder-db-create-g6h6c" Feb 03 08:57:09 crc kubenswrapper[4998]: I0203 08:57:09.100828 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vq29\" (UniqueName: \"kubernetes.io/projected/3989a09a-1771-44a0-b2e4-32cf6683215b-kube-api-access-4vq29\") pod \"cinder-db-create-g6h6c\" (UID: \"3989a09a-1771-44a0-b2e4-32cf6683215b\") " pod="openstack/cinder-db-create-g6h6c" Feb 03 08:57:09 crc kubenswrapper[4998]: I0203 08:57:09.183388 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bj8bp\" (UniqueName: \"kubernetes.io/projected/88e180ca-ddca-470a-9777-152fd462fc8b-kube-api-access-bj8bp\") pod \"cinder-71df-account-create-update-cgzrb\" (UID: \"88e180ca-ddca-470a-9777-152fd462fc8b\") " pod="openstack/cinder-71df-account-create-update-cgzrb" Feb 03 08:57:09 crc kubenswrapper[4998]: I0203 08:57:09.183548 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/88e180ca-ddca-470a-9777-152fd462fc8b-operator-scripts\") pod 
\"cinder-71df-account-create-update-cgzrb\" (UID: \"88e180ca-ddca-470a-9777-152fd462fc8b\") " pod="openstack/cinder-71df-account-create-update-cgzrb" Feb 03 08:57:09 crc kubenswrapper[4998]: I0203 08:57:09.184328 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/88e180ca-ddca-470a-9777-152fd462fc8b-operator-scripts\") pod \"cinder-71df-account-create-update-cgzrb\" (UID: \"88e180ca-ddca-470a-9777-152fd462fc8b\") " pod="openstack/cinder-71df-account-create-update-cgzrb" Feb 03 08:57:09 crc kubenswrapper[4998]: I0203 08:57:09.199527 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bj8bp\" (UniqueName: \"kubernetes.io/projected/88e180ca-ddca-470a-9777-152fd462fc8b-kube-api-access-bj8bp\") pod \"cinder-71df-account-create-update-cgzrb\" (UID: \"88e180ca-ddca-470a-9777-152fd462fc8b\") " pod="openstack/cinder-71df-account-create-update-cgzrb" Feb 03 08:57:09 crc kubenswrapper[4998]: I0203 08:57:09.265907 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-g6h6c" Feb 03 08:57:09 crc kubenswrapper[4998]: I0203 08:57:09.365692 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-71df-account-create-update-cgzrb" Feb 03 08:57:09 crc kubenswrapper[4998]: I0203 08:57:09.753096 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-g6h6c"] Feb 03 08:57:09 crc kubenswrapper[4998]: I0203 08:57:09.831879 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-71df-account-create-update-cgzrb"] Feb 03 08:57:09 crc kubenswrapper[4998]: W0203 08:57:09.833336 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod88e180ca_ddca_470a_9777_152fd462fc8b.slice/crio-4b2b2d2e4f416c78402c877e82c83cdd9fac5a6ff3f1d69477a511923b9b38e6 WatchSource:0}: Error finding container 4b2b2d2e4f416c78402c877e82c83cdd9fac5a6ff3f1d69477a511923b9b38e6: Status 404 returned error can't find the container with id 4b2b2d2e4f416c78402c877e82c83cdd9fac5a6ff3f1d69477a511923b9b38e6 Feb 03 08:57:10 crc kubenswrapper[4998]: I0203 08:57:10.441512 4998 generic.go:334] "Generic (PLEG): container finished" podID="3989a09a-1771-44a0-b2e4-32cf6683215b" containerID="9fe4ee232680b2064b5d5b5af07787a530bff7fe8a504f89061cfe01348795cd" exitCode=0 Feb 03 08:57:10 crc kubenswrapper[4998]: I0203 08:57:10.441584 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-g6h6c" event={"ID":"3989a09a-1771-44a0-b2e4-32cf6683215b","Type":"ContainerDied","Data":"9fe4ee232680b2064b5d5b5af07787a530bff7fe8a504f89061cfe01348795cd"} Feb 03 08:57:10 crc kubenswrapper[4998]: I0203 08:57:10.441611 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-g6h6c" event={"ID":"3989a09a-1771-44a0-b2e4-32cf6683215b","Type":"ContainerStarted","Data":"b60710ad3fdb0491e05aa2e66a1cf529d41f4e4050c4fd77cce48365fc1bcb3c"} Feb 03 08:57:10 crc kubenswrapper[4998]: I0203 08:57:10.445405 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-71df-account-create-update-cgzrb" event={"ID":"88e180ca-ddca-470a-9777-152fd462fc8b","Type":"ContainerStarted","Data":"bf498f4b9b405e769594b3781e6efa028ea2c250195e5ba629ce57e1a62e7363"} Feb 03 08:57:10 crc kubenswrapper[4998]: I0203 08:57:10.445567 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/cinder-71df-account-create-update-cgzrb" event={"ID":"88e180ca-ddca-470a-9777-152fd462fc8b","Type":"ContainerStarted","Data":"4b2b2d2e4f416c78402c877e82c83cdd9fac5a6ff3f1d69477a511923b9b38e6"} Feb 03 08:57:10 crc kubenswrapper[4998]: I0203 08:57:10.474555 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-71df-account-create-update-cgzrb" podStartSLOduration=1.474539291 podStartE2EDuration="1.474539291s" podCreationTimestamp="2026-02-03 08:57:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 08:57:10.470720673 +0000 UTC m=+7868.757414509" watchObservedRunningTime="2026-02-03 08:57:10.474539291 +0000 UTC m=+7868.761233097" Feb 03 08:57:11 crc kubenswrapper[4998]: I0203 08:57:11.456052 4998 generic.go:334] "Generic (PLEG): container finished" podID="88e180ca-ddca-470a-9777-152fd462fc8b" containerID="bf498f4b9b405e769594b3781e6efa028ea2c250195e5ba629ce57e1a62e7363" exitCode=0 Feb 03 08:57:11 crc kubenswrapper[4998]: I0203 08:57:11.456135 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-71df-account-create-update-cgzrb" event={"ID":"88e180ca-ddca-470a-9777-152fd462fc8b","Type":"ContainerDied","Data":"bf498f4b9b405e769594b3781e6efa028ea2c250195e5ba629ce57e1a62e7363"} Feb 03 08:57:11 crc kubenswrapper[4998]: I0203 08:57:11.798389 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-g6h6c" Feb 03 08:57:11 crc kubenswrapper[4998]: I0203 08:57:11.829804 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3989a09a-1771-44a0-b2e4-32cf6683215b-operator-scripts\") pod \"3989a09a-1771-44a0-b2e4-32cf6683215b\" (UID: \"3989a09a-1771-44a0-b2e4-32cf6683215b\") " Feb 03 08:57:11 crc kubenswrapper[4998]: I0203 08:57:11.830020 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4vq29\" (UniqueName: \"kubernetes.io/projected/3989a09a-1771-44a0-b2e4-32cf6683215b-kube-api-access-4vq29\") pod \"3989a09a-1771-44a0-b2e4-32cf6683215b\" (UID: \"3989a09a-1771-44a0-b2e4-32cf6683215b\") " Feb 03 08:57:11 crc kubenswrapper[4998]: I0203 08:57:11.830666 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3989a09a-1771-44a0-b2e4-32cf6683215b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3989a09a-1771-44a0-b2e4-32cf6683215b" (UID: "3989a09a-1771-44a0-b2e4-32cf6683215b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:57:11 crc kubenswrapper[4998]: I0203 08:57:11.835677 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3989a09a-1771-44a0-b2e4-32cf6683215b-kube-api-access-4vq29" (OuterVolumeSpecName: "kube-api-access-4vq29") pod "3989a09a-1771-44a0-b2e4-32cf6683215b" (UID: "3989a09a-1771-44a0-b2e4-32cf6683215b"). InnerVolumeSpecName "kube-api-access-4vq29". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:57:11 crc kubenswrapper[4998]: I0203 08:57:11.932413 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4vq29\" (UniqueName: \"kubernetes.io/projected/3989a09a-1771-44a0-b2e4-32cf6683215b-kube-api-access-4vq29\") on node \"crc\" DevicePath \"\"" Feb 03 08:57:11 crc kubenswrapper[4998]: I0203 08:57:11.932480 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3989a09a-1771-44a0-b2e4-32cf6683215b-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 08:57:12 crc kubenswrapper[4998]: I0203 08:57:12.466069 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-g6h6c" event={"ID":"3989a09a-1771-44a0-b2e4-32cf6683215b","Type":"ContainerDied","Data":"b60710ad3fdb0491e05aa2e66a1cf529d41f4e4050c4fd77cce48365fc1bcb3c"} Feb 03 08:57:12 crc kubenswrapper[4998]: I0203 08:57:12.466115 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b60710ad3fdb0491e05aa2e66a1cf529d41f4e4050c4fd77cce48365fc1bcb3c" Feb 03 08:57:12 crc kubenswrapper[4998]: I0203 08:57:12.466106 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-g6h6c" Feb 03 08:57:12 crc kubenswrapper[4998]: I0203 08:57:12.754475 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 08:57:12 crc kubenswrapper[4998]: I0203 08:57:12.755159 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 08:57:12 crc kubenswrapper[4998]: I0203 08:57:12.812604 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-71df-account-create-update-cgzrb" Feb 03 08:57:12 crc kubenswrapper[4998]: I0203 08:57:12.847244 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/88e180ca-ddca-470a-9777-152fd462fc8b-operator-scripts\") pod \"88e180ca-ddca-470a-9777-152fd462fc8b\" (UID: \"88e180ca-ddca-470a-9777-152fd462fc8b\") " Feb 03 08:57:12 crc kubenswrapper[4998]: I0203 08:57:12.847401 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bj8bp\" (UniqueName: \"kubernetes.io/projected/88e180ca-ddca-470a-9777-152fd462fc8b-kube-api-access-bj8bp\") pod \"88e180ca-ddca-470a-9777-152fd462fc8b\" (UID: \"88e180ca-ddca-470a-9777-152fd462fc8b\") " Feb 03 08:57:12 crc kubenswrapper[4998]: I0203 08:57:12.847898 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/88e180ca-ddca-470a-9777-152fd462fc8b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "88e180ca-ddca-470a-9777-152fd462fc8b" (UID: "88e180ca-ddca-470a-9777-152fd462fc8b"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:57:12 crc kubenswrapper[4998]: I0203 08:57:12.848077 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/88e180ca-ddca-470a-9777-152fd462fc8b-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 08:57:12 crc kubenswrapper[4998]: I0203 08:57:12.850660 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88e180ca-ddca-470a-9777-152fd462fc8b-kube-api-access-bj8bp" (OuterVolumeSpecName: "kube-api-access-bj8bp") pod "88e180ca-ddca-470a-9777-152fd462fc8b" (UID: "88e180ca-ddca-470a-9777-152fd462fc8b"). InnerVolumeSpecName "kube-api-access-bj8bp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:57:12 crc kubenswrapper[4998]: I0203 08:57:12.949589 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bj8bp\" (UniqueName: \"kubernetes.io/projected/88e180ca-ddca-470a-9777-152fd462fc8b-kube-api-access-bj8bp\") on node \"crc\" DevicePath \"\"" Feb 03 08:57:13 crc kubenswrapper[4998]: I0203 08:57:13.475021 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-71df-account-create-update-cgzrb" event={"ID":"88e180ca-ddca-470a-9777-152fd462fc8b","Type":"ContainerDied","Data":"4b2b2d2e4f416c78402c877e82c83cdd9fac5a6ff3f1d69477a511923b9b38e6"} Feb 03 08:57:13 crc kubenswrapper[4998]: I0203 08:57:13.475066 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4b2b2d2e4f416c78402c877e82c83cdd9fac5a6ff3f1d69477a511923b9b38e6" Feb 03 08:57:13 crc kubenswrapper[4998]: I0203 08:57:13.475071 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-71df-account-create-update-cgzrb" Feb 03 08:57:14 crc kubenswrapper[4998]: I0203 08:57:14.264084 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-lv7qc"] Feb 03 08:57:14 crc kubenswrapper[4998]: E0203 08:57:14.264468 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88e180ca-ddca-470a-9777-152fd462fc8b" containerName="mariadb-account-create-update" Feb 03 08:57:14 crc kubenswrapper[4998]: I0203 08:57:14.264484 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="88e180ca-ddca-470a-9777-152fd462fc8b" containerName="mariadb-account-create-update" Feb 03 08:57:14 crc kubenswrapper[4998]: E0203 08:57:14.264497 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3989a09a-1771-44a0-b2e4-32cf6683215b" containerName="mariadb-database-create" Feb 03 08:57:14 crc kubenswrapper[4998]: I0203 08:57:14.264504 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="3989a09a-1771-44a0-b2e4-32cf6683215b" containerName="mariadb-database-create" Feb 03 08:57:14 crc kubenswrapper[4998]: I0203 08:57:14.264664 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="3989a09a-1771-44a0-b2e4-32cf6683215b" containerName="mariadb-database-create" Feb 03 08:57:14 crc kubenswrapper[4998]: I0203 08:57:14.264689 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="88e180ca-ddca-470a-9777-152fd462fc8b" containerName="mariadb-account-create-update" Feb 03 08:57:14 crc kubenswrapper[4998]: I0203 08:57:14.265284 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-lv7qc" Feb 03 08:57:14 crc kubenswrapper[4998]: I0203 08:57:14.267532 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-t5n59" Feb 03 08:57:14 crc kubenswrapper[4998]: I0203 08:57:14.268384 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Feb 03 08:57:14 crc kubenswrapper[4998]: I0203 08:57:14.273436 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Feb 03 08:57:14 crc kubenswrapper[4998]: I0203 08:57:14.277671 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-lv7qc"] Feb 03 08:57:14 crc kubenswrapper[4998]: I0203 08:57:14.376399 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ec901bf-03bf-4799-acc8-095e4196e78a-config-data\") pod \"cinder-db-sync-lv7qc\" (UID: \"2ec901bf-03bf-4799-acc8-095e4196e78a\") " pod="openstack/cinder-db-sync-lv7qc" Feb 03 08:57:14 crc kubenswrapper[4998]: I0203 08:57:14.376646 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ec901bf-03bf-4799-acc8-095e4196e78a-scripts\") pod \"cinder-db-sync-lv7qc\" (UID: \"2ec901bf-03bf-4799-acc8-095e4196e78a\") " pod="openstack/cinder-db-sync-lv7qc" Feb 03 08:57:14 crc kubenswrapper[4998]: I0203 08:57:14.376696 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2ec901bf-03bf-4799-acc8-095e4196e78a-db-sync-config-data\") pod \"cinder-db-sync-lv7qc\" (UID: \"2ec901bf-03bf-4799-acc8-095e4196e78a\") " pod="openstack/cinder-db-sync-lv7qc" Feb 03 08:57:14 crc kubenswrapper[4998]: I0203 08:57:14.376747 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2ec901bf-03bf-4799-acc8-095e4196e78a-etc-machine-id\") pod \"cinder-db-sync-lv7qc\" (UID: \"2ec901bf-03bf-4799-acc8-095e4196e78a\") " pod="openstack/cinder-db-sync-lv7qc" Feb 03 08:57:14 crc kubenswrapper[4998]: I0203 08:57:14.376770 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmrqt\" (UniqueName: \"kubernetes.io/projected/2ec901bf-03bf-4799-acc8-095e4196e78a-kube-api-access-zmrqt\") pod \"cinder-db-sync-lv7qc\" (UID: \"2ec901bf-03bf-4799-acc8-095e4196e78a\") " pod="openstack/cinder-db-sync-lv7qc" Feb 03 08:57:14 crc kubenswrapper[4998]: I0203 08:57:14.376937 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ec901bf-03bf-4799-acc8-095e4196e78a-combined-ca-bundle\") pod \"cinder-db-sync-lv7qc\" (UID: \"2ec901bf-03bf-4799-acc8-095e4196e78a\") " pod="openstack/cinder-db-sync-lv7qc" Feb 03 08:57:14 crc kubenswrapper[4998]: I0203 08:57:14.478295 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ec901bf-03bf-4799-acc8-095e4196e78a-config-data\") pod \"cinder-db-sync-lv7qc\" (UID: \"2ec901bf-03bf-4799-acc8-095e4196e78a\") " pod="openstack/cinder-db-sync-lv7qc" Feb 03 08:57:14 crc kubenswrapper[4998]: I0203 08:57:14.478341 4998 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ec901bf-03bf-4799-acc8-095e4196e78a-scripts\") pod \"cinder-db-sync-lv7qc\" (UID: \"2ec901bf-03bf-4799-acc8-095e4196e78a\") " pod="openstack/cinder-db-sync-lv7qc" Feb 03 08:57:14 crc kubenswrapper[4998]: I0203 08:57:14.478399 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2ec901bf-03bf-4799-acc8-095e4196e78a-db-sync-config-data\") pod \"cinder-db-sync-lv7qc\" (UID: \"2ec901bf-03bf-4799-acc8-095e4196e78a\") " pod="openstack/cinder-db-sync-lv7qc" Feb 03 08:57:14 crc kubenswrapper[4998]: I0203 08:57:14.478451 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2ec901bf-03bf-4799-acc8-095e4196e78a-etc-machine-id\") pod \"cinder-db-sync-lv7qc\" (UID: \"2ec901bf-03bf-4799-acc8-095e4196e78a\") " pod="openstack/cinder-db-sync-lv7qc" Feb 03 08:57:14 crc kubenswrapper[4998]: I0203 08:57:14.478471 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmrqt\" (UniqueName: \"kubernetes.io/projected/2ec901bf-03bf-4799-acc8-095e4196e78a-kube-api-access-zmrqt\") pod \"cinder-db-sync-lv7qc\" (UID: \"2ec901bf-03bf-4799-acc8-095e4196e78a\") " pod="openstack/cinder-db-sync-lv7qc" Feb 03 08:57:14 crc kubenswrapper[4998]: I0203 08:57:14.478497 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ec901bf-03bf-4799-acc8-095e4196e78a-combined-ca-bundle\") pod \"cinder-db-sync-lv7qc\" (UID: \"2ec901bf-03bf-4799-acc8-095e4196e78a\") " pod="openstack/cinder-db-sync-lv7qc" Feb 03 08:57:14 crc kubenswrapper[4998]: I0203 08:57:14.478560 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2ec901bf-03bf-4799-acc8-095e4196e78a-etc-machine-id\") pod \"cinder-db-sync-lv7qc\" (UID: \"2ec901bf-03bf-4799-acc8-095e4196e78a\") " pod="openstack/cinder-db-sync-lv7qc" Feb 03 08:57:14 crc kubenswrapper[4998]: I0203 08:57:14.484385 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ec901bf-03bf-4799-acc8-095e4196e78a-scripts\") pod \"cinder-db-sync-lv7qc\" (UID: \"2ec901bf-03bf-4799-acc8-095e4196e78a\") " pod="openstack/cinder-db-sync-lv7qc" Feb 03 08:57:14 crc kubenswrapper[4998]: I0203 08:57:14.488990 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2ec901bf-03bf-4799-acc8-095e4196e78a-db-sync-config-data\") pod \"cinder-db-sync-lv7qc\" (UID: \"2ec901bf-03bf-4799-acc8-095e4196e78a\") " pod="openstack/cinder-db-sync-lv7qc" Feb 03 08:57:14 crc kubenswrapper[4998]: I0203 08:57:14.489180 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ec901bf-03bf-4799-acc8-095e4196e78a-config-data\") pod \"cinder-db-sync-lv7qc\" (UID: \"2ec901bf-03bf-4799-acc8-095e4196e78a\") " pod="openstack/cinder-db-sync-lv7qc" Feb 03 08:57:14 crc kubenswrapper[4998]: I0203 08:57:14.492938 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ec901bf-03bf-4799-acc8-095e4196e78a-combined-ca-bundle\") pod \"cinder-db-sync-lv7qc\" (UID: \"2ec901bf-03bf-4799-acc8-095e4196e78a\") " 
pod="openstack/cinder-db-sync-lv7qc" Feb 03 08:57:14 crc kubenswrapper[4998]: I0203 08:57:14.499674 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmrqt\" (UniqueName: \"kubernetes.io/projected/2ec901bf-03bf-4799-acc8-095e4196e78a-kube-api-access-zmrqt\") pod \"cinder-db-sync-lv7qc\" (UID: \"2ec901bf-03bf-4799-acc8-095e4196e78a\") " pod="openstack/cinder-db-sync-lv7qc" Feb 03 08:57:14 crc kubenswrapper[4998]: I0203 08:57:14.586802 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-lv7qc" Feb 03 08:57:15 crc kubenswrapper[4998]: I0203 08:57:15.033567 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-lv7qc"] Feb 03 08:57:15 crc kubenswrapper[4998]: I0203 08:57:15.497286 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-lv7qc" event={"ID":"2ec901bf-03bf-4799-acc8-095e4196e78a","Type":"ContainerStarted","Data":"161c2c88138fc3807400c9dbb480dcc679fef9e5690bccee76112f3625d7c3c7"} Feb 03 08:57:32 crc kubenswrapper[4998]: I0203 08:57:32.659860 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-lv7qc" event={"ID":"2ec901bf-03bf-4799-acc8-095e4196e78a","Type":"ContainerStarted","Data":"8a40473c459da532d3dfc96afffb93bbf13a0a6ee5ba9dc18230bd0b6ba357e8"} Feb 03 08:57:32 crc kubenswrapper[4998]: I0203 08:57:32.676585 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-lv7qc" podStartSLOduration=1.681717303 podStartE2EDuration="18.676545625s" podCreationTimestamp="2026-02-03 08:57:14 +0000 UTC" firstStartedPulling="2026-02-03 08:57:15.03976667 +0000 UTC m=+7873.326460476" lastFinishedPulling="2026-02-03 08:57:32.034594992 +0000 UTC m=+7890.321288798" observedRunningTime="2026-02-03 08:57:32.675111394 +0000 UTC m=+7890.961805220" watchObservedRunningTime="2026-02-03 08:57:32.676545625 +0000 UTC m=+7890.963239431" Feb 03 08:57:35 crc kubenswrapper[4998]: I0203 08:57:35.694174 4998 generic.go:334] "Generic (PLEG): container finished" podID="2ec901bf-03bf-4799-acc8-095e4196e78a" containerID="8a40473c459da532d3dfc96afffb93bbf13a0a6ee5ba9dc18230bd0b6ba357e8" exitCode=0 Feb 03 08:57:35 crc kubenswrapper[4998]: I0203 08:57:35.694295 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-lv7qc" event={"ID":"2ec901bf-03bf-4799-acc8-095e4196e78a","Type":"ContainerDied","Data":"8a40473c459da532d3dfc96afffb93bbf13a0a6ee5ba9dc18230bd0b6ba357e8"} Feb 03 08:57:37 crc kubenswrapper[4998]: I0203 08:57:37.056726 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-lv7qc" Feb 03 08:57:37 crc kubenswrapper[4998]: I0203 08:57:37.095578 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zmrqt\" (UniqueName: \"kubernetes.io/projected/2ec901bf-03bf-4799-acc8-095e4196e78a-kube-api-access-zmrqt\") pod \"2ec901bf-03bf-4799-acc8-095e4196e78a\" (UID: \"2ec901bf-03bf-4799-acc8-095e4196e78a\") " Feb 03 08:57:37 crc kubenswrapper[4998]: I0203 08:57:37.095671 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ec901bf-03bf-4799-acc8-095e4196e78a-combined-ca-bundle\") pod \"2ec901bf-03bf-4799-acc8-095e4196e78a\" (UID: \"2ec901bf-03bf-4799-acc8-095e4196e78a\") " Feb 03 08:57:37 crc kubenswrapper[4998]: I0203 08:57:37.095817 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2ec901bf-03bf-4799-acc8-095e4196e78a-db-sync-config-data\") pod \"2ec901bf-03bf-4799-acc8-095e4196e78a\" (UID: \"2ec901bf-03bf-4799-acc8-095e4196e78a\") " Feb 03 08:57:37 crc kubenswrapper[4998]: I0203 08:57:37.096718 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ec901bf-03bf-4799-acc8-095e4196e78a-config-data\") pod \"2ec901bf-03bf-4799-acc8-095e4196e78a\" (UID: \"2ec901bf-03bf-4799-acc8-095e4196e78a\") " Feb 03 08:57:37 crc kubenswrapper[4998]: I0203 08:57:37.096939 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ec901bf-03bf-4799-acc8-095e4196e78a-scripts\") pod \"2ec901bf-03bf-4799-acc8-095e4196e78a\" (UID: \"2ec901bf-03bf-4799-acc8-095e4196e78a\") " Feb 03 08:57:37 crc kubenswrapper[4998]: I0203 08:57:37.097072 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2ec901bf-03bf-4799-acc8-095e4196e78a-etc-machine-id\") pod \"2ec901bf-03bf-4799-acc8-095e4196e78a\" (UID: \"2ec901bf-03bf-4799-acc8-095e4196e78a\") " Feb 03 08:57:37 crc kubenswrapper[4998]: I0203 08:57:37.097350 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2ec901bf-03bf-4799-acc8-095e4196e78a-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "2ec901bf-03bf-4799-acc8-095e4196e78a" (UID: "2ec901bf-03bf-4799-acc8-095e4196e78a"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 08:57:37 crc kubenswrapper[4998]: I0203 08:57:37.097697 4998 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2ec901bf-03bf-4799-acc8-095e4196e78a-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 03 08:57:37 crc kubenswrapper[4998]: I0203 08:57:37.101290 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ec901bf-03bf-4799-acc8-095e4196e78a-scripts" (OuterVolumeSpecName: "scripts") pod "2ec901bf-03bf-4799-acc8-095e4196e78a" (UID: "2ec901bf-03bf-4799-acc8-095e4196e78a"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:57:37 crc kubenswrapper[4998]: I0203 08:57:37.101489 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ec901bf-03bf-4799-acc8-095e4196e78a-kube-api-access-zmrqt" (OuterVolumeSpecName: "kube-api-access-zmrqt") pod "2ec901bf-03bf-4799-acc8-095e4196e78a" (UID: "2ec901bf-03bf-4799-acc8-095e4196e78a"). InnerVolumeSpecName "kube-api-access-zmrqt". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:57:37 crc kubenswrapper[4998]: I0203 08:57:37.101994 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ec901bf-03bf-4799-acc8-095e4196e78a-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "2ec901bf-03bf-4799-acc8-095e4196e78a" (UID: "2ec901bf-03bf-4799-acc8-095e4196e78a"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:57:37 crc kubenswrapper[4998]: I0203 08:57:37.130670 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ec901bf-03bf-4799-acc8-095e4196e78a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2ec901bf-03bf-4799-acc8-095e4196e78a" (UID: "2ec901bf-03bf-4799-acc8-095e4196e78a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:57:37 crc kubenswrapper[4998]: I0203 08:57:37.146613 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ec901bf-03bf-4799-acc8-095e4196e78a-config-data" (OuterVolumeSpecName: "config-data") pod "2ec901bf-03bf-4799-acc8-095e4196e78a" (UID: "2ec901bf-03bf-4799-acc8-095e4196e78a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:57:37 crc kubenswrapper[4998]: I0203 08:57:37.199500 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zmrqt\" (UniqueName: \"kubernetes.io/projected/2ec901bf-03bf-4799-acc8-095e4196e78a-kube-api-access-zmrqt\") on node \"crc\" DevicePath \"\"" Feb 03 08:57:37 crc kubenswrapper[4998]: I0203 08:57:37.199549 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ec901bf-03bf-4799-acc8-095e4196e78a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 08:57:37 crc kubenswrapper[4998]: I0203 08:57:37.199562 4998 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2ec901bf-03bf-4799-acc8-095e4196e78a-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 08:57:37 crc kubenswrapper[4998]: I0203 08:57:37.199574 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ec901bf-03bf-4799-acc8-095e4196e78a-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 08:57:37 crc kubenswrapper[4998]: I0203 08:57:37.199585 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ec901bf-03bf-4799-acc8-095e4196e78a-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 08:57:37 crc kubenswrapper[4998]: I0203 08:57:37.715041 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-lv7qc" event={"ID":"2ec901bf-03bf-4799-acc8-095e4196e78a","Type":"ContainerDied","Data":"161c2c88138fc3807400c9dbb480dcc679fef9e5690bccee76112f3625d7c3c7"} Feb 03 08:57:37 crc kubenswrapper[4998]: I0203 08:57:37.715118 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="161c2c88138fc3807400c9dbb480dcc679fef9e5690bccee76112f3625d7c3c7" Feb 03 08:57:37 crc kubenswrapper[4998]: I0203 08:57:37.715212 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-lv7qc" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.115998 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78c876cf45-ptl75"] Feb 03 08:57:38 crc kubenswrapper[4998]: E0203 08:57:38.116359 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ec901bf-03bf-4799-acc8-095e4196e78a" containerName="cinder-db-sync" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.116372 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ec901bf-03bf-4799-acc8-095e4196e78a" containerName="cinder-db-sync" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.116518 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ec901bf-03bf-4799-acc8-095e4196e78a" containerName="cinder-db-sync" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.117388 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78c876cf45-ptl75" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.138669 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78c876cf45-ptl75"] Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.216989 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c340901-5ea2-4862-b54d-241fc0f003dd-dns-svc\") pod \"dnsmasq-dns-78c876cf45-ptl75\" (UID: \"5c340901-5ea2-4862-b54d-241fc0f003dd\") " pod="openstack/dnsmasq-dns-78c876cf45-ptl75" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.217096 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c340901-5ea2-4862-b54d-241fc0f003dd-ovsdbserver-nb\") pod \"dnsmasq-dns-78c876cf45-ptl75\" (UID: \"5c340901-5ea2-4862-b54d-241fc0f003dd\") " pod="openstack/dnsmasq-dns-78c876cf45-ptl75" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.217124 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c340901-5ea2-4862-b54d-241fc0f003dd-config\") pod \"dnsmasq-dns-78c876cf45-ptl75\" (UID: \"5c340901-5ea2-4862-b54d-241fc0f003dd\") " pod="openstack/dnsmasq-dns-78c876cf45-ptl75" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.217197 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-grbb4\" (UniqueName: \"kubernetes.io/projected/5c340901-5ea2-4862-b54d-241fc0f003dd-kube-api-access-grbb4\") pod \"dnsmasq-dns-78c876cf45-ptl75\" (UID: \"5c340901-5ea2-4862-b54d-241fc0f003dd\") " pod="openstack/dnsmasq-dns-78c876cf45-ptl75" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.217328 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c340901-5ea2-4862-b54d-241fc0f003dd-ovsdbserver-sb\") pod \"dnsmasq-dns-78c876cf45-ptl75\" (UID: \"5c340901-5ea2-4862-b54d-241fc0f003dd\") " pod="openstack/dnsmasq-dns-78c876cf45-ptl75" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.269156 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.270567 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.272901 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.272977 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.272996 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.273344 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-t5n59" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.289665 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.319093 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c340901-5ea2-4862-b54d-241fc0f003dd-dns-svc\") pod \"dnsmasq-dns-78c876cf45-ptl75\" (UID: \"5c340901-5ea2-4862-b54d-241fc0f003dd\") " pod="openstack/dnsmasq-dns-78c876cf45-ptl75" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.319172 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"b8abf893-b316-43cc-a2dd-d3a5f14c7f98\") " pod="openstack/cinder-api-0" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.319206 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-scripts\") pod \"cinder-api-0\" (UID: \"b8abf893-b316-43cc-a2dd-d3a5f14c7f98\") " pod="openstack/cinder-api-0" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.319234 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-etc-machine-id\") pod \"cinder-api-0\" (UID: \"b8abf893-b316-43cc-a2dd-d3a5f14c7f98\") " pod="openstack/cinder-api-0" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.319260 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c340901-5ea2-4862-b54d-241fc0f003dd-ovsdbserver-nb\") pod \"dnsmasq-dns-78c876cf45-ptl75\" (UID: \"5c340901-5ea2-4862-b54d-241fc0f003dd\") " pod="openstack/dnsmasq-dns-78c876cf45-ptl75" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.319285 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c340901-5ea2-4862-b54d-241fc0f003dd-config\") pod \"dnsmasq-dns-78c876cf45-ptl75\" (UID: \"5c340901-5ea2-4862-b54d-241fc0f003dd\") " pod="openstack/dnsmasq-dns-78c876cf45-ptl75" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.319319 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-logs\") pod \"cinder-api-0\" (UID: \"b8abf893-b316-43cc-a2dd-d3a5f14c7f98\") " pod="openstack/cinder-api-0" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.319375 4998 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-grbb4\" (UniqueName: \"kubernetes.io/projected/5c340901-5ea2-4862-b54d-241fc0f003dd-kube-api-access-grbb4\") pod \"dnsmasq-dns-78c876cf45-ptl75\" (UID: \"5c340901-5ea2-4862-b54d-241fc0f003dd\") " pod="openstack/dnsmasq-dns-78c876cf45-ptl75" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.319419 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwbbt\" (UniqueName: \"kubernetes.io/projected/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-kube-api-access-gwbbt\") pod \"cinder-api-0\" (UID: \"b8abf893-b316-43cc-a2dd-d3a5f14c7f98\") " pod="openstack/cinder-api-0" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.319441 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-config-data-custom\") pod \"cinder-api-0\" (UID: \"b8abf893-b316-43cc-a2dd-d3a5f14c7f98\") " pod="openstack/cinder-api-0" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.319468 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c340901-5ea2-4862-b54d-241fc0f003dd-ovsdbserver-sb\") pod \"dnsmasq-dns-78c876cf45-ptl75\" (UID: \"5c340901-5ea2-4862-b54d-241fc0f003dd\") " pod="openstack/dnsmasq-dns-78c876cf45-ptl75" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.319490 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-config-data\") pod \"cinder-api-0\" (UID: \"b8abf893-b316-43cc-a2dd-d3a5f14c7f98\") " pod="openstack/cinder-api-0" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.320573 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c340901-5ea2-4862-b54d-241fc0f003dd-dns-svc\") pod \"dnsmasq-dns-78c876cf45-ptl75\" (UID: \"5c340901-5ea2-4862-b54d-241fc0f003dd\") " pod="openstack/dnsmasq-dns-78c876cf45-ptl75" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.321393 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c340901-5ea2-4862-b54d-241fc0f003dd-ovsdbserver-nb\") pod \"dnsmasq-dns-78c876cf45-ptl75\" (UID: \"5c340901-5ea2-4862-b54d-241fc0f003dd\") " pod="openstack/dnsmasq-dns-78c876cf45-ptl75" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.321800 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c340901-5ea2-4862-b54d-241fc0f003dd-config\") pod \"dnsmasq-dns-78c876cf45-ptl75\" (UID: \"5c340901-5ea2-4862-b54d-241fc0f003dd\") " pod="openstack/dnsmasq-dns-78c876cf45-ptl75" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.322043 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c340901-5ea2-4862-b54d-241fc0f003dd-ovsdbserver-sb\") pod \"dnsmasq-dns-78c876cf45-ptl75\" (UID: \"5c340901-5ea2-4862-b54d-241fc0f003dd\") " pod="openstack/dnsmasq-dns-78c876cf45-ptl75" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.354525 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-grbb4\" (UniqueName: 
\"kubernetes.io/projected/5c340901-5ea2-4862-b54d-241fc0f003dd-kube-api-access-grbb4\") pod \"dnsmasq-dns-78c876cf45-ptl75\" (UID: \"5c340901-5ea2-4862-b54d-241fc0f003dd\") " pod="openstack/dnsmasq-dns-78c876cf45-ptl75" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.420579 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"b8abf893-b316-43cc-a2dd-d3a5f14c7f98\") " pod="openstack/cinder-api-0" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.420626 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-scripts\") pod \"cinder-api-0\" (UID: \"b8abf893-b316-43cc-a2dd-d3a5f14c7f98\") " pod="openstack/cinder-api-0" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.420645 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-etc-machine-id\") pod \"cinder-api-0\" (UID: \"b8abf893-b316-43cc-a2dd-d3a5f14c7f98\") " pod="openstack/cinder-api-0" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.420688 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-logs\") pod \"cinder-api-0\" (UID: \"b8abf893-b316-43cc-a2dd-d3a5f14c7f98\") " pod="openstack/cinder-api-0" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.420740 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwbbt\" (UniqueName: \"kubernetes.io/projected/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-kube-api-access-gwbbt\") pod \"cinder-api-0\" (UID: \"b8abf893-b316-43cc-a2dd-d3a5f14c7f98\") " pod="openstack/cinder-api-0" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.420746 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-etc-machine-id\") pod \"cinder-api-0\" (UID: \"b8abf893-b316-43cc-a2dd-d3a5f14c7f98\") " pod="openstack/cinder-api-0" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.420761 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-config-data-custom\") pod \"cinder-api-0\" (UID: \"b8abf893-b316-43cc-a2dd-d3a5f14c7f98\") " pod="openstack/cinder-api-0" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.420783 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-config-data\") pod \"cinder-api-0\" (UID: \"b8abf893-b316-43cc-a2dd-d3a5f14c7f98\") " pod="openstack/cinder-api-0" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.421348 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-logs\") pod \"cinder-api-0\" (UID: \"b8abf893-b316-43cc-a2dd-d3a5f14c7f98\") " pod="openstack/cinder-api-0" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.424397 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-config-data-custom\") pod \"cinder-api-0\" (UID: \"b8abf893-b316-43cc-a2dd-d3a5f14c7f98\") " pod="openstack/cinder-api-0" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.425797 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-config-data\") pod \"cinder-api-0\" (UID: \"b8abf893-b316-43cc-a2dd-d3a5f14c7f98\") " pod="openstack/cinder-api-0" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.428213 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-scripts\") pod \"cinder-api-0\" (UID: \"b8abf893-b316-43cc-a2dd-d3a5f14c7f98\") " pod="openstack/cinder-api-0" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.433729 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78c876cf45-ptl75" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.437551 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"b8abf893-b316-43cc-a2dd-d3a5f14c7f98\") " pod="openstack/cinder-api-0" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.440384 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwbbt\" (UniqueName: \"kubernetes.io/projected/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-kube-api-access-gwbbt\") pod \"cinder-api-0\" (UID: \"b8abf893-b316-43cc-a2dd-d3a5f14c7f98\") " pod="openstack/cinder-api-0" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.585691 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Feb 03 08:57:38 crc kubenswrapper[4998]: I0203 08:57:38.982367 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78c876cf45-ptl75"] Feb 03 08:57:39 crc kubenswrapper[4998]: I0203 08:57:39.122820 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Feb 03 08:57:39 crc kubenswrapper[4998]: W0203 08:57:39.129176 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb8abf893_b316_43cc_a2dd_d3a5f14c7f98.slice/crio-34a9918254ccaa5093acf0d01685758eb798c972dddeea8810740b1c1575bcad WatchSource:0}: Error finding container 34a9918254ccaa5093acf0d01685758eb798c972dddeea8810740b1c1575bcad: Status 404 returned error can't find the container with id 34a9918254ccaa5093acf0d01685758eb798c972dddeea8810740b1c1575bcad Feb 03 08:57:39 crc kubenswrapper[4998]: I0203 08:57:39.741817 4998 generic.go:334] "Generic (PLEG): container finished" podID="5c340901-5ea2-4862-b54d-241fc0f003dd" containerID="99553d5c67392a927217632b92e88e5cbb2c0ac0d3b5a0e8a67e3143f2297f16" exitCode=0 Feb 03 08:57:39 crc kubenswrapper[4998]: I0203 08:57:39.741923 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78c876cf45-ptl75" event={"ID":"5c340901-5ea2-4862-b54d-241fc0f003dd","Type":"ContainerDied","Data":"99553d5c67392a927217632b92e88e5cbb2c0ac0d3b5a0e8a67e3143f2297f16"} Feb 03 08:57:39 crc kubenswrapper[4998]: I0203 08:57:39.742176 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78c876cf45-ptl75" event={"ID":"5c340901-5ea2-4862-b54d-241fc0f003dd","Type":"ContainerStarted","Data":"2d1e89eafda37be2f94f925735af1cee5009df6d1b193355c61427e0a08a4625"} Feb 03 08:57:39 crc kubenswrapper[4998]: I0203 08:57:39.743608 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b8abf893-b316-43cc-a2dd-d3a5f14c7f98","Type":"ContainerStarted","Data":"34a9918254ccaa5093acf0d01685758eb798c972dddeea8810740b1c1575bcad"} Feb 03 08:57:40 crc kubenswrapper[4998]: I0203 08:57:40.758838 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b8abf893-b316-43cc-a2dd-d3a5f14c7f98","Type":"ContainerStarted","Data":"db23495c4f7f722f78613e2c0eea1fa434167c457b7d6db0d3f71b12d6cc1f4f"} Feb 03 08:57:40 crc kubenswrapper[4998]: I0203 08:57:40.759210 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b8abf893-b316-43cc-a2dd-d3a5f14c7f98","Type":"ContainerStarted","Data":"6c06ef6866765c4030713d6e39fbad7a7e57e13d41bd2fe16eb5ad010f423b66"} Feb 03 08:57:40 crc kubenswrapper[4998]: I0203 08:57:40.760806 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Feb 03 08:57:40 crc kubenswrapper[4998]: I0203 08:57:40.763191 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78c876cf45-ptl75" event={"ID":"5c340901-5ea2-4862-b54d-241fc0f003dd","Type":"ContainerStarted","Data":"41947ca52c7460b59dc8c3a1ad40a8b0f54ed7e5ae7dd40bbbaf241ff7481d18"} Feb 03 08:57:40 crc kubenswrapper[4998]: I0203 08:57:40.763441 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-78c876cf45-ptl75" Feb 03 08:57:40 crc kubenswrapper[4998]: I0203 08:57:40.795987 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=2.795964778 
podStartE2EDuration="2.795964778s" podCreationTimestamp="2026-02-03 08:57:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 08:57:40.78866271 +0000 UTC m=+7899.075356536" watchObservedRunningTime="2026-02-03 08:57:40.795964778 +0000 UTC m=+7899.082658584" Feb 03 08:57:42 crc kubenswrapper[4998]: I0203 08:57:42.754404 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 08:57:42 crc kubenswrapper[4998]: I0203 08:57:42.755733 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 08:57:42 crc kubenswrapper[4998]: I0203 08:57:42.755819 4998 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" Feb 03 08:57:42 crc kubenswrapper[4998]: I0203 08:57:42.756624 4998 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"904a99122771df02837e79d9f49d4714e829d6f397087b6959b9dca5b129d115"} pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 03 08:57:42 crc kubenswrapper[4998]: I0203 08:57:42.756691 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" containerID="cri-o://904a99122771df02837e79d9f49d4714e829d6f397087b6959b9dca5b129d115" gracePeriod=600 Feb 03 08:57:43 crc kubenswrapper[4998]: I0203 08:57:43.791514 4998 generic.go:334] "Generic (PLEG): container finished" podID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerID="904a99122771df02837e79d9f49d4714e829d6f397087b6959b9dca5b129d115" exitCode=0 Feb 03 08:57:43 crc kubenswrapper[4998]: I0203 08:57:43.791531 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerDied","Data":"904a99122771df02837e79d9f49d4714e829d6f397087b6959b9dca5b129d115"} Feb 03 08:57:43 crc kubenswrapper[4998]: I0203 08:57:43.791927 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerStarted","Data":"f0960bf93408fbd80b1af606d0b0b825518c12f926ca6f6b8748be736ddfda77"} Feb 03 08:57:43 crc kubenswrapper[4998]: I0203 08:57:43.791976 4998 scope.go:117] "RemoveContainer" containerID="8b73a3adb01550fb0197fbf3c3543111a8f8997fd8a03538adf5471f08cef4d6" Feb 03 08:57:43 crc kubenswrapper[4998]: I0203 08:57:43.821887 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-78c876cf45-ptl75" podStartSLOduration=5.821869834 podStartE2EDuration="5.821869834s" podCreationTimestamp="2026-02-03 08:57:38 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 08:57:40.825123685 +0000 UTC m=+7899.111817511" watchObservedRunningTime="2026-02-03 08:57:43.821869834 +0000 UTC m=+7902.108563640" Feb 03 08:57:48 crc kubenswrapper[4998]: I0203 08:57:48.439591 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-78c876cf45-ptl75" Feb 03 08:57:48 crc kubenswrapper[4998]: I0203 08:57:48.527840 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6464cf648f-j45ff"] Feb 03 08:57:48 crc kubenswrapper[4998]: I0203 08:57:48.528122 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6464cf648f-j45ff" podUID="45377673-bf8c-4cfe-af9a-15c68705994f" containerName="dnsmasq-dns" containerID="cri-o://b7bed04c1ea177b81630aab76718c0f2907612178db1ed57b1f24cf90460f077" gracePeriod=10 Feb 03 08:57:48 crc kubenswrapper[4998]: I0203 08:57:48.865742 4998 generic.go:334] "Generic (PLEG): container finished" podID="45377673-bf8c-4cfe-af9a-15c68705994f" containerID="b7bed04c1ea177b81630aab76718c0f2907612178db1ed57b1f24cf90460f077" exitCode=0 Feb 03 08:57:48 crc kubenswrapper[4998]: I0203 08:57:48.865818 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6464cf648f-j45ff" event={"ID":"45377673-bf8c-4cfe-af9a-15c68705994f","Type":"ContainerDied","Data":"b7bed04c1ea177b81630aab76718c0f2907612178db1ed57b1f24cf90460f077"} Feb 03 08:57:49 crc kubenswrapper[4998]: I0203 08:57:49.612008 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6464cf648f-j45ff" Feb 03 08:57:49 crc kubenswrapper[4998]: I0203 08:57:49.795244 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/45377673-bf8c-4cfe-af9a-15c68705994f-dns-svc\") pod \"45377673-bf8c-4cfe-af9a-15c68705994f\" (UID: \"45377673-bf8c-4cfe-af9a-15c68705994f\") " Feb 03 08:57:49 crc kubenswrapper[4998]: I0203 08:57:49.795302 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/45377673-bf8c-4cfe-af9a-15c68705994f-config\") pod \"45377673-bf8c-4cfe-af9a-15c68705994f\" (UID: \"45377673-bf8c-4cfe-af9a-15c68705994f\") " Feb 03 08:57:49 crc kubenswrapper[4998]: I0203 08:57:49.795339 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/45377673-bf8c-4cfe-af9a-15c68705994f-ovsdbserver-sb\") pod \"45377673-bf8c-4cfe-af9a-15c68705994f\" (UID: \"45377673-bf8c-4cfe-af9a-15c68705994f\") " Feb 03 08:57:49 crc kubenswrapper[4998]: I0203 08:57:49.795458 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4qjcx\" (UniqueName: \"kubernetes.io/projected/45377673-bf8c-4cfe-af9a-15c68705994f-kube-api-access-4qjcx\") pod \"45377673-bf8c-4cfe-af9a-15c68705994f\" (UID: \"45377673-bf8c-4cfe-af9a-15c68705994f\") " Feb 03 08:57:49 crc kubenswrapper[4998]: I0203 08:57:49.795630 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/45377673-bf8c-4cfe-af9a-15c68705994f-ovsdbserver-nb\") pod \"45377673-bf8c-4cfe-af9a-15c68705994f\" (UID: \"45377673-bf8c-4cfe-af9a-15c68705994f\") " Feb 03 08:57:49 crc kubenswrapper[4998]: I0203 08:57:49.814235 4998 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45377673-bf8c-4cfe-af9a-15c68705994f-kube-api-access-4qjcx" (OuterVolumeSpecName: "kube-api-access-4qjcx") pod "45377673-bf8c-4cfe-af9a-15c68705994f" (UID: "45377673-bf8c-4cfe-af9a-15c68705994f"). InnerVolumeSpecName "kube-api-access-4qjcx". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:57:49 crc kubenswrapper[4998]: I0203 08:57:49.845582 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/45377673-bf8c-4cfe-af9a-15c68705994f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "45377673-bf8c-4cfe-af9a-15c68705994f" (UID: "45377673-bf8c-4cfe-af9a-15c68705994f"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:57:49 crc kubenswrapper[4998]: I0203 08:57:49.870725 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/45377673-bf8c-4cfe-af9a-15c68705994f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "45377673-bf8c-4cfe-af9a-15c68705994f" (UID: "45377673-bf8c-4cfe-af9a-15c68705994f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:57:49 crc kubenswrapper[4998]: I0203 08:57:49.879405 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6464cf648f-j45ff" event={"ID":"45377673-bf8c-4cfe-af9a-15c68705994f","Type":"ContainerDied","Data":"8665024ab03f85f2f564146333e10139af2ed09d89f839a4066ce8c5444975ad"} Feb 03 08:57:49 crc kubenswrapper[4998]: I0203 08:57:49.879493 4998 scope.go:117] "RemoveContainer" containerID="b7bed04c1ea177b81630aab76718c0f2907612178db1ed57b1f24cf90460f077" Feb 03 08:57:49 crc kubenswrapper[4998]: I0203 08:57:49.879682 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6464cf648f-j45ff" Feb 03 08:57:49 crc kubenswrapper[4998]: I0203 08:57:49.888661 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/45377673-bf8c-4cfe-af9a-15c68705994f-config" (OuterVolumeSpecName: "config") pod "45377673-bf8c-4cfe-af9a-15c68705994f" (UID: "45377673-bf8c-4cfe-af9a-15c68705994f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:57:49 crc kubenswrapper[4998]: I0203 08:57:49.898742 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4qjcx\" (UniqueName: \"kubernetes.io/projected/45377673-bf8c-4cfe-af9a-15c68705994f-kube-api-access-4qjcx\") on node \"crc\" DevicePath \"\"" Feb 03 08:57:49 crc kubenswrapper[4998]: I0203 08:57:49.898791 4998 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/45377673-bf8c-4cfe-af9a-15c68705994f-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 08:57:49 crc kubenswrapper[4998]: I0203 08:57:49.898804 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/45377673-bf8c-4cfe-af9a-15c68705994f-config\") on node \"crc\" DevicePath \"\"" Feb 03 08:57:49 crc kubenswrapper[4998]: I0203 08:57:49.898817 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/45377673-bf8c-4cfe-af9a-15c68705994f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 03 08:57:49 crc kubenswrapper[4998]: I0203 08:57:49.898926 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/45377673-bf8c-4cfe-af9a-15c68705994f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "45377673-bf8c-4cfe-af9a-15c68705994f" (UID: "45377673-bf8c-4cfe-af9a-15c68705994f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:57:49 crc kubenswrapper[4998]: I0203 08:57:49.970184 4998 scope.go:117] "RemoveContainer" containerID="a32a340594b881eb8501a2057ca5cc99672e5a3fbe5aa644451602cc27282ea5" Feb 03 08:57:50 crc kubenswrapper[4998]: I0203 08:57:49.999984 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/45377673-bf8c-4cfe-af9a-15c68705994f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 03 08:57:50 crc kubenswrapper[4998]: I0203 08:57:50.234422 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6464cf648f-j45ff"] Feb 03 08:57:50 crc kubenswrapper[4998]: I0203 08:57:50.245792 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6464cf648f-j45ff"] Feb 03 08:57:50 crc kubenswrapper[4998]: I0203 08:57:50.437509 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="45377673-bf8c-4cfe-af9a-15c68705994f" path="/var/lib/kubelet/pods/45377673-bf8c-4cfe-af9a-15c68705994f/volumes" Feb 03 08:57:50 crc kubenswrapper[4998]: I0203 08:57:50.937252 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Feb 03 08:58:08 crc kubenswrapper[4998]: I0203 08:58:08.534973 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Feb 03 08:58:08 crc kubenswrapper[4998]: E0203 08:58:08.536005 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45377673-bf8c-4cfe-af9a-15c68705994f" containerName="dnsmasq-dns" Feb 03 08:58:08 crc kubenswrapper[4998]: I0203 08:58:08.536024 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="45377673-bf8c-4cfe-af9a-15c68705994f" containerName="dnsmasq-dns" Feb 03 08:58:08 crc kubenswrapper[4998]: E0203 08:58:08.536053 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45377673-bf8c-4cfe-af9a-15c68705994f" containerName="init" Feb 03 08:58:08 crc kubenswrapper[4998]: I0203 08:58:08.536061 4998 
state_mem.go:107] "Deleted CPUSet assignment" podUID="45377673-bf8c-4cfe-af9a-15c68705994f" containerName="init" Feb 03 08:58:08 crc kubenswrapper[4998]: I0203 08:58:08.536295 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="45377673-bf8c-4cfe-af9a-15c68705994f" containerName="dnsmasq-dns" Feb 03 08:58:08 crc kubenswrapper[4998]: I0203 08:58:08.537538 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 03 08:58:08 crc kubenswrapper[4998]: I0203 08:58:08.540969 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Feb 03 08:58:08 crc kubenswrapper[4998]: I0203 08:58:08.554573 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 03 08:58:08 crc kubenswrapper[4998]: I0203 08:58:08.659759 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5512dd2d-7231-498f-90de-75cab8386e5f-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"5512dd2d-7231-498f-90de-75cab8386e5f\") " pod="openstack/cinder-scheduler-0" Feb 03 08:58:08 crc kubenswrapper[4998]: I0203 08:58:08.659833 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5512dd2d-7231-498f-90de-75cab8386e5f-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"5512dd2d-7231-498f-90de-75cab8386e5f\") " pod="openstack/cinder-scheduler-0" Feb 03 08:58:08 crc kubenswrapper[4998]: I0203 08:58:08.659946 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5512dd2d-7231-498f-90de-75cab8386e5f-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"5512dd2d-7231-498f-90de-75cab8386e5f\") " pod="openstack/cinder-scheduler-0" Feb 03 08:58:08 crc kubenswrapper[4998]: I0203 08:58:08.660021 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5512dd2d-7231-498f-90de-75cab8386e5f-config-data\") pod \"cinder-scheduler-0\" (UID: \"5512dd2d-7231-498f-90de-75cab8386e5f\") " pod="openstack/cinder-scheduler-0" Feb 03 08:58:08 crc kubenswrapper[4998]: I0203 08:58:08.660063 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5512dd2d-7231-498f-90de-75cab8386e5f-scripts\") pod \"cinder-scheduler-0\" (UID: \"5512dd2d-7231-498f-90de-75cab8386e5f\") " pod="openstack/cinder-scheduler-0" Feb 03 08:58:08 crc kubenswrapper[4998]: I0203 08:58:08.660111 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zg2pp\" (UniqueName: \"kubernetes.io/projected/5512dd2d-7231-498f-90de-75cab8386e5f-kube-api-access-zg2pp\") pod \"cinder-scheduler-0\" (UID: \"5512dd2d-7231-498f-90de-75cab8386e5f\") " pod="openstack/cinder-scheduler-0" Feb 03 08:58:08 crc kubenswrapper[4998]: I0203 08:58:08.761730 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5512dd2d-7231-498f-90de-75cab8386e5f-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"5512dd2d-7231-498f-90de-75cab8386e5f\") " pod="openstack/cinder-scheduler-0" Feb 03 08:58:08 crc kubenswrapper[4998]: 
I0203 08:58:08.762157 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5512dd2d-7231-498f-90de-75cab8386e5f-config-data\") pod \"cinder-scheduler-0\" (UID: \"5512dd2d-7231-498f-90de-75cab8386e5f\") " pod="openstack/cinder-scheduler-0" Feb 03 08:58:08 crc kubenswrapper[4998]: I0203 08:58:08.762198 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5512dd2d-7231-498f-90de-75cab8386e5f-scripts\") pod \"cinder-scheduler-0\" (UID: \"5512dd2d-7231-498f-90de-75cab8386e5f\") " pod="openstack/cinder-scheduler-0" Feb 03 08:58:08 crc kubenswrapper[4998]: I0203 08:58:08.762251 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zg2pp\" (UniqueName: \"kubernetes.io/projected/5512dd2d-7231-498f-90de-75cab8386e5f-kube-api-access-zg2pp\") pod \"cinder-scheduler-0\" (UID: \"5512dd2d-7231-498f-90de-75cab8386e5f\") " pod="openstack/cinder-scheduler-0" Feb 03 08:58:08 crc kubenswrapper[4998]: I0203 08:58:08.762353 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5512dd2d-7231-498f-90de-75cab8386e5f-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"5512dd2d-7231-498f-90de-75cab8386e5f\") " pod="openstack/cinder-scheduler-0" Feb 03 08:58:08 crc kubenswrapper[4998]: I0203 08:58:08.762383 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5512dd2d-7231-498f-90de-75cab8386e5f-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"5512dd2d-7231-498f-90de-75cab8386e5f\") " pod="openstack/cinder-scheduler-0" Feb 03 08:58:08 crc kubenswrapper[4998]: I0203 08:58:08.762478 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5512dd2d-7231-498f-90de-75cab8386e5f-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"5512dd2d-7231-498f-90de-75cab8386e5f\") " pod="openstack/cinder-scheduler-0" Feb 03 08:58:08 crc kubenswrapper[4998]: I0203 08:58:08.768304 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5512dd2d-7231-498f-90de-75cab8386e5f-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"5512dd2d-7231-498f-90de-75cab8386e5f\") " pod="openstack/cinder-scheduler-0" Feb 03 08:58:08 crc kubenswrapper[4998]: I0203 08:58:08.768363 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5512dd2d-7231-498f-90de-75cab8386e5f-scripts\") pod \"cinder-scheduler-0\" (UID: \"5512dd2d-7231-498f-90de-75cab8386e5f\") " pod="openstack/cinder-scheduler-0" Feb 03 08:58:08 crc kubenswrapper[4998]: I0203 08:58:08.768658 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5512dd2d-7231-498f-90de-75cab8386e5f-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"5512dd2d-7231-498f-90de-75cab8386e5f\") " pod="openstack/cinder-scheduler-0" Feb 03 08:58:08 crc kubenswrapper[4998]: I0203 08:58:08.771390 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5512dd2d-7231-498f-90de-75cab8386e5f-config-data\") pod \"cinder-scheduler-0\" (UID: \"5512dd2d-7231-498f-90de-75cab8386e5f\") " 
pod="openstack/cinder-scheduler-0" Feb 03 08:58:08 crc kubenswrapper[4998]: I0203 08:58:08.781590 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zg2pp\" (UniqueName: \"kubernetes.io/projected/5512dd2d-7231-498f-90de-75cab8386e5f-kube-api-access-zg2pp\") pod \"cinder-scheduler-0\" (UID: \"5512dd2d-7231-498f-90de-75cab8386e5f\") " pod="openstack/cinder-scheduler-0" Feb 03 08:58:08 crc kubenswrapper[4998]: I0203 08:58:08.868006 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 03 08:58:09 crc kubenswrapper[4998]: I0203 08:58:09.306988 4998 scope.go:117] "RemoveContainer" containerID="1dbf921ced13b071febeffeb017ecaec85acbe375cedcbeaed8420a49f8cd91a" Feb 03 08:58:09 crc kubenswrapper[4998]: I0203 08:58:09.328870 4998 scope.go:117] "RemoveContainer" containerID="3279509fc81e6170b74d691581dbe0de32262389c25bd7a75fdde0600b4e6762" Feb 03 08:58:09 crc kubenswrapper[4998]: I0203 08:58:09.396805 4998 scope.go:117] "RemoveContainer" containerID="bbd6a7a2a751e5524188ba26d5d24f456b6f1837cf7780ea64e8bd515a44e782" Feb 03 08:58:09 crc kubenswrapper[4998]: I0203 08:58:09.415946 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 03 08:58:10 crc kubenswrapper[4998]: I0203 08:58:10.095343 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"5512dd2d-7231-498f-90de-75cab8386e5f","Type":"ContainerStarted","Data":"43a62f7e2fe4405f8b080b4a96afbad859ac0ee345f7e0e60585b3f6513de026"} Feb 03 08:58:10 crc kubenswrapper[4998]: I0203 08:58:10.130966 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Feb 03 08:58:10 crc kubenswrapper[4998]: I0203 08:58:10.131180 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="b8abf893-b316-43cc-a2dd-d3a5f14c7f98" containerName="cinder-api-log" containerID="cri-o://6c06ef6866765c4030713d6e39fbad7a7e57e13d41bd2fe16eb5ad010f423b66" gracePeriod=30 Feb 03 08:58:10 crc kubenswrapper[4998]: I0203 08:58:10.132885 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="b8abf893-b316-43cc-a2dd-d3a5f14c7f98" containerName="cinder-api" containerID="cri-o://db23495c4f7f722f78613e2c0eea1fa434167c457b7d6db0d3f71b12d6cc1f4f" gracePeriod=30 Feb 03 08:58:11 crc kubenswrapper[4998]: I0203 08:58:11.105604 4998 generic.go:334] "Generic (PLEG): container finished" podID="b8abf893-b316-43cc-a2dd-d3a5f14c7f98" containerID="6c06ef6866765c4030713d6e39fbad7a7e57e13d41bd2fe16eb5ad010f423b66" exitCode=143 Feb 03 08:58:11 crc kubenswrapper[4998]: I0203 08:58:11.105663 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b8abf893-b316-43cc-a2dd-d3a5f14c7f98","Type":"ContainerDied","Data":"6c06ef6866765c4030713d6e39fbad7a7e57e13d41bd2fe16eb5ad010f423b66"} Feb 03 08:58:11 crc kubenswrapper[4998]: I0203 08:58:11.108364 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"5512dd2d-7231-498f-90de-75cab8386e5f","Type":"ContainerStarted","Data":"74a169217d458cd67b02090d748ad25b74aee08ae5aaf825e395f022fccd8dde"} Feb 03 08:58:11 crc kubenswrapper[4998]: I0203 08:58:11.108401 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" 
event={"ID":"5512dd2d-7231-498f-90de-75cab8386e5f","Type":"ContainerStarted","Data":"1e788c2a8ea7642091b58e7316ce33ea71683229e14529ad94af72ba6a79d536"} Feb 03 08:58:11 crc kubenswrapper[4998]: I0203 08:58:11.129386 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=2.808038567 podStartE2EDuration="3.129365873s" podCreationTimestamp="2026-02-03 08:58:08 +0000 UTC" firstStartedPulling="2026-02-03 08:58:09.4285649 +0000 UTC m=+7927.715258716" lastFinishedPulling="2026-02-03 08:58:09.749892176 +0000 UTC m=+7928.036586022" observedRunningTime="2026-02-03 08:58:11.125976576 +0000 UTC m=+7929.412670392" watchObservedRunningTime="2026-02-03 08:58:11.129365873 +0000 UTC m=+7929.416059699" Feb 03 08:58:13 crc kubenswrapper[4998]: I0203 08:58:13.715474 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Feb 03 08:58:13 crc kubenswrapper[4998]: I0203 08:58:13.863425 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gwbbt\" (UniqueName: \"kubernetes.io/projected/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-kube-api-access-gwbbt\") pod \"b8abf893-b316-43cc-a2dd-d3a5f14c7f98\" (UID: \"b8abf893-b316-43cc-a2dd-d3a5f14c7f98\") " Feb 03 08:58:13 crc kubenswrapper[4998]: I0203 08:58:13.863547 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-config-data-custom\") pod \"b8abf893-b316-43cc-a2dd-d3a5f14c7f98\" (UID: \"b8abf893-b316-43cc-a2dd-d3a5f14c7f98\") " Feb 03 08:58:13 crc kubenswrapper[4998]: I0203 08:58:13.863618 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-scripts\") pod \"b8abf893-b316-43cc-a2dd-d3a5f14c7f98\" (UID: \"b8abf893-b316-43cc-a2dd-d3a5f14c7f98\") " Feb 03 08:58:13 crc kubenswrapper[4998]: I0203 08:58:13.863689 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-etc-machine-id\") pod \"b8abf893-b316-43cc-a2dd-d3a5f14c7f98\" (UID: \"b8abf893-b316-43cc-a2dd-d3a5f14c7f98\") " Feb 03 08:58:13 crc kubenswrapper[4998]: I0203 08:58:13.863727 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-combined-ca-bundle\") pod \"b8abf893-b316-43cc-a2dd-d3a5f14c7f98\" (UID: \"b8abf893-b316-43cc-a2dd-d3a5f14c7f98\") " Feb 03 08:58:13 crc kubenswrapper[4998]: I0203 08:58:13.863767 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-config-data\") pod \"b8abf893-b316-43cc-a2dd-d3a5f14c7f98\" (UID: \"b8abf893-b316-43cc-a2dd-d3a5f14c7f98\") " Feb 03 08:58:13 crc kubenswrapper[4998]: I0203 08:58:13.863809 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-logs\") pod \"b8abf893-b316-43cc-a2dd-d3a5f14c7f98\" (UID: \"b8abf893-b316-43cc-a2dd-d3a5f14c7f98\") " Feb 03 08:58:13 crc kubenswrapper[4998]: I0203 08:58:13.864587 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/host-path/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "b8abf893-b316-43cc-a2dd-d3a5f14c7f98" (UID: "b8abf893-b316-43cc-a2dd-d3a5f14c7f98"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 08:58:13 crc kubenswrapper[4998]: I0203 08:58:13.864646 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-logs" (OuterVolumeSpecName: "logs") pod "b8abf893-b316-43cc-a2dd-d3a5f14c7f98" (UID: "b8abf893-b316-43cc-a2dd-d3a5f14c7f98"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:58:13 crc kubenswrapper[4998]: I0203 08:58:13.868731 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-kube-api-access-gwbbt" (OuterVolumeSpecName: "kube-api-access-gwbbt") pod "b8abf893-b316-43cc-a2dd-d3a5f14c7f98" (UID: "b8abf893-b316-43cc-a2dd-d3a5f14c7f98"). InnerVolumeSpecName "kube-api-access-gwbbt". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:58:13 crc kubenswrapper[4998]: I0203 08:58:13.868732 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Feb 03 08:58:13 crc kubenswrapper[4998]: I0203 08:58:13.872973 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "b8abf893-b316-43cc-a2dd-d3a5f14c7f98" (UID: "b8abf893-b316-43cc-a2dd-d3a5f14c7f98"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:58:13 crc kubenswrapper[4998]: I0203 08:58:13.875866 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-scripts" (OuterVolumeSpecName: "scripts") pod "b8abf893-b316-43cc-a2dd-d3a5f14c7f98" (UID: "b8abf893-b316-43cc-a2dd-d3a5f14c7f98"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:58:13 crc kubenswrapper[4998]: I0203 08:58:13.903405 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b8abf893-b316-43cc-a2dd-d3a5f14c7f98" (UID: "b8abf893-b316-43cc-a2dd-d3a5f14c7f98"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:58:13 crc kubenswrapper[4998]: I0203 08:58:13.944031 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-config-data" (OuterVolumeSpecName: "config-data") pod "b8abf893-b316-43cc-a2dd-d3a5f14c7f98" (UID: "b8abf893-b316-43cc-a2dd-d3a5f14c7f98"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:58:13 crc kubenswrapper[4998]: I0203 08:58:13.965859 4998 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-logs\") on node \"crc\" DevicePath \"\"" Feb 03 08:58:13 crc kubenswrapper[4998]: I0203 08:58:13.965901 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gwbbt\" (UniqueName: \"kubernetes.io/projected/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-kube-api-access-gwbbt\") on node \"crc\" DevicePath \"\"" Feb 03 08:58:13 crc kubenswrapper[4998]: I0203 08:58:13.965915 4998 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 03 08:58:13 crc kubenswrapper[4998]: I0203 08:58:13.965927 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 08:58:13 crc kubenswrapper[4998]: I0203 08:58:13.965939 4998 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 03 08:58:13 crc kubenswrapper[4998]: I0203 08:58:13.965952 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 08:58:13 crc kubenswrapper[4998]: I0203 08:58:13.965963 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8abf893-b316-43cc-a2dd-d3a5f14c7f98-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.132466 4998 generic.go:334] "Generic (PLEG): container finished" podID="b8abf893-b316-43cc-a2dd-d3a5f14c7f98" containerID="db23495c4f7f722f78613e2c0eea1fa434167c457b7d6db0d3f71b12d6cc1f4f" exitCode=0 Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.132519 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b8abf893-b316-43cc-a2dd-d3a5f14c7f98","Type":"ContainerDied","Data":"db23495c4f7f722f78613e2c0eea1fa434167c457b7d6db0d3f71b12d6cc1f4f"} Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.132571 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b8abf893-b316-43cc-a2dd-d3a5f14c7f98","Type":"ContainerDied","Data":"34a9918254ccaa5093acf0d01685758eb798c972dddeea8810740b1c1575bcad"} Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.132570 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.132589 4998 scope.go:117] "RemoveContainer" containerID="db23495c4f7f722f78613e2c0eea1fa434167c457b7d6db0d3f71b12d6cc1f4f" Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.157859 4998 scope.go:117] "RemoveContainer" containerID="6c06ef6866765c4030713d6e39fbad7a7e57e13d41bd2fe16eb5ad010f423b66" Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.169822 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.182194 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.188156 4998 scope.go:117] "RemoveContainer" containerID="db23495c4f7f722f78613e2c0eea1fa434167c457b7d6db0d3f71b12d6cc1f4f" Feb 03 08:58:14 crc kubenswrapper[4998]: E0203 08:58:14.188575 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db23495c4f7f722f78613e2c0eea1fa434167c457b7d6db0d3f71b12d6cc1f4f\": container with ID starting with db23495c4f7f722f78613e2c0eea1fa434167c457b7d6db0d3f71b12d6cc1f4f not found: ID does not exist" containerID="db23495c4f7f722f78613e2c0eea1fa434167c457b7d6db0d3f71b12d6cc1f4f" Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.188611 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db23495c4f7f722f78613e2c0eea1fa434167c457b7d6db0d3f71b12d6cc1f4f"} err="failed to get container status \"db23495c4f7f722f78613e2c0eea1fa434167c457b7d6db0d3f71b12d6cc1f4f\": rpc error: code = NotFound desc = could not find container \"db23495c4f7f722f78613e2c0eea1fa434167c457b7d6db0d3f71b12d6cc1f4f\": container with ID starting with db23495c4f7f722f78613e2c0eea1fa434167c457b7d6db0d3f71b12d6cc1f4f not found: ID does not exist" Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.188637 4998 scope.go:117] "RemoveContainer" containerID="6c06ef6866765c4030713d6e39fbad7a7e57e13d41bd2fe16eb5ad010f423b66" Feb 03 08:58:14 crc kubenswrapper[4998]: E0203 08:58:14.189169 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c06ef6866765c4030713d6e39fbad7a7e57e13d41bd2fe16eb5ad010f423b66\": container with ID starting with 6c06ef6866765c4030713d6e39fbad7a7e57e13d41bd2fe16eb5ad010f423b66 not found: ID does not exist" containerID="6c06ef6866765c4030713d6e39fbad7a7e57e13d41bd2fe16eb5ad010f423b66" Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.189215 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c06ef6866765c4030713d6e39fbad7a7e57e13d41bd2fe16eb5ad010f423b66"} err="failed to get container status \"6c06ef6866765c4030713d6e39fbad7a7e57e13d41bd2fe16eb5ad010f423b66\": rpc error: code = NotFound desc = could not find container \"6c06ef6866765c4030713d6e39fbad7a7e57e13d41bd2fe16eb5ad010f423b66\": container with ID starting with 6c06ef6866765c4030713d6e39fbad7a7e57e13d41bd2fe16eb5ad010f423b66 not found: ID does not exist" Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.192505 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Feb 03 08:58:14 crc kubenswrapper[4998]: E0203 08:58:14.192893 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8abf893-b316-43cc-a2dd-d3a5f14c7f98" containerName="cinder-api" Feb 03 08:58:14 crc 
kubenswrapper[4998]: I0203 08:58:14.192906 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8abf893-b316-43cc-a2dd-d3a5f14c7f98" containerName="cinder-api" Feb 03 08:58:14 crc kubenswrapper[4998]: E0203 08:58:14.192941 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8abf893-b316-43cc-a2dd-d3a5f14c7f98" containerName="cinder-api-log" Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.192947 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8abf893-b316-43cc-a2dd-d3a5f14c7f98" containerName="cinder-api-log" Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.193095 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8abf893-b316-43cc-a2dd-d3a5f14c7f98" containerName="cinder-api" Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.193113 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8abf893-b316-43cc-a2dd-d3a5f14c7f98" containerName="cinder-api-log" Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.194011 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.195668 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.200154 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.271815 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af29c0c2-777f-4625-be87-e41e23c29f71-config-data\") pod \"cinder-api-0\" (UID: \"af29c0c2-777f-4625-be87-e41e23c29f71\") " pod="openstack/cinder-api-0" Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.271893 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af29c0c2-777f-4625-be87-e41e23c29f71-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"af29c0c2-777f-4625-be87-e41e23c29f71\") " pod="openstack/cinder-api-0" Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.271951 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2c79\" (UniqueName: \"kubernetes.io/projected/af29c0c2-777f-4625-be87-e41e23c29f71-kube-api-access-h2c79\") pod \"cinder-api-0\" (UID: \"af29c0c2-777f-4625-be87-e41e23c29f71\") " pod="openstack/cinder-api-0" Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.271981 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/af29c0c2-777f-4625-be87-e41e23c29f71-scripts\") pod \"cinder-api-0\" (UID: \"af29c0c2-777f-4625-be87-e41e23c29f71\") " pod="openstack/cinder-api-0" Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.272003 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/af29c0c2-777f-4625-be87-e41e23c29f71-config-data-custom\") pod \"cinder-api-0\" (UID: \"af29c0c2-777f-4625-be87-e41e23c29f71\") " pod="openstack/cinder-api-0" Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.272066 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/af29c0c2-777f-4625-be87-e41e23c29f71-logs\") pod \"cinder-api-0\" (UID: \"af29c0c2-777f-4625-be87-e41e23c29f71\") " pod="openstack/cinder-api-0" Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.272108 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/af29c0c2-777f-4625-be87-e41e23c29f71-etc-machine-id\") pod \"cinder-api-0\" (UID: \"af29c0c2-777f-4625-be87-e41e23c29f71\") " pod="openstack/cinder-api-0" Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.373720 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2c79\" (UniqueName: \"kubernetes.io/projected/af29c0c2-777f-4625-be87-e41e23c29f71-kube-api-access-h2c79\") pod \"cinder-api-0\" (UID: \"af29c0c2-777f-4625-be87-e41e23c29f71\") " pod="openstack/cinder-api-0" Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.373855 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/af29c0c2-777f-4625-be87-e41e23c29f71-scripts\") pod \"cinder-api-0\" (UID: \"af29c0c2-777f-4625-be87-e41e23c29f71\") " pod="openstack/cinder-api-0" Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.373888 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/af29c0c2-777f-4625-be87-e41e23c29f71-config-data-custom\") pod \"cinder-api-0\" (UID: \"af29c0c2-777f-4625-be87-e41e23c29f71\") " pod="openstack/cinder-api-0" Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.373950 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/af29c0c2-777f-4625-be87-e41e23c29f71-logs\") pod \"cinder-api-0\" (UID: \"af29c0c2-777f-4625-be87-e41e23c29f71\") " pod="openstack/cinder-api-0" Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.373992 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/af29c0c2-777f-4625-be87-e41e23c29f71-etc-machine-id\") pod \"cinder-api-0\" (UID: \"af29c0c2-777f-4625-be87-e41e23c29f71\") " pod="openstack/cinder-api-0" Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.374015 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af29c0c2-777f-4625-be87-e41e23c29f71-config-data\") pod \"cinder-api-0\" (UID: \"af29c0c2-777f-4625-be87-e41e23c29f71\") " pod="openstack/cinder-api-0" Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.374038 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af29c0c2-777f-4625-be87-e41e23c29f71-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"af29c0c2-777f-4625-be87-e41e23c29f71\") " pod="openstack/cinder-api-0" Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.374372 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/af29c0c2-777f-4625-be87-e41e23c29f71-etc-machine-id\") pod \"cinder-api-0\" (UID: \"af29c0c2-777f-4625-be87-e41e23c29f71\") " pod="openstack/cinder-api-0" Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.375030 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/af29c0c2-777f-4625-be87-e41e23c29f71-logs\") pod \"cinder-api-0\" (UID: \"af29c0c2-777f-4625-be87-e41e23c29f71\") " pod="openstack/cinder-api-0" Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.378278 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/af29c0c2-777f-4625-be87-e41e23c29f71-scripts\") pod \"cinder-api-0\" (UID: \"af29c0c2-777f-4625-be87-e41e23c29f71\") " pod="openstack/cinder-api-0" Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.378563 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/af29c0c2-777f-4625-be87-e41e23c29f71-config-data-custom\") pod \"cinder-api-0\" (UID: \"af29c0c2-777f-4625-be87-e41e23c29f71\") " pod="openstack/cinder-api-0" Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.378657 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af29c0c2-777f-4625-be87-e41e23c29f71-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"af29c0c2-777f-4625-be87-e41e23c29f71\") " pod="openstack/cinder-api-0" Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.379653 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af29c0c2-777f-4625-be87-e41e23c29f71-config-data\") pod \"cinder-api-0\" (UID: \"af29c0c2-777f-4625-be87-e41e23c29f71\") " pod="openstack/cinder-api-0" Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.392960 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2c79\" (UniqueName: \"kubernetes.io/projected/af29c0c2-777f-4625-be87-e41e23c29f71-kube-api-access-h2c79\") pod \"cinder-api-0\" (UID: \"af29c0c2-777f-4625-be87-e41e23c29f71\") " pod="openstack/cinder-api-0" Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.439695 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8abf893-b316-43cc-a2dd-d3a5f14c7f98" path="/var/lib/kubelet/pods/b8abf893-b316-43cc-a2dd-d3a5f14c7f98/volumes" Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.508208 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Feb 03 08:58:14 crc kubenswrapper[4998]: I0203 08:58:14.949152 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Feb 03 08:58:14 crc kubenswrapper[4998]: W0203 08:58:14.954349 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaf29c0c2_777f_4625_be87_e41e23c29f71.slice/crio-85b16c5036c2e1cdc7dfdf6d596ee049a3be4272d1c6c7f69874cee5a8b624fa WatchSource:0}: Error finding container 85b16c5036c2e1cdc7dfdf6d596ee049a3be4272d1c6c7f69874cee5a8b624fa: Status 404 returned error can't find the container with id 85b16c5036c2e1cdc7dfdf6d596ee049a3be4272d1c6c7f69874cee5a8b624fa Feb 03 08:58:15 crc kubenswrapper[4998]: I0203 08:58:15.161563 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"af29c0c2-777f-4625-be87-e41e23c29f71","Type":"ContainerStarted","Data":"85b16c5036c2e1cdc7dfdf6d596ee049a3be4272d1c6c7f69874cee5a8b624fa"} Feb 03 08:58:16 crc kubenswrapper[4998]: I0203 08:58:16.196636 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"af29c0c2-777f-4625-be87-e41e23c29f71","Type":"ContainerStarted","Data":"fea1e5658f40c1d9c58e3621de5576bf6305b6f83004c625629ad9b515dcbdb7"} Feb 03 08:58:16 crc kubenswrapper[4998]: I0203 08:58:16.197025 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"af29c0c2-777f-4625-be87-e41e23c29f71","Type":"ContainerStarted","Data":"fecdc44bf033a1480b7d6f228a30ba03cd1f68e27fadb3b9381f933e9c3e8632"} Feb 03 08:58:16 crc kubenswrapper[4998]: I0203 08:58:16.197054 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Feb 03 08:58:16 crc kubenswrapper[4998]: I0203 08:58:16.215759 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=2.215739626 podStartE2EDuration="2.215739626s" podCreationTimestamp="2026-02-03 08:58:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 08:58:16.214936464 +0000 UTC m=+7934.501630280" watchObservedRunningTime="2026-02-03 08:58:16.215739626 +0000 UTC m=+7934.502433422" Feb 03 08:58:18 crc kubenswrapper[4998]: I0203 08:58:18.587253 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="b8abf893-b316-43cc-a2dd-d3a5f14c7f98" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.1.68:8776/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 03 08:58:19 crc kubenswrapper[4998]: I0203 08:58:19.063360 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Feb 03 08:58:19 crc kubenswrapper[4998]: I0203 08:58:19.103580 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 03 08:58:19 crc kubenswrapper[4998]: I0203 08:58:19.226549 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="5512dd2d-7231-498f-90de-75cab8386e5f" containerName="cinder-scheduler" containerID="cri-o://1e788c2a8ea7642091b58e7316ce33ea71683229e14529ad94af72ba6a79d536" gracePeriod=30 Feb 03 08:58:19 crc kubenswrapper[4998]: I0203 08:58:19.226596 4998 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/cinder-scheduler-0" podUID="5512dd2d-7231-498f-90de-75cab8386e5f" containerName="probe" containerID="cri-o://74a169217d458cd67b02090d748ad25b74aee08ae5aaf825e395f022fccd8dde" gracePeriod=30 Feb 03 08:58:20 crc kubenswrapper[4998]: I0203 08:58:20.241503 4998 generic.go:334] "Generic (PLEG): container finished" podID="5512dd2d-7231-498f-90de-75cab8386e5f" containerID="74a169217d458cd67b02090d748ad25b74aee08ae5aaf825e395f022fccd8dde" exitCode=0 Feb 03 08:58:20 crc kubenswrapper[4998]: I0203 08:58:20.241882 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"5512dd2d-7231-498f-90de-75cab8386e5f","Type":"ContainerDied","Data":"74a169217d458cd67b02090d748ad25b74aee08ae5aaf825e395f022fccd8dde"} Feb 03 08:58:22 crc kubenswrapper[4998]: I0203 08:58:22.266722 4998 generic.go:334] "Generic (PLEG): container finished" podID="5512dd2d-7231-498f-90de-75cab8386e5f" containerID="1e788c2a8ea7642091b58e7316ce33ea71683229e14529ad94af72ba6a79d536" exitCode=0 Feb 03 08:58:22 crc kubenswrapper[4998]: I0203 08:58:22.266753 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"5512dd2d-7231-498f-90de-75cab8386e5f","Type":"ContainerDied","Data":"1e788c2a8ea7642091b58e7316ce33ea71683229e14529ad94af72ba6a79d536"} Feb 03 08:58:22 crc kubenswrapper[4998]: I0203 08:58:22.517057 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 03 08:58:22 crc kubenswrapper[4998]: I0203 08:58:22.660846 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5512dd2d-7231-498f-90de-75cab8386e5f-config-data\") pod \"5512dd2d-7231-498f-90de-75cab8386e5f\" (UID: \"5512dd2d-7231-498f-90de-75cab8386e5f\") " Feb 03 08:58:22 crc kubenswrapper[4998]: I0203 08:58:22.661987 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zg2pp\" (UniqueName: \"kubernetes.io/projected/5512dd2d-7231-498f-90de-75cab8386e5f-kube-api-access-zg2pp\") pod \"5512dd2d-7231-498f-90de-75cab8386e5f\" (UID: \"5512dd2d-7231-498f-90de-75cab8386e5f\") " Feb 03 08:58:22 crc kubenswrapper[4998]: I0203 08:58:22.662111 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5512dd2d-7231-498f-90de-75cab8386e5f-scripts\") pod \"5512dd2d-7231-498f-90de-75cab8386e5f\" (UID: \"5512dd2d-7231-498f-90de-75cab8386e5f\") " Feb 03 08:58:22 crc kubenswrapper[4998]: I0203 08:58:22.662172 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5512dd2d-7231-498f-90de-75cab8386e5f-combined-ca-bundle\") pod \"5512dd2d-7231-498f-90de-75cab8386e5f\" (UID: \"5512dd2d-7231-498f-90de-75cab8386e5f\") " Feb 03 08:58:22 crc kubenswrapper[4998]: I0203 08:58:22.662217 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5512dd2d-7231-498f-90de-75cab8386e5f-config-data-custom\") pod \"5512dd2d-7231-498f-90de-75cab8386e5f\" (UID: \"5512dd2d-7231-498f-90de-75cab8386e5f\") " Feb 03 08:58:22 crc kubenswrapper[4998]: I0203 08:58:22.662248 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5512dd2d-7231-498f-90de-75cab8386e5f-etc-machine-id\") pod 
\"5512dd2d-7231-498f-90de-75cab8386e5f\" (UID: \"5512dd2d-7231-498f-90de-75cab8386e5f\") " Feb 03 08:58:22 crc kubenswrapper[4998]: I0203 08:58:22.662624 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5512dd2d-7231-498f-90de-75cab8386e5f-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "5512dd2d-7231-498f-90de-75cab8386e5f" (UID: "5512dd2d-7231-498f-90de-75cab8386e5f"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 08:58:22 crc kubenswrapper[4998]: I0203 08:58:22.663424 4998 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5512dd2d-7231-498f-90de-75cab8386e5f-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 03 08:58:22 crc kubenswrapper[4998]: I0203 08:58:22.666693 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5512dd2d-7231-498f-90de-75cab8386e5f-kube-api-access-zg2pp" (OuterVolumeSpecName: "kube-api-access-zg2pp") pod "5512dd2d-7231-498f-90de-75cab8386e5f" (UID: "5512dd2d-7231-498f-90de-75cab8386e5f"). InnerVolumeSpecName "kube-api-access-zg2pp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:58:22 crc kubenswrapper[4998]: I0203 08:58:22.667435 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5512dd2d-7231-498f-90de-75cab8386e5f-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "5512dd2d-7231-498f-90de-75cab8386e5f" (UID: "5512dd2d-7231-498f-90de-75cab8386e5f"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:58:22 crc kubenswrapper[4998]: I0203 08:58:22.673961 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5512dd2d-7231-498f-90de-75cab8386e5f-scripts" (OuterVolumeSpecName: "scripts") pod "5512dd2d-7231-498f-90de-75cab8386e5f" (UID: "5512dd2d-7231-498f-90de-75cab8386e5f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:58:22 crc kubenswrapper[4998]: I0203 08:58:22.726061 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5512dd2d-7231-498f-90de-75cab8386e5f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5512dd2d-7231-498f-90de-75cab8386e5f" (UID: "5512dd2d-7231-498f-90de-75cab8386e5f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:58:22 crc kubenswrapper[4998]: I0203 08:58:22.764994 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zg2pp\" (UniqueName: \"kubernetes.io/projected/5512dd2d-7231-498f-90de-75cab8386e5f-kube-api-access-zg2pp\") on node \"crc\" DevicePath \"\"" Feb 03 08:58:22 crc kubenswrapper[4998]: I0203 08:58:22.765032 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5512dd2d-7231-498f-90de-75cab8386e5f-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 08:58:22 crc kubenswrapper[4998]: I0203 08:58:22.765044 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5512dd2d-7231-498f-90de-75cab8386e5f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 08:58:22 crc kubenswrapper[4998]: I0203 08:58:22.765054 4998 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5512dd2d-7231-498f-90de-75cab8386e5f-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 03 08:58:22 crc kubenswrapper[4998]: I0203 08:58:22.767073 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5512dd2d-7231-498f-90de-75cab8386e5f-config-data" (OuterVolumeSpecName: "config-data") pod "5512dd2d-7231-498f-90de-75cab8386e5f" (UID: "5512dd2d-7231-498f-90de-75cab8386e5f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:58:22 crc kubenswrapper[4998]: I0203 08:58:22.866171 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5512dd2d-7231-498f-90de-75cab8386e5f-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 08:58:23 crc kubenswrapper[4998]: I0203 08:58:23.281875 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"5512dd2d-7231-498f-90de-75cab8386e5f","Type":"ContainerDied","Data":"43a62f7e2fe4405f8b080b4a96afbad859ac0ee345f7e0e60585b3f6513de026"} Feb 03 08:58:23 crc kubenswrapper[4998]: I0203 08:58:23.281924 4998 scope.go:117] "RemoveContainer" containerID="74a169217d458cd67b02090d748ad25b74aee08ae5aaf825e395f022fccd8dde" Feb 03 08:58:23 crc kubenswrapper[4998]: I0203 08:58:23.282042 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 03 08:58:23 crc kubenswrapper[4998]: I0203 08:58:23.306984 4998 scope.go:117] "RemoveContainer" containerID="1e788c2a8ea7642091b58e7316ce33ea71683229e14529ad94af72ba6a79d536" Feb 03 08:58:23 crc kubenswrapper[4998]: I0203 08:58:23.314146 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 03 08:58:23 crc kubenswrapper[4998]: I0203 08:58:23.322695 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 03 08:58:23 crc kubenswrapper[4998]: I0203 08:58:23.340800 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Feb 03 08:58:23 crc kubenswrapper[4998]: E0203 08:58:23.345972 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5512dd2d-7231-498f-90de-75cab8386e5f" containerName="cinder-scheduler" Feb 03 08:58:23 crc kubenswrapper[4998]: I0203 08:58:23.346155 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="5512dd2d-7231-498f-90de-75cab8386e5f" containerName="cinder-scheduler" Feb 03 08:58:23 crc kubenswrapper[4998]: E0203 08:58:23.346204 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5512dd2d-7231-498f-90de-75cab8386e5f" containerName="probe" Feb 03 08:58:23 crc kubenswrapper[4998]: I0203 08:58:23.346210 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="5512dd2d-7231-498f-90de-75cab8386e5f" containerName="probe" Feb 03 08:58:23 crc kubenswrapper[4998]: I0203 08:58:23.349275 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="5512dd2d-7231-498f-90de-75cab8386e5f" containerName="probe" Feb 03 08:58:23 crc kubenswrapper[4998]: I0203 08:58:23.356431 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="5512dd2d-7231-498f-90de-75cab8386e5f" containerName="cinder-scheduler" Feb 03 08:58:23 crc kubenswrapper[4998]: I0203 08:58:23.359175 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 03 08:58:23 crc kubenswrapper[4998]: I0203 08:58:23.365398 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Feb 03 08:58:23 crc kubenswrapper[4998]: I0203 08:58:23.385543 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 03 08:58:23 crc kubenswrapper[4998]: I0203 08:58:23.479999 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/991a6b49-bfc0-4fa8-a503-1287a18010e0-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"991a6b49-bfc0-4fa8-a503-1287a18010e0\") " pod="openstack/cinder-scheduler-0" Feb 03 08:58:23 crc kubenswrapper[4998]: I0203 08:58:23.480053 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/991a6b49-bfc0-4fa8-a503-1287a18010e0-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"991a6b49-bfc0-4fa8-a503-1287a18010e0\") " pod="openstack/cinder-scheduler-0" Feb 03 08:58:23 crc kubenswrapper[4998]: I0203 08:58:23.480099 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/991a6b49-bfc0-4fa8-a503-1287a18010e0-scripts\") pod \"cinder-scheduler-0\" (UID: \"991a6b49-bfc0-4fa8-a503-1287a18010e0\") " pod="openstack/cinder-scheduler-0" Feb 03 08:58:23 crc kubenswrapper[4998]: I0203 08:58:23.480375 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mbcc9\" (UniqueName: \"kubernetes.io/projected/991a6b49-bfc0-4fa8-a503-1287a18010e0-kube-api-access-mbcc9\") pod \"cinder-scheduler-0\" (UID: \"991a6b49-bfc0-4fa8-a503-1287a18010e0\") " pod="openstack/cinder-scheduler-0" Feb 03 08:58:23 crc kubenswrapper[4998]: I0203 08:58:23.480521 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/991a6b49-bfc0-4fa8-a503-1287a18010e0-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"991a6b49-bfc0-4fa8-a503-1287a18010e0\") " pod="openstack/cinder-scheduler-0" Feb 03 08:58:23 crc kubenswrapper[4998]: I0203 08:58:23.480598 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/991a6b49-bfc0-4fa8-a503-1287a18010e0-config-data\") pod \"cinder-scheduler-0\" (UID: \"991a6b49-bfc0-4fa8-a503-1287a18010e0\") " pod="openstack/cinder-scheduler-0" Feb 03 08:58:23 crc kubenswrapper[4998]: I0203 08:58:23.581706 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/991a6b49-bfc0-4fa8-a503-1287a18010e0-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"991a6b49-bfc0-4fa8-a503-1287a18010e0\") " pod="openstack/cinder-scheduler-0" Feb 03 08:58:23 crc kubenswrapper[4998]: I0203 08:58:23.581753 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/991a6b49-bfc0-4fa8-a503-1287a18010e0-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"991a6b49-bfc0-4fa8-a503-1287a18010e0\") " pod="openstack/cinder-scheduler-0" Feb 03 08:58:23 crc kubenswrapper[4998]: I0203 08:58:23.581815 4998 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/991a6b49-bfc0-4fa8-a503-1287a18010e0-scripts\") pod \"cinder-scheduler-0\" (UID: \"991a6b49-bfc0-4fa8-a503-1287a18010e0\") " pod="openstack/cinder-scheduler-0" Feb 03 08:58:23 crc kubenswrapper[4998]: I0203 08:58:23.581896 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/991a6b49-bfc0-4fa8-a503-1287a18010e0-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"991a6b49-bfc0-4fa8-a503-1287a18010e0\") " pod="openstack/cinder-scheduler-0" Feb 03 08:58:23 crc kubenswrapper[4998]: I0203 08:58:23.581910 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mbcc9\" (UniqueName: \"kubernetes.io/projected/991a6b49-bfc0-4fa8-a503-1287a18010e0-kube-api-access-mbcc9\") pod \"cinder-scheduler-0\" (UID: \"991a6b49-bfc0-4fa8-a503-1287a18010e0\") " pod="openstack/cinder-scheduler-0" Feb 03 08:58:23 crc kubenswrapper[4998]: I0203 08:58:23.581997 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/991a6b49-bfc0-4fa8-a503-1287a18010e0-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"991a6b49-bfc0-4fa8-a503-1287a18010e0\") " pod="openstack/cinder-scheduler-0" Feb 03 08:58:23 crc kubenswrapper[4998]: I0203 08:58:23.582035 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/991a6b49-bfc0-4fa8-a503-1287a18010e0-config-data\") pod \"cinder-scheduler-0\" (UID: \"991a6b49-bfc0-4fa8-a503-1287a18010e0\") " pod="openstack/cinder-scheduler-0" Feb 03 08:58:23 crc kubenswrapper[4998]: I0203 08:58:23.585857 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/991a6b49-bfc0-4fa8-a503-1287a18010e0-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"991a6b49-bfc0-4fa8-a503-1287a18010e0\") " pod="openstack/cinder-scheduler-0" Feb 03 08:58:23 crc kubenswrapper[4998]: I0203 08:58:23.585926 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/991a6b49-bfc0-4fa8-a503-1287a18010e0-scripts\") pod \"cinder-scheduler-0\" (UID: \"991a6b49-bfc0-4fa8-a503-1287a18010e0\") " pod="openstack/cinder-scheduler-0" Feb 03 08:58:23 crc kubenswrapper[4998]: I0203 08:58:23.586467 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/991a6b49-bfc0-4fa8-a503-1287a18010e0-config-data\") pod \"cinder-scheduler-0\" (UID: \"991a6b49-bfc0-4fa8-a503-1287a18010e0\") " pod="openstack/cinder-scheduler-0" Feb 03 08:58:23 crc kubenswrapper[4998]: I0203 08:58:23.587540 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/991a6b49-bfc0-4fa8-a503-1287a18010e0-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"991a6b49-bfc0-4fa8-a503-1287a18010e0\") " pod="openstack/cinder-scheduler-0" Feb 03 08:58:23 crc kubenswrapper[4998]: I0203 08:58:23.601412 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mbcc9\" (UniqueName: \"kubernetes.io/projected/991a6b49-bfc0-4fa8-a503-1287a18010e0-kube-api-access-mbcc9\") pod \"cinder-scheduler-0\" (UID: \"991a6b49-bfc0-4fa8-a503-1287a18010e0\") " pod="openstack/cinder-scheduler-0" Feb 03 08:58:23 
crc kubenswrapper[4998]: I0203 08:58:23.693311 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 03 08:58:24 crc kubenswrapper[4998]: I0203 08:58:24.145453 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 03 08:58:24 crc kubenswrapper[4998]: I0203 08:58:24.335889 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"991a6b49-bfc0-4fa8-a503-1287a18010e0","Type":"ContainerStarted","Data":"b72c2de53f515de8fd8c4025818a6273d4bad8dd261633197865029b2f4f0ce3"} Feb 03 08:58:24 crc kubenswrapper[4998]: I0203 08:58:24.441076 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5512dd2d-7231-498f-90de-75cab8386e5f" path="/var/lib/kubelet/pods/5512dd2d-7231-498f-90de-75cab8386e5f/volumes" Feb 03 08:58:25 crc kubenswrapper[4998]: I0203 08:58:25.352095 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"991a6b49-bfc0-4fa8-a503-1287a18010e0","Type":"ContainerStarted","Data":"5ba40030ecaea5e938486e8ba1bbc50f2f5e7741d0d82f2fb633c8a79d54f145"} Feb 03 08:58:26 crc kubenswrapper[4998]: I0203 08:58:26.376759 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Feb 03 08:58:26 crc kubenswrapper[4998]: I0203 08:58:26.379331 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"991a6b49-bfc0-4fa8-a503-1287a18010e0","Type":"ContainerStarted","Data":"ac0813e8d8d745c08c6fa8b8be4b2c8c542a8fd8a621447c33b789d087cb10b3"} Feb 03 08:58:26 crc kubenswrapper[4998]: I0203 08:58:26.446224 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.4462040099999998 podStartE2EDuration="3.44620401s" podCreationTimestamp="2026-02-03 08:58:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 08:58:26.444198913 +0000 UTC m=+7944.730892729" watchObservedRunningTime="2026-02-03 08:58:26.44620401 +0000 UTC m=+7944.732897826" Feb 03 08:58:28 crc kubenswrapper[4998]: I0203 08:58:28.693731 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Feb 03 08:58:33 crc kubenswrapper[4998]: I0203 08:58:33.944395 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Feb 03 08:58:35 crc kubenswrapper[4998]: I0203 08:58:35.057472 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-nzthm"] Feb 03 08:58:35 crc kubenswrapper[4998]: I0203 08:58:35.059568 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-nzthm" Feb 03 08:58:35 crc kubenswrapper[4998]: I0203 08:58:35.086741 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-nzthm"] Feb 03 08:58:35 crc kubenswrapper[4998]: I0203 08:58:35.106042 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2007b61e-5077-42b3-91d8-88725aef70a6-operator-scripts\") pod \"glance-db-create-nzthm\" (UID: \"2007b61e-5077-42b3-91d8-88725aef70a6\") " pod="openstack/glance-db-create-nzthm" Feb 03 08:58:35 crc kubenswrapper[4998]: I0203 08:58:35.106400 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zr4wf\" (UniqueName: \"kubernetes.io/projected/2007b61e-5077-42b3-91d8-88725aef70a6-kube-api-access-zr4wf\") pod \"glance-db-create-nzthm\" (UID: \"2007b61e-5077-42b3-91d8-88725aef70a6\") " pod="openstack/glance-db-create-nzthm" Feb 03 08:58:35 crc kubenswrapper[4998]: I0203 08:58:35.159376 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-940a-account-create-update-d2hxr"] Feb 03 08:58:35 crc kubenswrapper[4998]: I0203 08:58:35.160583 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-940a-account-create-update-d2hxr" Feb 03 08:58:35 crc kubenswrapper[4998]: I0203 08:58:35.162247 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Feb 03 08:58:35 crc kubenswrapper[4998]: I0203 08:58:35.179268 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-940a-account-create-update-d2hxr"] Feb 03 08:58:35 crc kubenswrapper[4998]: I0203 08:58:35.209366 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkgqc\" (UniqueName: \"kubernetes.io/projected/c00f9260-e15f-4da2-80bc-2a17a188ad20-kube-api-access-qkgqc\") pod \"glance-940a-account-create-update-d2hxr\" (UID: \"c00f9260-e15f-4da2-80bc-2a17a188ad20\") " pod="openstack/glance-940a-account-create-update-d2hxr" Feb 03 08:58:35 crc kubenswrapper[4998]: I0203 08:58:35.209438 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zr4wf\" (UniqueName: \"kubernetes.io/projected/2007b61e-5077-42b3-91d8-88725aef70a6-kube-api-access-zr4wf\") pod \"glance-db-create-nzthm\" (UID: \"2007b61e-5077-42b3-91d8-88725aef70a6\") " pod="openstack/glance-db-create-nzthm" Feb 03 08:58:35 crc kubenswrapper[4998]: I0203 08:58:35.209569 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c00f9260-e15f-4da2-80bc-2a17a188ad20-operator-scripts\") pod \"glance-940a-account-create-update-d2hxr\" (UID: \"c00f9260-e15f-4da2-80bc-2a17a188ad20\") " pod="openstack/glance-940a-account-create-update-d2hxr" Feb 03 08:58:35 crc kubenswrapper[4998]: I0203 08:58:35.209633 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2007b61e-5077-42b3-91d8-88725aef70a6-operator-scripts\") pod \"glance-db-create-nzthm\" (UID: \"2007b61e-5077-42b3-91d8-88725aef70a6\") " pod="openstack/glance-db-create-nzthm" Feb 03 08:58:35 crc kubenswrapper[4998]: I0203 08:58:35.210501 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/2007b61e-5077-42b3-91d8-88725aef70a6-operator-scripts\") pod \"glance-db-create-nzthm\" (UID: \"2007b61e-5077-42b3-91d8-88725aef70a6\") " pod="openstack/glance-db-create-nzthm" Feb 03 08:58:35 crc kubenswrapper[4998]: I0203 08:58:35.231573 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zr4wf\" (UniqueName: \"kubernetes.io/projected/2007b61e-5077-42b3-91d8-88725aef70a6-kube-api-access-zr4wf\") pod \"glance-db-create-nzthm\" (UID: \"2007b61e-5077-42b3-91d8-88725aef70a6\") " pod="openstack/glance-db-create-nzthm" Feb 03 08:58:35 crc kubenswrapper[4998]: I0203 08:58:35.311720 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c00f9260-e15f-4da2-80bc-2a17a188ad20-operator-scripts\") pod \"glance-940a-account-create-update-d2hxr\" (UID: \"c00f9260-e15f-4da2-80bc-2a17a188ad20\") " pod="openstack/glance-940a-account-create-update-d2hxr" Feb 03 08:58:35 crc kubenswrapper[4998]: I0203 08:58:35.311859 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkgqc\" (UniqueName: \"kubernetes.io/projected/c00f9260-e15f-4da2-80bc-2a17a188ad20-kube-api-access-qkgqc\") pod \"glance-940a-account-create-update-d2hxr\" (UID: \"c00f9260-e15f-4da2-80bc-2a17a188ad20\") " pod="openstack/glance-940a-account-create-update-d2hxr" Feb 03 08:58:35 crc kubenswrapper[4998]: I0203 08:58:35.312915 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c00f9260-e15f-4da2-80bc-2a17a188ad20-operator-scripts\") pod \"glance-940a-account-create-update-d2hxr\" (UID: \"c00f9260-e15f-4da2-80bc-2a17a188ad20\") " pod="openstack/glance-940a-account-create-update-d2hxr" Feb 03 08:58:35 crc kubenswrapper[4998]: I0203 08:58:35.328898 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkgqc\" (UniqueName: \"kubernetes.io/projected/c00f9260-e15f-4da2-80bc-2a17a188ad20-kube-api-access-qkgqc\") pod \"glance-940a-account-create-update-d2hxr\" (UID: \"c00f9260-e15f-4da2-80bc-2a17a188ad20\") " pod="openstack/glance-940a-account-create-update-d2hxr" Feb 03 08:58:35 crc kubenswrapper[4998]: I0203 08:58:35.386045 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-nzthm" Feb 03 08:58:35 crc kubenswrapper[4998]: I0203 08:58:35.478850 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-940a-account-create-update-d2hxr" Feb 03 08:58:35 crc kubenswrapper[4998]: W0203 08:58:35.781056 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc00f9260_e15f_4da2_80bc_2a17a188ad20.slice/crio-ecfc916ba084ea35ef5e3824e5a5e9144c5ca40f65dac1435f818fd5e52d3a14 WatchSource:0}: Error finding container ecfc916ba084ea35ef5e3824e5a5e9144c5ca40f65dac1435f818fd5e52d3a14: Status 404 returned error can't find the container with id ecfc916ba084ea35ef5e3824e5a5e9144c5ca40f65dac1435f818fd5e52d3a14 Feb 03 08:58:35 crc kubenswrapper[4998]: I0203 08:58:35.781421 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-940a-account-create-update-d2hxr"] Feb 03 08:58:35 crc kubenswrapper[4998]: I0203 08:58:35.893374 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-nzthm"] Feb 03 08:58:36 crc kubenswrapper[4998]: I0203 08:58:36.479576 4998 generic.go:334] "Generic (PLEG): container finished" podID="2007b61e-5077-42b3-91d8-88725aef70a6" containerID="95da58851bc9946b8a4fa2b4da776de8120597826477bfed63a1cfd057a47cc4" exitCode=0 Feb 03 08:58:36 crc kubenswrapper[4998]: I0203 08:58:36.479923 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-nzthm" event={"ID":"2007b61e-5077-42b3-91d8-88725aef70a6","Type":"ContainerDied","Data":"95da58851bc9946b8a4fa2b4da776de8120597826477bfed63a1cfd057a47cc4"} Feb 03 08:58:36 crc kubenswrapper[4998]: I0203 08:58:36.479955 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-nzthm" event={"ID":"2007b61e-5077-42b3-91d8-88725aef70a6","Type":"ContainerStarted","Data":"078b795b0402440fd552bde15d9e6eabb4aaf3851cf8d8317f2f0b9f3735f521"} Feb 03 08:58:36 crc kubenswrapper[4998]: I0203 08:58:36.482555 4998 generic.go:334] "Generic (PLEG): container finished" podID="c00f9260-e15f-4da2-80bc-2a17a188ad20" containerID="92b69d37e147edcc833dc9fbd9c4326a8983cd20bb17ca5e1265fb5e95ef1f99" exitCode=0 Feb 03 08:58:36 crc kubenswrapper[4998]: I0203 08:58:36.482602 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-940a-account-create-update-d2hxr" event={"ID":"c00f9260-e15f-4da2-80bc-2a17a188ad20","Type":"ContainerDied","Data":"92b69d37e147edcc833dc9fbd9c4326a8983cd20bb17ca5e1265fb5e95ef1f99"} Feb 03 08:58:36 crc kubenswrapper[4998]: I0203 08:58:36.482639 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-940a-account-create-update-d2hxr" event={"ID":"c00f9260-e15f-4da2-80bc-2a17a188ad20","Type":"ContainerStarted","Data":"ecfc916ba084ea35ef5e3824e5a5e9144c5ca40f65dac1435f818fd5e52d3a14"} Feb 03 08:58:37 crc kubenswrapper[4998]: I0203 08:58:37.848163 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-nzthm" Feb 03 08:58:37 crc kubenswrapper[4998]: I0203 08:58:37.875054 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2007b61e-5077-42b3-91d8-88725aef70a6-operator-scripts\") pod \"2007b61e-5077-42b3-91d8-88725aef70a6\" (UID: \"2007b61e-5077-42b3-91d8-88725aef70a6\") " Feb 03 08:58:37 crc kubenswrapper[4998]: I0203 08:58:37.875134 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zr4wf\" (UniqueName: \"kubernetes.io/projected/2007b61e-5077-42b3-91d8-88725aef70a6-kube-api-access-zr4wf\") pod \"2007b61e-5077-42b3-91d8-88725aef70a6\" (UID: \"2007b61e-5077-42b3-91d8-88725aef70a6\") " Feb 03 08:58:37 crc kubenswrapper[4998]: I0203 08:58:37.876511 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2007b61e-5077-42b3-91d8-88725aef70a6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2007b61e-5077-42b3-91d8-88725aef70a6" (UID: "2007b61e-5077-42b3-91d8-88725aef70a6"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:58:37 crc kubenswrapper[4998]: I0203 08:58:37.881237 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2007b61e-5077-42b3-91d8-88725aef70a6-kube-api-access-zr4wf" (OuterVolumeSpecName: "kube-api-access-zr4wf") pod "2007b61e-5077-42b3-91d8-88725aef70a6" (UID: "2007b61e-5077-42b3-91d8-88725aef70a6"). InnerVolumeSpecName "kube-api-access-zr4wf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:58:37 crc kubenswrapper[4998]: I0203 08:58:37.935651 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-940a-account-create-update-d2hxr" Feb 03 08:58:38 crc kubenswrapper[4998]: I0203 08:58:38.001606 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2007b61e-5077-42b3-91d8-88725aef70a6-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 08:58:38 crc kubenswrapper[4998]: I0203 08:58:38.001640 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zr4wf\" (UniqueName: \"kubernetes.io/projected/2007b61e-5077-42b3-91d8-88725aef70a6-kube-api-access-zr4wf\") on node \"crc\" DevicePath \"\"" Feb 03 08:58:38 crc kubenswrapper[4998]: I0203 08:58:38.103204 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qkgqc\" (UniqueName: \"kubernetes.io/projected/c00f9260-e15f-4da2-80bc-2a17a188ad20-kube-api-access-qkgqc\") pod \"c00f9260-e15f-4da2-80bc-2a17a188ad20\" (UID: \"c00f9260-e15f-4da2-80bc-2a17a188ad20\") " Feb 03 08:58:38 crc kubenswrapper[4998]: I0203 08:58:38.103288 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c00f9260-e15f-4da2-80bc-2a17a188ad20-operator-scripts\") pod \"c00f9260-e15f-4da2-80bc-2a17a188ad20\" (UID: \"c00f9260-e15f-4da2-80bc-2a17a188ad20\") " Feb 03 08:58:38 crc kubenswrapper[4998]: I0203 08:58:38.104024 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c00f9260-e15f-4da2-80bc-2a17a188ad20-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c00f9260-e15f-4da2-80bc-2a17a188ad20" (UID: "c00f9260-e15f-4da2-80bc-2a17a188ad20"). 
InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:58:38 crc kubenswrapper[4998]: I0203 08:58:38.108126 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c00f9260-e15f-4da2-80bc-2a17a188ad20-kube-api-access-qkgqc" (OuterVolumeSpecName: "kube-api-access-qkgqc") pod "c00f9260-e15f-4da2-80bc-2a17a188ad20" (UID: "c00f9260-e15f-4da2-80bc-2a17a188ad20"). InnerVolumeSpecName "kube-api-access-qkgqc". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:58:38 crc kubenswrapper[4998]: I0203 08:58:38.205107 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qkgqc\" (UniqueName: \"kubernetes.io/projected/c00f9260-e15f-4da2-80bc-2a17a188ad20-kube-api-access-qkgqc\") on node \"crc\" DevicePath \"\"" Feb 03 08:58:38 crc kubenswrapper[4998]: I0203 08:58:38.205140 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c00f9260-e15f-4da2-80bc-2a17a188ad20-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 08:58:38 crc kubenswrapper[4998]: I0203 08:58:38.500947 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-nzthm" event={"ID":"2007b61e-5077-42b3-91d8-88725aef70a6","Type":"ContainerDied","Data":"078b795b0402440fd552bde15d9e6eabb4aaf3851cf8d8317f2f0b9f3735f521"} Feb 03 08:58:38 crc kubenswrapper[4998]: I0203 08:58:38.501182 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="078b795b0402440fd552bde15d9e6eabb4aaf3851cf8d8317f2f0b9f3735f521" Feb 03 08:58:38 crc kubenswrapper[4998]: I0203 08:58:38.500964 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-nzthm" Feb 03 08:58:38 crc kubenswrapper[4998]: I0203 08:58:38.502495 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-940a-account-create-update-d2hxr" event={"ID":"c00f9260-e15f-4da2-80bc-2a17a188ad20","Type":"ContainerDied","Data":"ecfc916ba084ea35ef5e3824e5a5e9144c5ca40f65dac1435f818fd5e52d3a14"} Feb 03 08:58:38 crc kubenswrapper[4998]: I0203 08:58:38.502521 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ecfc916ba084ea35ef5e3824e5a5e9144c5ca40f65dac1435f818fd5e52d3a14" Feb 03 08:58:38 crc kubenswrapper[4998]: I0203 08:58:38.502524 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-940a-account-create-update-d2hxr" Feb 03 08:58:40 crc kubenswrapper[4998]: I0203 08:58:40.305325 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-f5m7z"] Feb 03 08:58:40 crc kubenswrapper[4998]: E0203 08:58:40.306115 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2007b61e-5077-42b3-91d8-88725aef70a6" containerName="mariadb-database-create" Feb 03 08:58:40 crc kubenswrapper[4998]: I0203 08:58:40.306132 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="2007b61e-5077-42b3-91d8-88725aef70a6" containerName="mariadb-database-create" Feb 03 08:58:40 crc kubenswrapper[4998]: E0203 08:58:40.306148 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c00f9260-e15f-4da2-80bc-2a17a188ad20" containerName="mariadb-account-create-update" Feb 03 08:58:40 crc kubenswrapper[4998]: I0203 08:58:40.306156 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="c00f9260-e15f-4da2-80bc-2a17a188ad20" containerName="mariadb-account-create-update" Feb 03 08:58:40 crc kubenswrapper[4998]: I0203 08:58:40.306367 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="2007b61e-5077-42b3-91d8-88725aef70a6" containerName="mariadb-database-create" Feb 03 08:58:40 crc kubenswrapper[4998]: I0203 08:58:40.306385 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="c00f9260-e15f-4da2-80bc-2a17a188ad20" containerName="mariadb-account-create-update" Feb 03 08:58:40 crc kubenswrapper[4998]: I0203 08:58:40.307094 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-f5m7z" Feb 03 08:58:40 crc kubenswrapper[4998]: I0203 08:58:40.311167 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-2l4z5" Feb 03 08:58:40 crc kubenswrapper[4998]: I0203 08:58:40.311389 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Feb 03 08:58:40 crc kubenswrapper[4998]: I0203 08:58:40.374505 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-f5m7z"] Feb 03 08:58:40 crc kubenswrapper[4998]: I0203 08:58:40.443927 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebbf8d47-3f9c-451c-ba8f-d37a8c74e692-config-data\") pod \"glance-db-sync-f5m7z\" (UID: \"ebbf8d47-3f9c-451c-ba8f-d37a8c74e692\") " pod="openstack/glance-db-sync-f5m7z" Feb 03 08:58:40 crc kubenswrapper[4998]: I0203 08:58:40.444050 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebbf8d47-3f9c-451c-ba8f-d37a8c74e692-combined-ca-bundle\") pod \"glance-db-sync-f5m7z\" (UID: \"ebbf8d47-3f9c-451c-ba8f-d37a8c74e692\") " pod="openstack/glance-db-sync-f5m7z" Feb 03 08:58:40 crc kubenswrapper[4998]: I0203 08:58:40.444091 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8q5f\" (UniqueName: \"kubernetes.io/projected/ebbf8d47-3f9c-451c-ba8f-d37a8c74e692-kube-api-access-t8q5f\") pod \"glance-db-sync-f5m7z\" (UID: \"ebbf8d47-3f9c-451c-ba8f-d37a8c74e692\") " pod="openstack/glance-db-sync-f5m7z" Feb 03 08:58:40 crc kubenswrapper[4998]: I0203 08:58:40.444138 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: 
\"kubernetes.io/secret/ebbf8d47-3f9c-451c-ba8f-d37a8c74e692-db-sync-config-data\") pod \"glance-db-sync-f5m7z\" (UID: \"ebbf8d47-3f9c-451c-ba8f-d37a8c74e692\") " pod="openstack/glance-db-sync-f5m7z" Feb 03 08:58:40 crc kubenswrapper[4998]: I0203 08:58:40.546264 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebbf8d47-3f9c-451c-ba8f-d37a8c74e692-config-data\") pod \"glance-db-sync-f5m7z\" (UID: \"ebbf8d47-3f9c-451c-ba8f-d37a8c74e692\") " pod="openstack/glance-db-sync-f5m7z" Feb 03 08:58:40 crc kubenswrapper[4998]: I0203 08:58:40.546371 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebbf8d47-3f9c-451c-ba8f-d37a8c74e692-combined-ca-bundle\") pod \"glance-db-sync-f5m7z\" (UID: \"ebbf8d47-3f9c-451c-ba8f-d37a8c74e692\") " pod="openstack/glance-db-sync-f5m7z" Feb 03 08:58:40 crc kubenswrapper[4998]: I0203 08:58:40.546409 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8q5f\" (UniqueName: \"kubernetes.io/projected/ebbf8d47-3f9c-451c-ba8f-d37a8c74e692-kube-api-access-t8q5f\") pod \"glance-db-sync-f5m7z\" (UID: \"ebbf8d47-3f9c-451c-ba8f-d37a8c74e692\") " pod="openstack/glance-db-sync-f5m7z" Feb 03 08:58:40 crc kubenswrapper[4998]: I0203 08:58:40.546444 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ebbf8d47-3f9c-451c-ba8f-d37a8c74e692-db-sync-config-data\") pod \"glance-db-sync-f5m7z\" (UID: \"ebbf8d47-3f9c-451c-ba8f-d37a8c74e692\") " pod="openstack/glance-db-sync-f5m7z" Feb 03 08:58:40 crc kubenswrapper[4998]: I0203 08:58:40.556412 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebbf8d47-3f9c-451c-ba8f-d37a8c74e692-config-data\") pod \"glance-db-sync-f5m7z\" (UID: \"ebbf8d47-3f9c-451c-ba8f-d37a8c74e692\") " pod="openstack/glance-db-sync-f5m7z" Feb 03 08:58:40 crc kubenswrapper[4998]: I0203 08:58:40.559299 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ebbf8d47-3f9c-451c-ba8f-d37a8c74e692-db-sync-config-data\") pod \"glance-db-sync-f5m7z\" (UID: \"ebbf8d47-3f9c-451c-ba8f-d37a8c74e692\") " pod="openstack/glance-db-sync-f5m7z" Feb 03 08:58:40 crc kubenswrapper[4998]: I0203 08:58:40.561079 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebbf8d47-3f9c-451c-ba8f-d37a8c74e692-combined-ca-bundle\") pod \"glance-db-sync-f5m7z\" (UID: \"ebbf8d47-3f9c-451c-ba8f-d37a8c74e692\") " pod="openstack/glance-db-sync-f5m7z" Feb 03 08:58:40 crc kubenswrapper[4998]: I0203 08:58:40.570969 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8q5f\" (UniqueName: \"kubernetes.io/projected/ebbf8d47-3f9c-451c-ba8f-d37a8c74e692-kube-api-access-t8q5f\") pod \"glance-db-sync-f5m7z\" (UID: \"ebbf8d47-3f9c-451c-ba8f-d37a8c74e692\") " pod="openstack/glance-db-sync-f5m7z" Feb 03 08:58:40 crc kubenswrapper[4998]: I0203 08:58:40.624332 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-f5m7z" Feb 03 08:58:41 crc kubenswrapper[4998]: W0203 08:58:41.185242 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podebbf8d47_3f9c_451c_ba8f_d37a8c74e692.slice/crio-3aa16808797c5357e623f8da1719bf229455789cccb6ee557ce77cc430e81a67 WatchSource:0}: Error finding container 3aa16808797c5357e623f8da1719bf229455789cccb6ee557ce77cc430e81a67: Status 404 returned error can't find the container with id 3aa16808797c5357e623f8da1719bf229455789cccb6ee557ce77cc430e81a67 Feb 03 08:58:41 crc kubenswrapper[4998]: I0203 08:58:41.188147 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-f5m7z"] Feb 03 08:58:41 crc kubenswrapper[4998]: I0203 08:58:41.531963 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-f5m7z" event={"ID":"ebbf8d47-3f9c-451c-ba8f-d37a8c74e692","Type":"ContainerStarted","Data":"3aa16808797c5357e623f8da1719bf229455789cccb6ee557ce77cc430e81a67"} Feb 03 08:59:03 crc kubenswrapper[4998]: E0203 08:59:03.721160 4998 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-glance-api:cac82611632fe132c9e9b85f289b0dbc" Feb 03 08:59:03 crc kubenswrapper[4998]: E0203 08:59:03.721809 4998 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-glance-api:cac82611632fe132c9e9b85f289b0dbc" Feb 03 08:59:03 crc kubenswrapper[4998]: E0203 08:59:03.721970 4998 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:quay.rdoproject.org/podified-antelope-centos9/openstack-glance-api:cac82611632fe132c9e9b85f289b0dbc,Command:[/bin/bash],Args:[-c 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-t8q5f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-db-sync-f5m7z_openstack(ebbf8d47-3f9c-451c-ba8f-d37a8c74e692): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 03 08:59:03 crc kubenswrapper[4998]: E0203 08:59:03.723177 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-f5m7z" podUID="ebbf8d47-3f9c-451c-ba8f-d37a8c74e692" Feb 03 08:59:03 crc kubenswrapper[4998]: E0203 08:59:03.795137 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-antelope-centos9/openstack-glance-api:cac82611632fe132c9e9b85f289b0dbc\\\"\"" pod="openstack/glance-db-sync-f5m7z" podUID="ebbf8d47-3f9c-451c-ba8f-d37a8c74e692" Feb 03 08:59:09 crc kubenswrapper[4998]: I0203 08:59:09.503988 4998 scope.go:117] "RemoveContainer" containerID="43c34846f85a0a5ab432878af71cd16112e389d0d278f5b70a51f2222a9b9e5c" Feb 03 08:59:19 crc kubenswrapper[4998]: I0203 08:59:19.094332 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-f5m7z" event={"ID":"ebbf8d47-3f9c-451c-ba8f-d37a8c74e692","Type":"ContainerStarted","Data":"0bcfd5400f96288128f3d5fdd47a724da2b19a30f4f1341919ab921e753209a6"} Feb 03 08:59:19 crc kubenswrapper[4998]: I0203 08:59:19.121800 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-f5m7z" podStartSLOduration=2.1138180699999998 podStartE2EDuration="39.121758775s" 
podCreationTimestamp="2026-02-03 08:58:40 +0000 UTC" firstStartedPulling="2026-02-03 08:58:41.190999268 +0000 UTC m=+7959.477693074" lastFinishedPulling="2026-02-03 08:59:18.198939973 +0000 UTC m=+7996.485633779" observedRunningTime="2026-02-03 08:59:19.116197367 +0000 UTC m=+7997.402891213" watchObservedRunningTime="2026-02-03 08:59:19.121758775 +0000 UTC m=+7997.408452581" Feb 03 08:59:24 crc kubenswrapper[4998]: I0203 08:59:24.154661 4998 generic.go:334] "Generic (PLEG): container finished" podID="ebbf8d47-3f9c-451c-ba8f-d37a8c74e692" containerID="0bcfd5400f96288128f3d5fdd47a724da2b19a30f4f1341919ab921e753209a6" exitCode=0 Feb 03 08:59:24 crc kubenswrapper[4998]: I0203 08:59:24.154719 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-f5m7z" event={"ID":"ebbf8d47-3f9c-451c-ba8f-d37a8c74e692","Type":"ContainerDied","Data":"0bcfd5400f96288128f3d5fdd47a724da2b19a30f4f1341919ab921e753209a6"} Feb 03 08:59:25 crc kubenswrapper[4998]: I0203 08:59:25.557260 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-f5m7z" Feb 03 08:59:25 crc kubenswrapper[4998]: I0203 08:59:25.656595 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebbf8d47-3f9c-451c-ba8f-d37a8c74e692-config-data\") pod \"ebbf8d47-3f9c-451c-ba8f-d37a8c74e692\" (UID: \"ebbf8d47-3f9c-451c-ba8f-d37a8c74e692\") " Feb 03 08:59:25 crc kubenswrapper[4998]: I0203 08:59:25.656659 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t8q5f\" (UniqueName: \"kubernetes.io/projected/ebbf8d47-3f9c-451c-ba8f-d37a8c74e692-kube-api-access-t8q5f\") pod \"ebbf8d47-3f9c-451c-ba8f-d37a8c74e692\" (UID: \"ebbf8d47-3f9c-451c-ba8f-d37a8c74e692\") " Feb 03 08:59:25 crc kubenswrapper[4998]: I0203 08:59:25.656681 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebbf8d47-3f9c-451c-ba8f-d37a8c74e692-combined-ca-bundle\") pod \"ebbf8d47-3f9c-451c-ba8f-d37a8c74e692\" (UID: \"ebbf8d47-3f9c-451c-ba8f-d37a8c74e692\") " Feb 03 08:59:25 crc kubenswrapper[4998]: I0203 08:59:25.656763 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ebbf8d47-3f9c-451c-ba8f-d37a8c74e692-db-sync-config-data\") pod \"ebbf8d47-3f9c-451c-ba8f-d37a8c74e692\" (UID: \"ebbf8d47-3f9c-451c-ba8f-d37a8c74e692\") " Feb 03 08:59:25 crc kubenswrapper[4998]: I0203 08:59:25.663143 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebbf8d47-3f9c-451c-ba8f-d37a8c74e692-kube-api-access-t8q5f" (OuterVolumeSpecName: "kube-api-access-t8q5f") pod "ebbf8d47-3f9c-451c-ba8f-d37a8c74e692" (UID: "ebbf8d47-3f9c-451c-ba8f-d37a8c74e692"). InnerVolumeSpecName "kube-api-access-t8q5f". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:59:25 crc kubenswrapper[4998]: I0203 08:59:25.663748 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebbf8d47-3f9c-451c-ba8f-d37a8c74e692-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "ebbf8d47-3f9c-451c-ba8f-d37a8c74e692" (UID: "ebbf8d47-3f9c-451c-ba8f-d37a8c74e692"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:59:25 crc kubenswrapper[4998]: I0203 08:59:25.678712 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebbf8d47-3f9c-451c-ba8f-d37a8c74e692-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ebbf8d47-3f9c-451c-ba8f-d37a8c74e692" (UID: "ebbf8d47-3f9c-451c-ba8f-d37a8c74e692"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:59:25 crc kubenswrapper[4998]: I0203 08:59:25.696151 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebbf8d47-3f9c-451c-ba8f-d37a8c74e692-config-data" (OuterVolumeSpecName: "config-data") pod "ebbf8d47-3f9c-451c-ba8f-d37a8c74e692" (UID: "ebbf8d47-3f9c-451c-ba8f-d37a8c74e692"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:59:25 crc kubenswrapper[4998]: I0203 08:59:25.758321 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebbf8d47-3f9c-451c-ba8f-d37a8c74e692-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 08:59:25 crc kubenswrapper[4998]: I0203 08:59:25.758360 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t8q5f\" (UniqueName: \"kubernetes.io/projected/ebbf8d47-3f9c-451c-ba8f-d37a8c74e692-kube-api-access-t8q5f\") on node \"crc\" DevicePath \"\"" Feb 03 08:59:25 crc kubenswrapper[4998]: I0203 08:59:25.758374 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebbf8d47-3f9c-451c-ba8f-d37a8c74e692-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 08:59:25 crc kubenswrapper[4998]: I0203 08:59:25.758385 4998 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ebbf8d47-3f9c-451c-ba8f-d37a8c74e692-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.174770 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-f5m7z" event={"ID":"ebbf8d47-3f9c-451c-ba8f-d37a8c74e692","Type":"ContainerDied","Data":"3aa16808797c5357e623f8da1719bf229455789cccb6ee557ce77cc430e81a67"} Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.174897 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3aa16808797c5357e623f8da1719bf229455789cccb6ee557ce77cc430e81a67" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.174866 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-f5m7z" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.479370 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 08:59:26 crc kubenswrapper[4998]: E0203 08:59:26.479752 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebbf8d47-3f9c-451c-ba8f-d37a8c74e692" containerName="glance-db-sync" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.479770 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebbf8d47-3f9c-451c-ba8f-d37a8c74e692" containerName="glance-db-sync" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.479958 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebbf8d47-3f9c-451c-ba8f-d37a8c74e692" containerName="glance-db-sync" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.482968 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.488517 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.488713 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.488862 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-2l4z5" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.505340 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.571769 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a396ef0d-d880-46b3-815f-11315db55a65-config-data\") pod \"glance-default-external-api-0\" (UID: \"a396ef0d-d880-46b3-815f-11315db55a65\") " pod="openstack/glance-default-external-api-0" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.572061 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a396ef0d-d880-46b3-815f-11315db55a65-scripts\") pod \"glance-default-external-api-0\" (UID: \"a396ef0d-d880-46b3-815f-11315db55a65\") " pod="openstack/glance-default-external-api-0" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.572078 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a396ef0d-d880-46b3-815f-11315db55a65-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a396ef0d-d880-46b3-815f-11315db55a65\") " pod="openstack/glance-default-external-api-0" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.572201 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a396ef0d-d880-46b3-815f-11315db55a65-logs\") pod \"glance-default-external-api-0\" (UID: \"a396ef0d-d880-46b3-815f-11315db55a65\") " pod="openstack/glance-default-external-api-0" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.572241 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a396ef0d-d880-46b3-815f-11315db55a65-combined-ca-bundle\") pod 
\"glance-default-external-api-0\" (UID: \"a396ef0d-d880-46b3-815f-11315db55a65\") " pod="openstack/glance-default-external-api-0" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.572271 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-628z6\" (UniqueName: \"kubernetes.io/projected/a396ef0d-d880-46b3-815f-11315db55a65-kube-api-access-628z6\") pod \"glance-default-external-api-0\" (UID: \"a396ef0d-d880-46b3-815f-11315db55a65\") " pod="openstack/glance-default-external-api-0" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.619647 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c77f645c-4sb7w"] Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.621138 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c77f645c-4sb7w" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.637244 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c77f645c-4sb7w"] Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.673707 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a08a9a82-c064-43c5-bef3-7f2d85cd18e5-config\") pod \"dnsmasq-dns-5c77f645c-4sb7w\" (UID: \"a08a9a82-c064-43c5-bef3-7f2d85cd18e5\") " pod="openstack/dnsmasq-dns-5c77f645c-4sb7w" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.673753 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a08a9a82-c064-43c5-bef3-7f2d85cd18e5-ovsdbserver-nb\") pod \"dnsmasq-dns-5c77f645c-4sb7w\" (UID: \"a08a9a82-c064-43c5-bef3-7f2d85cd18e5\") " pod="openstack/dnsmasq-dns-5c77f645c-4sb7w" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.673799 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a396ef0d-d880-46b3-815f-11315db55a65-logs\") pod \"glance-default-external-api-0\" (UID: \"a396ef0d-d880-46b3-815f-11315db55a65\") " pod="openstack/glance-default-external-api-0" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.673835 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a396ef0d-d880-46b3-815f-11315db55a65-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a396ef0d-d880-46b3-815f-11315db55a65\") " pod="openstack/glance-default-external-api-0" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.673862 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a08a9a82-c064-43c5-bef3-7f2d85cd18e5-dns-svc\") pod \"dnsmasq-dns-5c77f645c-4sb7w\" (UID: \"a08a9a82-c064-43c5-bef3-7f2d85cd18e5\") " pod="openstack/dnsmasq-dns-5c77f645c-4sb7w" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.673886 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dl6s6\" (UniqueName: \"kubernetes.io/projected/a08a9a82-c064-43c5-bef3-7f2d85cd18e5-kube-api-access-dl6s6\") pod \"dnsmasq-dns-5c77f645c-4sb7w\" (UID: \"a08a9a82-c064-43c5-bef3-7f2d85cd18e5\") " pod="openstack/dnsmasq-dns-5c77f645c-4sb7w" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.673903 4998 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-628z6\" (UniqueName: \"kubernetes.io/projected/a396ef0d-d880-46b3-815f-11315db55a65-kube-api-access-628z6\") pod \"glance-default-external-api-0\" (UID: \"a396ef0d-d880-46b3-815f-11315db55a65\") " pod="openstack/glance-default-external-api-0" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.673959 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a396ef0d-d880-46b3-815f-11315db55a65-config-data\") pod \"glance-default-external-api-0\" (UID: \"a396ef0d-d880-46b3-815f-11315db55a65\") " pod="openstack/glance-default-external-api-0" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.673975 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a396ef0d-d880-46b3-815f-11315db55a65-scripts\") pod \"glance-default-external-api-0\" (UID: \"a396ef0d-d880-46b3-815f-11315db55a65\") " pod="openstack/glance-default-external-api-0" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.673990 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a396ef0d-d880-46b3-815f-11315db55a65-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a396ef0d-d880-46b3-815f-11315db55a65\") " pod="openstack/glance-default-external-api-0" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.674025 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a08a9a82-c064-43c5-bef3-7f2d85cd18e5-ovsdbserver-sb\") pod \"dnsmasq-dns-5c77f645c-4sb7w\" (UID: \"a08a9a82-c064-43c5-bef3-7f2d85cd18e5\") " pod="openstack/dnsmasq-dns-5c77f645c-4sb7w" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.674986 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a396ef0d-d880-46b3-815f-11315db55a65-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a396ef0d-d880-46b3-815f-11315db55a65\") " pod="openstack/glance-default-external-api-0" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.676216 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a396ef0d-d880-46b3-815f-11315db55a65-logs\") pod \"glance-default-external-api-0\" (UID: \"a396ef0d-d880-46b3-815f-11315db55a65\") " pod="openstack/glance-default-external-api-0" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.682791 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a396ef0d-d880-46b3-815f-11315db55a65-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a396ef0d-d880-46b3-815f-11315db55a65\") " pod="openstack/glance-default-external-api-0" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.686036 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a396ef0d-d880-46b3-815f-11315db55a65-config-data\") pod \"glance-default-external-api-0\" (UID: \"a396ef0d-d880-46b3-815f-11315db55a65\") " pod="openstack/glance-default-external-api-0" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.699377 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/a396ef0d-d880-46b3-815f-11315db55a65-scripts\") pod \"glance-default-external-api-0\" (UID: \"a396ef0d-d880-46b3-815f-11315db55a65\") " pod="openstack/glance-default-external-api-0" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.703670 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-628z6\" (UniqueName: \"kubernetes.io/projected/a396ef0d-d880-46b3-815f-11315db55a65-kube-api-access-628z6\") pod \"glance-default-external-api-0\" (UID: \"a396ef0d-d880-46b3-815f-11315db55a65\") " pod="openstack/glance-default-external-api-0" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.714251 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.716435 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.720546 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.721030 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.778635 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a08a9a82-c064-43c5-bef3-7f2d85cd18e5-config\") pod \"dnsmasq-dns-5c77f645c-4sb7w\" (UID: \"a08a9a82-c064-43c5-bef3-7f2d85cd18e5\") " pod="openstack/dnsmasq-dns-5c77f645c-4sb7w" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.778673 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a08a9a82-c064-43c5-bef3-7f2d85cd18e5-ovsdbserver-nb\") pod \"dnsmasq-dns-5c77f645c-4sb7w\" (UID: \"a08a9a82-c064-43c5-bef3-7f2d85cd18e5\") " pod="openstack/dnsmasq-dns-5c77f645c-4sb7w" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.778877 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a08a9a82-c064-43c5-bef3-7f2d85cd18e5-dns-svc\") pod \"dnsmasq-dns-5c77f645c-4sb7w\" (UID: \"a08a9a82-c064-43c5-bef3-7f2d85cd18e5\") " pod="openstack/dnsmasq-dns-5c77f645c-4sb7w" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.778939 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dl6s6\" (UniqueName: \"kubernetes.io/projected/a08a9a82-c064-43c5-bef3-7f2d85cd18e5-kube-api-access-dl6s6\") pod \"dnsmasq-dns-5c77f645c-4sb7w\" (UID: \"a08a9a82-c064-43c5-bef3-7f2d85cd18e5\") " pod="openstack/dnsmasq-dns-5c77f645c-4sb7w" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.779142 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a08a9a82-c064-43c5-bef3-7f2d85cd18e5-ovsdbserver-sb\") pod \"dnsmasq-dns-5c77f645c-4sb7w\" (UID: \"a08a9a82-c064-43c5-bef3-7f2d85cd18e5\") " pod="openstack/dnsmasq-dns-5c77f645c-4sb7w" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.780129 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a08a9a82-c064-43c5-bef3-7f2d85cd18e5-ovsdbserver-sb\") pod \"dnsmasq-dns-5c77f645c-4sb7w\" (UID: \"a08a9a82-c064-43c5-bef3-7f2d85cd18e5\") " 
pod="openstack/dnsmasq-dns-5c77f645c-4sb7w" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.780203 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a08a9a82-c064-43c5-bef3-7f2d85cd18e5-config\") pod \"dnsmasq-dns-5c77f645c-4sb7w\" (UID: \"a08a9a82-c064-43c5-bef3-7f2d85cd18e5\") " pod="openstack/dnsmasq-dns-5c77f645c-4sb7w" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.780723 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a08a9a82-c064-43c5-bef3-7f2d85cd18e5-dns-svc\") pod \"dnsmasq-dns-5c77f645c-4sb7w\" (UID: \"a08a9a82-c064-43c5-bef3-7f2d85cd18e5\") " pod="openstack/dnsmasq-dns-5c77f645c-4sb7w" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.781077 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a08a9a82-c064-43c5-bef3-7f2d85cd18e5-ovsdbserver-nb\") pod \"dnsmasq-dns-5c77f645c-4sb7w\" (UID: \"a08a9a82-c064-43c5-bef3-7f2d85cd18e5\") " pod="openstack/dnsmasq-dns-5c77f645c-4sb7w" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.797485 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dl6s6\" (UniqueName: \"kubernetes.io/projected/a08a9a82-c064-43c5-bef3-7f2d85cd18e5-kube-api-access-dl6s6\") pod \"dnsmasq-dns-5c77f645c-4sb7w\" (UID: \"a08a9a82-c064-43c5-bef3-7f2d85cd18e5\") " pod="openstack/dnsmasq-dns-5c77f645c-4sb7w" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.822263 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.880347 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e93aa17-56ba-4b4c-8845-da4f973d48e2-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9e93aa17-56ba-4b4c-8845-da4f973d48e2\") " pod="openstack/glance-default-internal-api-0" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.880552 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e93aa17-56ba-4b4c-8845-da4f973d48e2-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9e93aa17-56ba-4b4c-8845-da4f973d48e2\") " pod="openstack/glance-default-internal-api-0" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.880592 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwlm6\" (UniqueName: \"kubernetes.io/projected/9e93aa17-56ba-4b4c-8845-da4f973d48e2-kube-api-access-xwlm6\") pod \"glance-default-internal-api-0\" (UID: \"9e93aa17-56ba-4b4c-8845-da4f973d48e2\") " pod="openstack/glance-default-internal-api-0" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.880636 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9e93aa17-56ba-4b4c-8845-da4f973d48e2-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9e93aa17-56ba-4b4c-8845-da4f973d48e2\") " pod="openstack/glance-default-internal-api-0" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.880682 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/9e93aa17-56ba-4b4c-8845-da4f973d48e2-logs\") pod \"glance-default-internal-api-0\" (UID: \"9e93aa17-56ba-4b4c-8845-da4f973d48e2\") " pod="openstack/glance-default-internal-api-0" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.880735 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e93aa17-56ba-4b4c-8845-da4f973d48e2-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9e93aa17-56ba-4b4c-8845-da4f973d48e2\") " pod="openstack/glance-default-internal-api-0" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.937636 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c77f645c-4sb7w" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.981963 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e93aa17-56ba-4b4c-8845-da4f973d48e2-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9e93aa17-56ba-4b4c-8845-da4f973d48e2\") " pod="openstack/glance-default-internal-api-0" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.982008 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwlm6\" (UniqueName: \"kubernetes.io/projected/9e93aa17-56ba-4b4c-8845-da4f973d48e2-kube-api-access-xwlm6\") pod \"glance-default-internal-api-0\" (UID: \"9e93aa17-56ba-4b4c-8845-da4f973d48e2\") " pod="openstack/glance-default-internal-api-0" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.982060 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9e93aa17-56ba-4b4c-8845-da4f973d48e2-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9e93aa17-56ba-4b4c-8845-da4f973d48e2\") " pod="openstack/glance-default-internal-api-0" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.982084 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9e93aa17-56ba-4b4c-8845-da4f973d48e2-logs\") pod \"glance-default-internal-api-0\" (UID: \"9e93aa17-56ba-4b4c-8845-da4f973d48e2\") " pod="openstack/glance-default-internal-api-0" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.982129 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e93aa17-56ba-4b4c-8845-da4f973d48e2-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9e93aa17-56ba-4b4c-8845-da4f973d48e2\") " pod="openstack/glance-default-internal-api-0" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.982164 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e93aa17-56ba-4b4c-8845-da4f973d48e2-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9e93aa17-56ba-4b4c-8845-da4f973d48e2\") " pod="openstack/glance-default-internal-api-0" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.983202 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9e93aa17-56ba-4b4c-8845-da4f973d48e2-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9e93aa17-56ba-4b4c-8845-da4f973d48e2\") " pod="openstack/glance-default-internal-api-0" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.983823 4998 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9e93aa17-56ba-4b4c-8845-da4f973d48e2-logs\") pod \"glance-default-internal-api-0\" (UID: \"9e93aa17-56ba-4b4c-8845-da4f973d48e2\") " pod="openstack/glance-default-internal-api-0" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.989635 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e93aa17-56ba-4b4c-8845-da4f973d48e2-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9e93aa17-56ba-4b4c-8845-da4f973d48e2\") " pod="openstack/glance-default-internal-api-0" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.991600 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e93aa17-56ba-4b4c-8845-da4f973d48e2-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9e93aa17-56ba-4b4c-8845-da4f973d48e2\") " pod="openstack/glance-default-internal-api-0" Feb 03 08:59:26 crc kubenswrapper[4998]: I0203 08:59:26.998536 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e93aa17-56ba-4b4c-8845-da4f973d48e2-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9e93aa17-56ba-4b4c-8845-da4f973d48e2\") " pod="openstack/glance-default-internal-api-0" Feb 03 08:59:27 crc kubenswrapper[4998]: I0203 08:59:27.004481 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwlm6\" (UniqueName: \"kubernetes.io/projected/9e93aa17-56ba-4b4c-8845-da4f973d48e2-kube-api-access-xwlm6\") pod \"glance-default-internal-api-0\" (UID: \"9e93aa17-56ba-4b4c-8845-da4f973d48e2\") " pod="openstack/glance-default-internal-api-0" Feb 03 08:59:27 crc kubenswrapper[4998]: I0203 08:59:27.073349 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 03 08:59:27 crc kubenswrapper[4998]: I0203 08:59:27.389106 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 08:59:27 crc kubenswrapper[4998]: W0203 08:59:27.507686 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda08a9a82_c064_43c5_bef3_7f2d85cd18e5.slice/crio-0b4cf97aa32acbe7c502dde057b5ac0b71a9e338e6e9b3ab65d5c726137ce691 WatchSource:0}: Error finding container 0b4cf97aa32acbe7c502dde057b5ac0b71a9e338e6e9b3ab65d5c726137ce691: Status 404 returned error can't find the container with id 0b4cf97aa32acbe7c502dde057b5ac0b71a9e338e6e9b3ab65d5c726137ce691 Feb 03 08:59:27 crc kubenswrapper[4998]: I0203 08:59:27.507720 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c77f645c-4sb7w"] Feb 03 08:59:27 crc kubenswrapper[4998]: W0203 08:59:27.666495 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9e93aa17_56ba_4b4c_8845_da4f973d48e2.slice/crio-d4c61e02179a5c39c40bdc2d72bea28435384b8f6771bf2867c1e77c09e756df WatchSource:0}: Error finding container d4c61e02179a5c39c40bdc2d72bea28435384b8f6771bf2867c1e77c09e756df: Status 404 returned error can't find the container with id d4c61e02179a5c39c40bdc2d72bea28435384b8f6771bf2867c1e77c09e756df Feb 03 08:59:27 crc kubenswrapper[4998]: I0203 08:59:27.669017 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 08:59:27 crc kubenswrapper[4998]: I0203 08:59:27.841486 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 08:59:28 crc kubenswrapper[4998]: I0203 08:59:28.218414 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a396ef0d-d880-46b3-815f-11315db55a65","Type":"ContainerStarted","Data":"d9b5e99f5bfa1fa75b79fcd9efd297570ca8b311d64359ae0d5ece909e36ffcd"} Feb 03 08:59:28 crc kubenswrapper[4998]: I0203 08:59:28.218720 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a396ef0d-d880-46b3-815f-11315db55a65","Type":"ContainerStarted","Data":"4a87554ce99f07ea6bb4b6bba74f7a478fa5925a92dc1a737b5f65a5b396e508"} Feb 03 08:59:28 crc kubenswrapper[4998]: I0203 08:59:28.220093 4998 generic.go:334] "Generic (PLEG): container finished" podID="a08a9a82-c064-43c5-bef3-7f2d85cd18e5" containerID="0d6dfd70db670ab6c62e2ec19f878b4c4f6a001506ecebe77e45c6c39723fe5f" exitCode=0 Feb 03 08:59:28 crc kubenswrapper[4998]: I0203 08:59:28.220166 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c77f645c-4sb7w" event={"ID":"a08a9a82-c064-43c5-bef3-7f2d85cd18e5","Type":"ContainerDied","Data":"0d6dfd70db670ab6c62e2ec19f878b4c4f6a001506ecebe77e45c6c39723fe5f"} Feb 03 08:59:28 crc kubenswrapper[4998]: I0203 08:59:28.220206 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c77f645c-4sb7w" event={"ID":"a08a9a82-c064-43c5-bef3-7f2d85cd18e5","Type":"ContainerStarted","Data":"0b4cf97aa32acbe7c502dde057b5ac0b71a9e338e6e9b3ab65d5c726137ce691"} Feb 03 08:59:28 crc kubenswrapper[4998]: I0203 08:59:28.227348 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"9e93aa17-56ba-4b4c-8845-da4f973d48e2","Type":"ContainerStarted","Data":"d4c61e02179a5c39c40bdc2d72bea28435384b8f6771bf2867c1e77c09e756df"} Feb 03 08:59:29 crc kubenswrapper[4998]: I0203 08:59:29.239420 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9e93aa17-56ba-4b4c-8845-da4f973d48e2","Type":"ContainerStarted","Data":"6ee84862794bbc0eb61ed05377205dc039fb1d5da928fdc1bfa88e38d28762a4"} Feb 03 08:59:29 crc kubenswrapper[4998]: I0203 08:59:29.239952 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9e93aa17-56ba-4b4c-8845-da4f973d48e2","Type":"ContainerStarted","Data":"d60789074fe60e99a1ca34d9c289b0959423fc9e98bcc1b4c523243f581dbf2a"} Feb 03 08:59:29 crc kubenswrapper[4998]: I0203 08:59:29.242123 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a396ef0d-d880-46b3-815f-11315db55a65","Type":"ContainerStarted","Data":"77d72051fa5a905123eae0e0bee6dc6f64eeffb5749d7416933f2878151a62fd"} Feb 03 08:59:29 crc kubenswrapper[4998]: I0203 08:59:29.242205 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="a396ef0d-d880-46b3-815f-11315db55a65" containerName="glance-httpd" containerID="cri-o://77d72051fa5a905123eae0e0bee6dc6f64eeffb5749d7416933f2878151a62fd" gracePeriod=30 Feb 03 08:59:29 crc kubenswrapper[4998]: I0203 08:59:29.242201 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="a396ef0d-d880-46b3-815f-11315db55a65" containerName="glance-log" containerID="cri-o://d9b5e99f5bfa1fa75b79fcd9efd297570ca8b311d64359ae0d5ece909e36ffcd" gracePeriod=30 Feb 03 08:59:29 crc kubenswrapper[4998]: I0203 08:59:29.244514 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c77f645c-4sb7w" event={"ID":"a08a9a82-c064-43c5-bef3-7f2d85cd18e5","Type":"ContainerStarted","Data":"f159a68d249149e532b4e23ed202fd8b3e27eeba75935c2c911ae310e321216f"} Feb 03 08:59:29 crc kubenswrapper[4998]: I0203 08:59:29.244706 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c77f645c-4sb7w" Feb 03 08:59:29 crc kubenswrapper[4998]: I0203 08:59:29.258972 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.258955092 podStartE2EDuration="3.258955092s" podCreationTimestamp="2026-02-03 08:59:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 08:59:29.258078907 +0000 UTC m=+8007.544772723" watchObservedRunningTime="2026-02-03 08:59:29.258955092 +0000 UTC m=+8007.545648898" Feb 03 08:59:29 crc kubenswrapper[4998]: I0203 08:59:29.282802 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c77f645c-4sb7w" podStartSLOduration=3.282774158 podStartE2EDuration="3.282774158s" podCreationTimestamp="2026-02-03 08:59:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 08:59:29.277111517 +0000 UTC m=+8007.563805333" watchObservedRunningTime="2026-02-03 08:59:29.282774158 +0000 UTC m=+8007.569467964" Feb 03 08:59:29 crc kubenswrapper[4998]: I0203 08:59:29.302683 4998 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.302663432 podStartE2EDuration="3.302663432s" podCreationTimestamp="2026-02-03 08:59:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 08:59:29.296858397 +0000 UTC m=+8007.583552223" watchObservedRunningTime="2026-02-03 08:59:29.302663432 +0000 UTC m=+8007.589357238" Feb 03 08:59:29 crc kubenswrapper[4998]: I0203 08:59:29.800477 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 08:59:30 crc kubenswrapper[4998]: I0203 08:59:30.258058 4998 generic.go:334] "Generic (PLEG): container finished" podID="a396ef0d-d880-46b3-815f-11315db55a65" containerID="77d72051fa5a905123eae0e0bee6dc6f64eeffb5749d7416933f2878151a62fd" exitCode=0 Feb 03 08:59:30 crc kubenswrapper[4998]: I0203 08:59:30.258098 4998 generic.go:334] "Generic (PLEG): container finished" podID="a396ef0d-d880-46b3-815f-11315db55a65" containerID="d9b5e99f5bfa1fa75b79fcd9efd297570ca8b311d64359ae0d5ece909e36ffcd" exitCode=143 Feb 03 08:59:30 crc kubenswrapper[4998]: I0203 08:59:30.259086 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a396ef0d-d880-46b3-815f-11315db55a65","Type":"ContainerDied","Data":"77d72051fa5a905123eae0e0bee6dc6f64eeffb5749d7416933f2878151a62fd"} Feb 03 08:59:30 crc kubenswrapper[4998]: I0203 08:59:30.259120 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a396ef0d-d880-46b3-815f-11315db55a65","Type":"ContainerDied","Data":"d9b5e99f5bfa1fa75b79fcd9efd297570ca8b311d64359ae0d5ece909e36ffcd"} Feb 03 08:59:30 crc kubenswrapper[4998]: I0203 08:59:30.422434 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 03 08:59:30 crc kubenswrapper[4998]: I0203 08:59:30.558455 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a396ef0d-d880-46b3-815f-11315db55a65-logs\") pod \"a396ef0d-d880-46b3-815f-11315db55a65\" (UID: \"a396ef0d-d880-46b3-815f-11315db55a65\") " Feb 03 08:59:30 crc kubenswrapper[4998]: I0203 08:59:30.558905 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-628z6\" (UniqueName: \"kubernetes.io/projected/a396ef0d-d880-46b3-815f-11315db55a65-kube-api-access-628z6\") pod \"a396ef0d-d880-46b3-815f-11315db55a65\" (UID: \"a396ef0d-d880-46b3-815f-11315db55a65\") " Feb 03 08:59:30 crc kubenswrapper[4998]: I0203 08:59:30.558952 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a396ef0d-d880-46b3-815f-11315db55a65-httpd-run\") pod \"a396ef0d-d880-46b3-815f-11315db55a65\" (UID: \"a396ef0d-d880-46b3-815f-11315db55a65\") " Feb 03 08:59:30 crc kubenswrapper[4998]: I0203 08:59:30.559030 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a396ef0d-d880-46b3-815f-11315db55a65-scripts\") pod \"a396ef0d-d880-46b3-815f-11315db55a65\" (UID: \"a396ef0d-d880-46b3-815f-11315db55a65\") " Feb 03 08:59:30 crc kubenswrapper[4998]: I0203 08:59:30.559165 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a396ef0d-d880-46b3-815f-11315db55a65-config-data\") pod \"a396ef0d-d880-46b3-815f-11315db55a65\" (UID: \"a396ef0d-d880-46b3-815f-11315db55a65\") " Feb 03 08:59:30 crc kubenswrapper[4998]: I0203 08:59:30.559214 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a396ef0d-d880-46b3-815f-11315db55a65-combined-ca-bundle\") pod \"a396ef0d-d880-46b3-815f-11315db55a65\" (UID: \"a396ef0d-d880-46b3-815f-11315db55a65\") " Feb 03 08:59:30 crc kubenswrapper[4998]: I0203 08:59:30.559437 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a396ef0d-d880-46b3-815f-11315db55a65-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "a396ef0d-d880-46b3-815f-11315db55a65" (UID: "a396ef0d-d880-46b3-815f-11315db55a65"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:59:30 crc kubenswrapper[4998]: I0203 08:59:30.560474 4998 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a396ef0d-d880-46b3-815f-11315db55a65-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 03 08:59:30 crc kubenswrapper[4998]: I0203 08:59:30.560696 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a396ef0d-d880-46b3-815f-11315db55a65-logs" (OuterVolumeSpecName: "logs") pod "a396ef0d-d880-46b3-815f-11315db55a65" (UID: "a396ef0d-d880-46b3-815f-11315db55a65"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:59:30 crc kubenswrapper[4998]: I0203 08:59:30.569409 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a396ef0d-d880-46b3-815f-11315db55a65-kube-api-access-628z6" (OuterVolumeSpecName: "kube-api-access-628z6") pod "a396ef0d-d880-46b3-815f-11315db55a65" (UID: "a396ef0d-d880-46b3-815f-11315db55a65"). InnerVolumeSpecName "kube-api-access-628z6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:59:30 crc kubenswrapper[4998]: I0203 08:59:30.573447 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a396ef0d-d880-46b3-815f-11315db55a65-scripts" (OuterVolumeSpecName: "scripts") pod "a396ef0d-d880-46b3-815f-11315db55a65" (UID: "a396ef0d-d880-46b3-815f-11315db55a65"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:59:30 crc kubenswrapper[4998]: I0203 08:59:30.585231 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a396ef0d-d880-46b3-815f-11315db55a65-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a396ef0d-d880-46b3-815f-11315db55a65" (UID: "a396ef0d-d880-46b3-815f-11315db55a65"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:59:30 crc kubenswrapper[4998]: I0203 08:59:30.612906 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a396ef0d-d880-46b3-815f-11315db55a65-config-data" (OuterVolumeSpecName: "config-data") pod "a396ef0d-d880-46b3-815f-11315db55a65" (UID: "a396ef0d-d880-46b3-815f-11315db55a65"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:59:30 crc kubenswrapper[4998]: I0203 08:59:30.662582 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a396ef0d-d880-46b3-815f-11315db55a65-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 08:59:30 crc kubenswrapper[4998]: I0203 08:59:30.662619 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a396ef0d-d880-46b3-815f-11315db55a65-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 08:59:30 crc kubenswrapper[4998]: I0203 08:59:30.662629 4998 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a396ef0d-d880-46b3-815f-11315db55a65-logs\") on node \"crc\" DevicePath \"\"" Feb 03 08:59:30 crc kubenswrapper[4998]: I0203 08:59:30.662639 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-628z6\" (UniqueName: \"kubernetes.io/projected/a396ef0d-d880-46b3-815f-11315db55a65-kube-api-access-628z6\") on node \"crc\" DevicePath \"\"" Feb 03 08:59:30 crc kubenswrapper[4998]: I0203 08:59:30.662648 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a396ef0d-d880-46b3-815f-11315db55a65-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 08:59:31 crc kubenswrapper[4998]: I0203 08:59:31.271155 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="9e93aa17-56ba-4b4c-8845-da4f973d48e2" containerName="glance-log" containerID="cri-o://d60789074fe60e99a1ca34d9c289b0959423fc9e98bcc1b4c523243f581dbf2a" gracePeriod=30 Feb 03 08:59:31 crc kubenswrapper[4998]: I0203 08:59:31.271592 4998 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 03 08:59:31 crc kubenswrapper[4998]: I0203 08:59:31.272108 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a396ef0d-d880-46b3-815f-11315db55a65","Type":"ContainerDied","Data":"4a87554ce99f07ea6bb4b6bba74f7a478fa5925a92dc1a737b5f65a5b396e508"} Feb 03 08:59:31 crc kubenswrapper[4998]: I0203 08:59:31.272185 4998 scope.go:117] "RemoveContainer" containerID="77d72051fa5a905123eae0e0bee6dc6f64eeffb5749d7416933f2878151a62fd" Feb 03 08:59:31 crc kubenswrapper[4998]: I0203 08:59:31.272175 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="9e93aa17-56ba-4b4c-8845-da4f973d48e2" containerName="glance-httpd" containerID="cri-o://6ee84862794bbc0eb61ed05377205dc039fb1d5da928fdc1bfa88e38d28762a4" gracePeriod=30 Feb 03 08:59:31 crc kubenswrapper[4998]: I0203 08:59:31.309136 4998 scope.go:117] "RemoveContainer" containerID="d9b5e99f5bfa1fa75b79fcd9efd297570ca8b311d64359ae0d5ece909e36ffcd" Feb 03 08:59:31 crc kubenswrapper[4998]: I0203 08:59:31.322045 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 08:59:31 crc kubenswrapper[4998]: I0203 08:59:31.334052 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 08:59:31 crc kubenswrapper[4998]: I0203 08:59:31.358537 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 08:59:31 crc kubenswrapper[4998]: E0203 08:59:31.364521 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a396ef0d-d880-46b3-815f-11315db55a65" containerName="glance-log" Feb 03 08:59:31 crc kubenswrapper[4998]: I0203 08:59:31.364716 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="a396ef0d-d880-46b3-815f-11315db55a65" containerName="glance-log" Feb 03 08:59:31 crc kubenswrapper[4998]: E0203 08:59:31.364856 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a396ef0d-d880-46b3-815f-11315db55a65" containerName="glance-httpd" Feb 03 08:59:31 crc kubenswrapper[4998]: I0203 08:59:31.364934 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="a396ef0d-d880-46b3-815f-11315db55a65" containerName="glance-httpd" Feb 03 08:59:31 crc kubenswrapper[4998]: I0203 08:59:31.365225 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="a396ef0d-d880-46b3-815f-11315db55a65" containerName="glance-log" Feb 03 08:59:31 crc kubenswrapper[4998]: I0203 08:59:31.365439 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="a396ef0d-d880-46b3-815f-11315db55a65" containerName="glance-httpd" Feb 03 08:59:31 crc kubenswrapper[4998]: I0203 08:59:31.366715 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 03 08:59:31 crc kubenswrapper[4998]: I0203 08:59:31.369563 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Feb 03 08:59:31 crc kubenswrapper[4998]: I0203 08:59:31.370248 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 08:59:31 crc kubenswrapper[4998]: I0203 08:59:31.477256 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9bbff215-ce70-481e-b365-c88d60876046-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9bbff215-ce70-481e-b365-c88d60876046\") " pod="openstack/glance-default-external-api-0" Feb 03 08:59:31 crc kubenswrapper[4998]: I0203 08:59:31.477390 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9bbff215-ce70-481e-b365-c88d60876046-scripts\") pod \"glance-default-external-api-0\" (UID: \"9bbff215-ce70-481e-b365-c88d60876046\") " pod="openstack/glance-default-external-api-0" Feb 03 08:59:31 crc kubenswrapper[4998]: I0203 08:59:31.477652 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bbff215-ce70-481e-b365-c88d60876046-config-data\") pod \"glance-default-external-api-0\" (UID: \"9bbff215-ce70-481e-b365-c88d60876046\") " pod="openstack/glance-default-external-api-0" Feb 03 08:59:31 crc kubenswrapper[4998]: I0203 08:59:31.477832 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9bbff215-ce70-481e-b365-c88d60876046-logs\") pod \"glance-default-external-api-0\" (UID: \"9bbff215-ce70-481e-b365-c88d60876046\") " pod="openstack/glance-default-external-api-0" Feb 03 08:59:31 crc kubenswrapper[4998]: I0203 08:59:31.478179 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfr22\" (UniqueName: \"kubernetes.io/projected/9bbff215-ce70-481e-b365-c88d60876046-kube-api-access-jfr22\") pod \"glance-default-external-api-0\" (UID: \"9bbff215-ce70-481e-b365-c88d60876046\") " pod="openstack/glance-default-external-api-0" Feb 03 08:59:31 crc kubenswrapper[4998]: I0203 08:59:31.478274 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bbff215-ce70-481e-b365-c88d60876046-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"9bbff215-ce70-481e-b365-c88d60876046\") " pod="openstack/glance-default-external-api-0" Feb 03 08:59:31 crc kubenswrapper[4998]: I0203 08:59:31.580051 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9bbff215-ce70-481e-b365-c88d60876046-logs\") pod \"glance-default-external-api-0\" (UID: \"9bbff215-ce70-481e-b365-c88d60876046\") " pod="openstack/glance-default-external-api-0" Feb 03 08:59:31 crc kubenswrapper[4998]: I0203 08:59:31.580172 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfr22\" (UniqueName: \"kubernetes.io/projected/9bbff215-ce70-481e-b365-c88d60876046-kube-api-access-jfr22\") pod \"glance-default-external-api-0\" (UID: 
\"9bbff215-ce70-481e-b365-c88d60876046\") " pod="openstack/glance-default-external-api-0" Feb 03 08:59:31 crc kubenswrapper[4998]: I0203 08:59:31.580216 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bbff215-ce70-481e-b365-c88d60876046-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"9bbff215-ce70-481e-b365-c88d60876046\") " pod="openstack/glance-default-external-api-0" Feb 03 08:59:31 crc kubenswrapper[4998]: I0203 08:59:31.580302 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9bbff215-ce70-481e-b365-c88d60876046-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9bbff215-ce70-481e-b365-c88d60876046\") " pod="openstack/glance-default-external-api-0" Feb 03 08:59:31 crc kubenswrapper[4998]: I0203 08:59:31.580330 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9bbff215-ce70-481e-b365-c88d60876046-scripts\") pod \"glance-default-external-api-0\" (UID: \"9bbff215-ce70-481e-b365-c88d60876046\") " pod="openstack/glance-default-external-api-0" Feb 03 08:59:31 crc kubenswrapper[4998]: I0203 08:59:31.580398 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bbff215-ce70-481e-b365-c88d60876046-config-data\") pod \"glance-default-external-api-0\" (UID: \"9bbff215-ce70-481e-b365-c88d60876046\") " pod="openstack/glance-default-external-api-0" Feb 03 08:59:31 crc kubenswrapper[4998]: I0203 08:59:31.580767 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9bbff215-ce70-481e-b365-c88d60876046-logs\") pod \"glance-default-external-api-0\" (UID: \"9bbff215-ce70-481e-b365-c88d60876046\") " pod="openstack/glance-default-external-api-0" Feb 03 08:59:31 crc kubenswrapper[4998]: I0203 08:59:31.581714 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9bbff215-ce70-481e-b365-c88d60876046-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9bbff215-ce70-481e-b365-c88d60876046\") " pod="openstack/glance-default-external-api-0" Feb 03 08:59:31 crc kubenswrapper[4998]: I0203 08:59:31.584736 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bbff215-ce70-481e-b365-c88d60876046-config-data\") pod \"glance-default-external-api-0\" (UID: \"9bbff215-ce70-481e-b365-c88d60876046\") " pod="openstack/glance-default-external-api-0" Feb 03 08:59:31 crc kubenswrapper[4998]: I0203 08:59:31.585687 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bbff215-ce70-481e-b365-c88d60876046-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"9bbff215-ce70-481e-b365-c88d60876046\") " pod="openstack/glance-default-external-api-0" Feb 03 08:59:31 crc kubenswrapper[4998]: I0203 08:59:31.589283 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9bbff215-ce70-481e-b365-c88d60876046-scripts\") pod \"glance-default-external-api-0\" (UID: \"9bbff215-ce70-481e-b365-c88d60876046\") " pod="openstack/glance-default-external-api-0" Feb 03 08:59:31 crc kubenswrapper[4998]: I0203 08:59:31.600951 4998 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfr22\" (UniqueName: \"kubernetes.io/projected/9bbff215-ce70-481e-b365-c88d60876046-kube-api-access-jfr22\") pod \"glance-default-external-api-0\" (UID: \"9bbff215-ce70-481e-b365-c88d60876046\") " pod="openstack/glance-default-external-api-0" Feb 03 08:59:31 crc kubenswrapper[4998]: I0203 08:59:31.768243 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 03 08:59:32 crc kubenswrapper[4998]: I0203 08:59:32.391037 4998 generic.go:334] "Generic (PLEG): container finished" podID="9e93aa17-56ba-4b4c-8845-da4f973d48e2" containerID="6ee84862794bbc0eb61ed05377205dc039fb1d5da928fdc1bfa88e38d28762a4" exitCode=0 Feb 03 08:59:32 crc kubenswrapper[4998]: I0203 08:59:32.391309 4998 generic.go:334] "Generic (PLEG): container finished" podID="9e93aa17-56ba-4b4c-8845-da4f973d48e2" containerID="d60789074fe60e99a1ca34d9c289b0959423fc9e98bcc1b4c523243f581dbf2a" exitCode=143 Feb 03 08:59:32 crc kubenswrapper[4998]: I0203 08:59:32.391118 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9e93aa17-56ba-4b4c-8845-da4f973d48e2","Type":"ContainerDied","Data":"6ee84862794bbc0eb61ed05377205dc039fb1d5da928fdc1bfa88e38d28762a4"} Feb 03 08:59:32 crc kubenswrapper[4998]: I0203 08:59:32.391391 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9e93aa17-56ba-4b4c-8845-da4f973d48e2","Type":"ContainerDied","Data":"d60789074fe60e99a1ca34d9c289b0959423fc9e98bcc1b4c523243f581dbf2a"} Feb 03 08:59:32 crc kubenswrapper[4998]: I0203 08:59:32.443695 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a396ef0d-d880-46b3-815f-11315db55a65" path="/var/lib/kubelet/pods/a396ef0d-d880-46b3-815f-11315db55a65/volumes" Feb 03 08:59:32 crc kubenswrapper[4998]: W0203 08:59:32.480111 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9bbff215_ce70_481e_b365_c88d60876046.slice/crio-a5ee8d83fbe8ab899adedbc25098eba7f82afcd2cd05e7965728c0a62b6aaaf6 WatchSource:0}: Error finding container a5ee8d83fbe8ab899adedbc25098eba7f82afcd2cd05e7965728c0a62b6aaaf6: Status 404 returned error can't find the container with id a5ee8d83fbe8ab899adedbc25098eba7f82afcd2cd05e7965728c0a62b6aaaf6 Feb 03 08:59:32 crc kubenswrapper[4998]: I0203 08:59:32.482960 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 08:59:32 crc kubenswrapper[4998]: I0203 08:59:32.568977 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 03 08:59:32 crc kubenswrapper[4998]: I0203 08:59:32.603211 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e93aa17-56ba-4b4c-8845-da4f973d48e2-scripts\") pod \"9e93aa17-56ba-4b4c-8845-da4f973d48e2\" (UID: \"9e93aa17-56ba-4b4c-8845-da4f973d48e2\") " Feb 03 08:59:32 crc kubenswrapper[4998]: I0203 08:59:32.603287 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e93aa17-56ba-4b4c-8845-da4f973d48e2-combined-ca-bundle\") pod \"9e93aa17-56ba-4b4c-8845-da4f973d48e2\" (UID: \"9e93aa17-56ba-4b4c-8845-da4f973d48e2\") " Feb 03 08:59:32 crc kubenswrapper[4998]: I0203 08:59:32.603374 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9e93aa17-56ba-4b4c-8845-da4f973d48e2-logs\") pod \"9e93aa17-56ba-4b4c-8845-da4f973d48e2\" (UID: \"9e93aa17-56ba-4b4c-8845-da4f973d48e2\") " Feb 03 08:59:32 crc kubenswrapper[4998]: I0203 08:59:32.603476 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e93aa17-56ba-4b4c-8845-da4f973d48e2-config-data\") pod \"9e93aa17-56ba-4b4c-8845-da4f973d48e2\" (UID: \"9e93aa17-56ba-4b4c-8845-da4f973d48e2\") " Feb 03 08:59:32 crc kubenswrapper[4998]: I0203 08:59:32.603504 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xwlm6\" (UniqueName: \"kubernetes.io/projected/9e93aa17-56ba-4b4c-8845-da4f973d48e2-kube-api-access-xwlm6\") pod \"9e93aa17-56ba-4b4c-8845-da4f973d48e2\" (UID: \"9e93aa17-56ba-4b4c-8845-da4f973d48e2\") " Feb 03 08:59:32 crc kubenswrapper[4998]: I0203 08:59:32.603761 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9e93aa17-56ba-4b4c-8845-da4f973d48e2-httpd-run\") pod \"9e93aa17-56ba-4b4c-8845-da4f973d48e2\" (UID: \"9e93aa17-56ba-4b4c-8845-da4f973d48e2\") " Feb 03 08:59:32 crc kubenswrapper[4998]: I0203 08:59:32.604140 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9e93aa17-56ba-4b4c-8845-da4f973d48e2-logs" (OuterVolumeSpecName: "logs") pod "9e93aa17-56ba-4b4c-8845-da4f973d48e2" (UID: "9e93aa17-56ba-4b4c-8845-da4f973d48e2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:59:32 crc kubenswrapper[4998]: I0203 08:59:32.604348 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9e93aa17-56ba-4b4c-8845-da4f973d48e2-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "9e93aa17-56ba-4b4c-8845-da4f973d48e2" (UID: "9e93aa17-56ba-4b4c-8845-da4f973d48e2"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 08:59:32 crc kubenswrapper[4998]: I0203 08:59:32.604988 4998 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9e93aa17-56ba-4b4c-8845-da4f973d48e2-logs\") on node \"crc\" DevicePath \"\"" Feb 03 08:59:32 crc kubenswrapper[4998]: I0203 08:59:32.605015 4998 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9e93aa17-56ba-4b4c-8845-da4f973d48e2-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 03 08:59:32 crc kubenswrapper[4998]: I0203 08:59:32.613648 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e93aa17-56ba-4b4c-8845-da4f973d48e2-kube-api-access-xwlm6" (OuterVolumeSpecName: "kube-api-access-xwlm6") pod "9e93aa17-56ba-4b4c-8845-da4f973d48e2" (UID: "9e93aa17-56ba-4b4c-8845-da4f973d48e2"). InnerVolumeSpecName "kube-api-access-xwlm6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:59:32 crc kubenswrapper[4998]: I0203 08:59:32.614549 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e93aa17-56ba-4b4c-8845-da4f973d48e2-scripts" (OuterVolumeSpecName: "scripts") pod "9e93aa17-56ba-4b4c-8845-da4f973d48e2" (UID: "9e93aa17-56ba-4b4c-8845-da4f973d48e2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:59:32 crc kubenswrapper[4998]: I0203 08:59:32.636691 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e93aa17-56ba-4b4c-8845-da4f973d48e2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9e93aa17-56ba-4b4c-8845-da4f973d48e2" (UID: "9e93aa17-56ba-4b4c-8845-da4f973d48e2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:59:32 crc kubenswrapper[4998]: I0203 08:59:32.658225 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e93aa17-56ba-4b4c-8845-da4f973d48e2-config-data" (OuterVolumeSpecName: "config-data") pod "9e93aa17-56ba-4b4c-8845-da4f973d48e2" (UID: "9e93aa17-56ba-4b4c-8845-da4f973d48e2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 08:59:32 crc kubenswrapper[4998]: I0203 08:59:32.706433 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e93aa17-56ba-4b4c-8845-da4f973d48e2-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 08:59:32 crc kubenswrapper[4998]: I0203 08:59:32.706472 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e93aa17-56ba-4b4c-8845-da4f973d48e2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 08:59:32 crc kubenswrapper[4998]: I0203 08:59:32.706486 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e93aa17-56ba-4b4c-8845-da4f973d48e2-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 08:59:32 crc kubenswrapper[4998]: I0203 08:59:32.706500 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xwlm6\" (UniqueName: \"kubernetes.io/projected/9e93aa17-56ba-4b4c-8845-da4f973d48e2-kube-api-access-xwlm6\") on node \"crc\" DevicePath \"\"" Feb 03 08:59:33 crc kubenswrapper[4998]: I0203 08:59:33.403997 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9e93aa17-56ba-4b4c-8845-da4f973d48e2","Type":"ContainerDied","Data":"d4c61e02179a5c39c40bdc2d72bea28435384b8f6771bf2867c1e77c09e756df"} Feb 03 08:59:33 crc kubenswrapper[4998]: I0203 08:59:33.404329 4998 scope.go:117] "RemoveContainer" containerID="6ee84862794bbc0eb61ed05377205dc039fb1d5da928fdc1bfa88e38d28762a4" Feb 03 08:59:33 crc kubenswrapper[4998]: I0203 08:59:33.404018 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 03 08:59:33 crc kubenswrapper[4998]: I0203 08:59:33.407064 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9bbff215-ce70-481e-b365-c88d60876046","Type":"ContainerStarted","Data":"e0eb951f59cc81cd84f7352acf66f491cedc1d18f5b15f1f270fcf806e59eca3"} Feb 03 08:59:33 crc kubenswrapper[4998]: I0203 08:59:33.407098 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9bbff215-ce70-481e-b365-c88d60876046","Type":"ContainerStarted","Data":"a5ee8d83fbe8ab899adedbc25098eba7f82afcd2cd05e7965728c0a62b6aaaf6"} Feb 03 08:59:33 crc kubenswrapper[4998]: I0203 08:59:33.461702 4998 scope.go:117] "RemoveContainer" containerID="d60789074fe60e99a1ca34d9c289b0959423fc9e98bcc1b4c523243f581dbf2a" Feb 03 08:59:33 crc kubenswrapper[4998]: I0203 08:59:33.461871 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 08:59:33 crc kubenswrapper[4998]: I0203 08:59:33.470437 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 08:59:33 crc kubenswrapper[4998]: I0203 08:59:33.499801 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 08:59:33 crc kubenswrapper[4998]: E0203 08:59:33.500392 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e93aa17-56ba-4b4c-8845-da4f973d48e2" containerName="glance-httpd" Feb 03 08:59:33 crc kubenswrapper[4998]: I0203 08:59:33.500411 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e93aa17-56ba-4b4c-8845-da4f973d48e2" containerName="glance-httpd" Feb 03 08:59:33 crc kubenswrapper[4998]: E0203 
08:59:33.500438 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e93aa17-56ba-4b4c-8845-da4f973d48e2" containerName="glance-log" Feb 03 08:59:33 crc kubenswrapper[4998]: I0203 08:59:33.500466 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e93aa17-56ba-4b4c-8845-da4f973d48e2" containerName="glance-log" Feb 03 08:59:33 crc kubenswrapper[4998]: I0203 08:59:33.500725 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e93aa17-56ba-4b4c-8845-da4f973d48e2" containerName="glance-httpd" Feb 03 08:59:33 crc kubenswrapper[4998]: I0203 08:59:33.500748 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e93aa17-56ba-4b4c-8845-da4f973d48e2" containerName="glance-log" Feb 03 08:59:33 crc kubenswrapper[4998]: I0203 08:59:33.502323 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 03 08:59:33 crc kubenswrapper[4998]: I0203 08:59:33.504594 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Feb 03 08:59:33 crc kubenswrapper[4998]: I0203 08:59:33.514909 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 08:59:33 crc kubenswrapper[4998]: I0203 08:59:33.624995 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6185f8b1-ac87-4f2c-9046-698033fac18f-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6185f8b1-ac87-4f2c-9046-698033fac18f\") " pod="openstack/glance-default-internal-api-0" Feb 03 08:59:33 crc kubenswrapper[4998]: I0203 08:59:33.625060 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6185f8b1-ac87-4f2c-9046-698033fac18f-logs\") pod \"glance-default-internal-api-0\" (UID: \"6185f8b1-ac87-4f2c-9046-698033fac18f\") " pod="openstack/glance-default-internal-api-0" Feb 03 08:59:33 crc kubenswrapper[4998]: I0203 08:59:33.625079 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6185f8b1-ac87-4f2c-9046-698033fac18f-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6185f8b1-ac87-4f2c-9046-698033fac18f\") " pod="openstack/glance-default-internal-api-0" Feb 03 08:59:33 crc kubenswrapper[4998]: I0203 08:59:33.625103 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6185f8b1-ac87-4f2c-9046-698033fac18f-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6185f8b1-ac87-4f2c-9046-698033fac18f\") " pod="openstack/glance-default-internal-api-0" Feb 03 08:59:33 crc kubenswrapper[4998]: I0203 08:59:33.625140 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6185f8b1-ac87-4f2c-9046-698033fac18f-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6185f8b1-ac87-4f2c-9046-698033fac18f\") " pod="openstack/glance-default-internal-api-0" Feb 03 08:59:33 crc kubenswrapper[4998]: I0203 08:59:33.625158 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zl22b\" (UniqueName: 
\"kubernetes.io/projected/6185f8b1-ac87-4f2c-9046-698033fac18f-kube-api-access-zl22b\") pod \"glance-default-internal-api-0\" (UID: \"6185f8b1-ac87-4f2c-9046-698033fac18f\") " pod="openstack/glance-default-internal-api-0" Feb 03 08:59:33 crc kubenswrapper[4998]: I0203 08:59:33.726943 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6185f8b1-ac87-4f2c-9046-698033fac18f-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6185f8b1-ac87-4f2c-9046-698033fac18f\") " pod="openstack/glance-default-internal-api-0" Feb 03 08:59:33 crc kubenswrapper[4998]: I0203 08:59:33.727045 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6185f8b1-ac87-4f2c-9046-698033fac18f-logs\") pod \"glance-default-internal-api-0\" (UID: \"6185f8b1-ac87-4f2c-9046-698033fac18f\") " pod="openstack/glance-default-internal-api-0" Feb 03 08:59:33 crc kubenswrapper[4998]: I0203 08:59:33.727080 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6185f8b1-ac87-4f2c-9046-698033fac18f-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6185f8b1-ac87-4f2c-9046-698033fac18f\") " pod="openstack/glance-default-internal-api-0" Feb 03 08:59:33 crc kubenswrapper[4998]: I0203 08:59:33.727117 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6185f8b1-ac87-4f2c-9046-698033fac18f-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6185f8b1-ac87-4f2c-9046-698033fac18f\") " pod="openstack/glance-default-internal-api-0" Feb 03 08:59:33 crc kubenswrapper[4998]: I0203 08:59:33.727165 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6185f8b1-ac87-4f2c-9046-698033fac18f-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6185f8b1-ac87-4f2c-9046-698033fac18f\") " pod="openstack/glance-default-internal-api-0" Feb 03 08:59:33 crc kubenswrapper[4998]: I0203 08:59:33.727182 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zl22b\" (UniqueName: \"kubernetes.io/projected/6185f8b1-ac87-4f2c-9046-698033fac18f-kube-api-access-zl22b\") pod \"glance-default-internal-api-0\" (UID: \"6185f8b1-ac87-4f2c-9046-698033fac18f\") " pod="openstack/glance-default-internal-api-0" Feb 03 08:59:33 crc kubenswrapper[4998]: I0203 08:59:33.729217 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6185f8b1-ac87-4f2c-9046-698033fac18f-logs\") pod \"glance-default-internal-api-0\" (UID: \"6185f8b1-ac87-4f2c-9046-698033fac18f\") " pod="openstack/glance-default-internal-api-0" Feb 03 08:59:33 crc kubenswrapper[4998]: I0203 08:59:33.729301 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6185f8b1-ac87-4f2c-9046-698033fac18f-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6185f8b1-ac87-4f2c-9046-698033fac18f\") " pod="openstack/glance-default-internal-api-0" Feb 03 08:59:33 crc kubenswrapper[4998]: I0203 08:59:33.734863 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6185f8b1-ac87-4f2c-9046-698033fac18f-scripts\") pod \"glance-default-internal-api-0\" (UID: 
\"6185f8b1-ac87-4f2c-9046-698033fac18f\") " pod="openstack/glance-default-internal-api-0" Feb 03 08:59:33 crc kubenswrapper[4998]: I0203 08:59:33.735450 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6185f8b1-ac87-4f2c-9046-698033fac18f-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6185f8b1-ac87-4f2c-9046-698033fac18f\") " pod="openstack/glance-default-internal-api-0" Feb 03 08:59:33 crc kubenswrapper[4998]: I0203 08:59:33.736019 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6185f8b1-ac87-4f2c-9046-698033fac18f-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6185f8b1-ac87-4f2c-9046-698033fac18f\") " pod="openstack/glance-default-internal-api-0" Feb 03 08:59:33 crc kubenswrapper[4998]: I0203 08:59:33.746271 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zl22b\" (UniqueName: \"kubernetes.io/projected/6185f8b1-ac87-4f2c-9046-698033fac18f-kube-api-access-zl22b\") pod \"glance-default-internal-api-0\" (UID: \"6185f8b1-ac87-4f2c-9046-698033fac18f\") " pod="openstack/glance-default-internal-api-0" Feb 03 08:59:33 crc kubenswrapper[4998]: I0203 08:59:33.834818 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 03 08:59:34 crc kubenswrapper[4998]: I0203 08:59:34.401716 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 08:59:34 crc kubenswrapper[4998]: I0203 08:59:34.424260 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9bbff215-ce70-481e-b365-c88d60876046","Type":"ContainerStarted","Data":"c91c19cfc1b81f291034be303b73960734521d310e296607382f07a72d87478a"} Feb 03 08:59:34 crc kubenswrapper[4998]: I0203 08:59:34.448990 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.448968066 podStartE2EDuration="3.448968066s" podCreationTimestamp="2026-02-03 08:59:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 08:59:34.447176215 +0000 UTC m=+8012.733870031" watchObservedRunningTime="2026-02-03 08:59:34.448968066 +0000 UTC m=+8012.735661872" Feb 03 08:59:34 crc kubenswrapper[4998]: I0203 08:59:34.475752 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e93aa17-56ba-4b4c-8845-da4f973d48e2" path="/var/lib/kubelet/pods/9e93aa17-56ba-4b4c-8845-da4f973d48e2/volumes" Feb 03 08:59:35 crc kubenswrapper[4998]: I0203 08:59:35.447945 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6185f8b1-ac87-4f2c-9046-698033fac18f","Type":"ContainerStarted","Data":"8f625047d9c339deca82aaa35a157accc63e915b898cda507604c573e8945883"} Feb 03 08:59:35 crc kubenswrapper[4998]: I0203 08:59:35.448287 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6185f8b1-ac87-4f2c-9046-698033fac18f","Type":"ContainerStarted","Data":"9d7b9ae810beee5374f37a4b8a0ac955ce118cb34c97dd80b6b28aa11a02f770"} Feb 03 08:59:36 crc kubenswrapper[4998]: I0203 08:59:36.464853 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"6185f8b1-ac87-4f2c-9046-698033fac18f","Type":"ContainerStarted","Data":"a491e52e6f6b8a080073ac81d9d83bc305117b38152c9a6748c9e053ab82fb2c"} Feb 03 08:59:36 crc kubenswrapper[4998]: I0203 08:59:36.491790 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.491749531 podStartE2EDuration="3.491749531s" podCreationTimestamp="2026-02-03 08:59:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 08:59:36.486612735 +0000 UTC m=+8014.773306551" watchObservedRunningTime="2026-02-03 08:59:36.491749531 +0000 UTC m=+8014.778443347" Feb 03 08:59:36 crc kubenswrapper[4998]: I0203 08:59:36.940250 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c77f645c-4sb7w" Feb 03 08:59:37 crc kubenswrapper[4998]: I0203 08:59:37.017086 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78c876cf45-ptl75"] Feb 03 08:59:37 crc kubenswrapper[4998]: I0203 08:59:37.017378 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-78c876cf45-ptl75" podUID="5c340901-5ea2-4862-b54d-241fc0f003dd" containerName="dnsmasq-dns" containerID="cri-o://41947ca52c7460b59dc8c3a1ad40a8b0f54ed7e5ae7dd40bbbaf241ff7481d18" gracePeriod=10 Feb 03 08:59:37 crc kubenswrapper[4998]: I0203 08:59:37.477537 4998 generic.go:334] "Generic (PLEG): container finished" podID="5c340901-5ea2-4862-b54d-241fc0f003dd" containerID="41947ca52c7460b59dc8c3a1ad40a8b0f54ed7e5ae7dd40bbbaf241ff7481d18" exitCode=0 Feb 03 08:59:37 crc kubenswrapper[4998]: I0203 08:59:37.477923 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78c876cf45-ptl75" event={"ID":"5c340901-5ea2-4862-b54d-241fc0f003dd","Type":"ContainerDied","Data":"41947ca52c7460b59dc8c3a1ad40a8b0f54ed7e5ae7dd40bbbaf241ff7481d18"} Feb 03 08:59:37 crc kubenswrapper[4998]: I0203 08:59:37.477980 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78c876cf45-ptl75" event={"ID":"5c340901-5ea2-4862-b54d-241fc0f003dd","Type":"ContainerDied","Data":"2d1e89eafda37be2f94f925735af1cee5009df6d1b193355c61427e0a08a4625"} Feb 03 08:59:37 crc kubenswrapper[4998]: I0203 08:59:37.477993 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2d1e89eafda37be2f94f925735af1cee5009df6d1b193355c61427e0a08a4625" Feb 03 08:59:37 crc kubenswrapper[4998]: I0203 08:59:37.523888 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78c876cf45-ptl75" Feb 03 08:59:37 crc kubenswrapper[4998]: I0203 08:59:37.615673 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-grbb4\" (UniqueName: \"kubernetes.io/projected/5c340901-5ea2-4862-b54d-241fc0f003dd-kube-api-access-grbb4\") pod \"5c340901-5ea2-4862-b54d-241fc0f003dd\" (UID: \"5c340901-5ea2-4862-b54d-241fc0f003dd\") " Feb 03 08:59:37 crc kubenswrapper[4998]: I0203 08:59:37.615860 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c340901-5ea2-4862-b54d-241fc0f003dd-dns-svc\") pod \"5c340901-5ea2-4862-b54d-241fc0f003dd\" (UID: \"5c340901-5ea2-4862-b54d-241fc0f003dd\") " Feb 03 08:59:37 crc kubenswrapper[4998]: I0203 08:59:37.615926 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c340901-5ea2-4862-b54d-241fc0f003dd-ovsdbserver-nb\") pod \"5c340901-5ea2-4862-b54d-241fc0f003dd\" (UID: \"5c340901-5ea2-4862-b54d-241fc0f003dd\") " Feb 03 08:59:37 crc kubenswrapper[4998]: I0203 08:59:37.615987 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c340901-5ea2-4862-b54d-241fc0f003dd-config\") pod \"5c340901-5ea2-4862-b54d-241fc0f003dd\" (UID: \"5c340901-5ea2-4862-b54d-241fc0f003dd\") " Feb 03 08:59:37 crc kubenswrapper[4998]: I0203 08:59:37.616064 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c340901-5ea2-4862-b54d-241fc0f003dd-ovsdbserver-sb\") pod \"5c340901-5ea2-4862-b54d-241fc0f003dd\" (UID: \"5c340901-5ea2-4862-b54d-241fc0f003dd\") " Feb 03 08:59:37 crc kubenswrapper[4998]: I0203 08:59:37.621079 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c340901-5ea2-4862-b54d-241fc0f003dd-kube-api-access-grbb4" (OuterVolumeSpecName: "kube-api-access-grbb4") pod "5c340901-5ea2-4862-b54d-241fc0f003dd" (UID: "5c340901-5ea2-4862-b54d-241fc0f003dd"). InnerVolumeSpecName "kube-api-access-grbb4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:59:37 crc kubenswrapper[4998]: I0203 08:59:37.661087 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c340901-5ea2-4862-b54d-241fc0f003dd-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5c340901-5ea2-4862-b54d-241fc0f003dd" (UID: "5c340901-5ea2-4862-b54d-241fc0f003dd"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:59:37 crc kubenswrapper[4998]: I0203 08:59:37.664559 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c340901-5ea2-4862-b54d-241fc0f003dd-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5c340901-5ea2-4862-b54d-241fc0f003dd" (UID: "5c340901-5ea2-4862-b54d-241fc0f003dd"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:59:37 crc kubenswrapper[4998]: I0203 08:59:37.666999 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c340901-5ea2-4862-b54d-241fc0f003dd-config" (OuterVolumeSpecName: "config") pod "5c340901-5ea2-4862-b54d-241fc0f003dd" (UID: "5c340901-5ea2-4862-b54d-241fc0f003dd"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:59:37 crc kubenswrapper[4998]: I0203 08:59:37.678724 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c340901-5ea2-4862-b54d-241fc0f003dd-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5c340901-5ea2-4862-b54d-241fc0f003dd" (UID: "5c340901-5ea2-4862-b54d-241fc0f003dd"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:59:37 crc kubenswrapper[4998]: I0203 08:59:37.718725 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c340901-5ea2-4862-b54d-241fc0f003dd-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 03 08:59:37 crc kubenswrapper[4998]: I0203 08:59:37.718799 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-grbb4\" (UniqueName: \"kubernetes.io/projected/5c340901-5ea2-4862-b54d-241fc0f003dd-kube-api-access-grbb4\") on node \"crc\" DevicePath \"\"" Feb 03 08:59:37 crc kubenswrapper[4998]: I0203 08:59:37.718814 4998 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c340901-5ea2-4862-b54d-241fc0f003dd-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 08:59:37 crc kubenswrapper[4998]: I0203 08:59:37.718831 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c340901-5ea2-4862-b54d-241fc0f003dd-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 03 08:59:37 crc kubenswrapper[4998]: I0203 08:59:37.718843 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c340901-5ea2-4862-b54d-241fc0f003dd-config\") on node \"crc\" DevicePath \"\"" Feb 03 08:59:38 crc kubenswrapper[4998]: I0203 08:59:38.484910 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78c876cf45-ptl75" Feb 03 08:59:38 crc kubenswrapper[4998]: I0203 08:59:38.505978 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78c876cf45-ptl75"] Feb 03 08:59:38 crc kubenswrapper[4998]: I0203 08:59:38.515900 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78c876cf45-ptl75"] Feb 03 08:59:40 crc kubenswrapper[4998]: I0203 08:59:40.441770 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c340901-5ea2-4862-b54d-241fc0f003dd" path="/var/lib/kubelet/pods/5c340901-5ea2-4862-b54d-241fc0f003dd/volumes" Feb 03 08:59:41 crc kubenswrapper[4998]: I0203 08:59:41.768425 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Feb 03 08:59:41 crc kubenswrapper[4998]: I0203 08:59:41.768932 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Feb 03 08:59:41 crc kubenswrapper[4998]: I0203 08:59:41.800952 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Feb 03 08:59:41 crc kubenswrapper[4998]: I0203 08:59:41.819718 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Feb 03 08:59:42 crc kubenswrapper[4998]: I0203 08:59:42.521836 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Feb 03 08:59:42 crc kubenswrapper[4998]: I0203 08:59:42.521911 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Feb 03 08:59:43 crc kubenswrapper[4998]: I0203 08:59:43.835811 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 03 08:59:43 crc kubenswrapper[4998]: I0203 08:59:43.836113 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 03 08:59:43 crc kubenswrapper[4998]: I0203 08:59:43.877694 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 03 08:59:43 crc kubenswrapper[4998]: I0203 08:59:43.877752 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 03 08:59:44 crc kubenswrapper[4998]: I0203 08:59:44.444371 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Feb 03 08:59:44 crc kubenswrapper[4998]: I0203 08:59:44.446656 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Feb 03 08:59:44 crc kubenswrapper[4998]: I0203 08:59:44.539996 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 03 08:59:44 crc kubenswrapper[4998]: I0203 08:59:44.540350 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 03 08:59:46 crc kubenswrapper[4998]: I0203 08:59:46.491615 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Feb 03 08:59:46 crc kubenswrapper[4998]: I0203 08:59:46.507277 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack/glance-default-internal-api-0" Feb 03 08:59:56 crc kubenswrapper[4998]: I0203 08:59:56.221962 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-9mbqc"] Feb 03 08:59:56 crc kubenswrapper[4998]: E0203 08:59:56.222888 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c340901-5ea2-4862-b54d-241fc0f003dd" containerName="init" Feb 03 08:59:56 crc kubenswrapper[4998]: I0203 08:59:56.222903 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c340901-5ea2-4862-b54d-241fc0f003dd" containerName="init" Feb 03 08:59:56 crc kubenswrapper[4998]: E0203 08:59:56.222921 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c340901-5ea2-4862-b54d-241fc0f003dd" containerName="dnsmasq-dns" Feb 03 08:59:56 crc kubenswrapper[4998]: I0203 08:59:56.222928 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c340901-5ea2-4862-b54d-241fc0f003dd" containerName="dnsmasq-dns" Feb 03 08:59:56 crc kubenswrapper[4998]: I0203 08:59:56.223136 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c340901-5ea2-4862-b54d-241fc0f003dd" containerName="dnsmasq-dns" Feb 03 08:59:56 crc kubenswrapper[4998]: I0203 08:59:56.223889 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-9mbqc" Feb 03 08:59:56 crc kubenswrapper[4998]: I0203 08:59:56.234894 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-9mbqc"] Feb 03 08:59:56 crc kubenswrapper[4998]: I0203 08:59:56.271399 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fd5331f1-33b8-454f-9d08-2ba013f7e25c-operator-scripts\") pod \"placement-db-create-9mbqc\" (UID: \"fd5331f1-33b8-454f-9d08-2ba013f7e25c\") " pod="openstack/placement-db-create-9mbqc" Feb 03 08:59:56 crc kubenswrapper[4998]: I0203 08:59:56.271837 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rnd8g\" (UniqueName: \"kubernetes.io/projected/fd5331f1-33b8-454f-9d08-2ba013f7e25c-kube-api-access-rnd8g\") pod \"placement-db-create-9mbqc\" (UID: \"fd5331f1-33b8-454f-9d08-2ba013f7e25c\") " pod="openstack/placement-db-create-9mbqc" Feb 03 08:59:56 crc kubenswrapper[4998]: I0203 08:59:56.330110 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-8468-account-create-update-rmt9s"] Feb 03 08:59:56 crc kubenswrapper[4998]: I0203 08:59:56.331516 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-8468-account-create-update-rmt9s" Feb 03 08:59:56 crc kubenswrapper[4998]: I0203 08:59:56.333712 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Feb 03 08:59:56 crc kubenswrapper[4998]: I0203 08:59:56.342079 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-8468-account-create-update-rmt9s"] Feb 03 08:59:56 crc kubenswrapper[4998]: I0203 08:59:56.373395 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9c1e0c01-3615-4acc-93cd-16ec35777578-operator-scripts\") pod \"placement-8468-account-create-update-rmt9s\" (UID: \"9c1e0c01-3615-4acc-93cd-16ec35777578\") " pod="openstack/placement-8468-account-create-update-rmt9s" Feb 03 08:59:56 crc kubenswrapper[4998]: I0203 08:59:56.373464 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84ddm\" (UniqueName: \"kubernetes.io/projected/9c1e0c01-3615-4acc-93cd-16ec35777578-kube-api-access-84ddm\") pod \"placement-8468-account-create-update-rmt9s\" (UID: \"9c1e0c01-3615-4acc-93cd-16ec35777578\") " pod="openstack/placement-8468-account-create-update-rmt9s" Feb 03 08:59:56 crc kubenswrapper[4998]: I0203 08:59:56.373651 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rnd8g\" (UniqueName: \"kubernetes.io/projected/fd5331f1-33b8-454f-9d08-2ba013f7e25c-kube-api-access-rnd8g\") pod \"placement-db-create-9mbqc\" (UID: \"fd5331f1-33b8-454f-9d08-2ba013f7e25c\") " pod="openstack/placement-db-create-9mbqc" Feb 03 08:59:56 crc kubenswrapper[4998]: I0203 08:59:56.373689 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fd5331f1-33b8-454f-9d08-2ba013f7e25c-operator-scripts\") pod \"placement-db-create-9mbqc\" (UID: \"fd5331f1-33b8-454f-9d08-2ba013f7e25c\") " pod="openstack/placement-db-create-9mbqc" Feb 03 08:59:56 crc kubenswrapper[4998]: I0203 08:59:56.374732 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fd5331f1-33b8-454f-9d08-2ba013f7e25c-operator-scripts\") pod \"placement-db-create-9mbqc\" (UID: \"fd5331f1-33b8-454f-9d08-2ba013f7e25c\") " pod="openstack/placement-db-create-9mbqc" Feb 03 08:59:56 crc kubenswrapper[4998]: I0203 08:59:56.399372 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rnd8g\" (UniqueName: \"kubernetes.io/projected/fd5331f1-33b8-454f-9d08-2ba013f7e25c-kube-api-access-rnd8g\") pod \"placement-db-create-9mbqc\" (UID: \"fd5331f1-33b8-454f-9d08-2ba013f7e25c\") " pod="openstack/placement-db-create-9mbqc" Feb 03 08:59:56 crc kubenswrapper[4998]: I0203 08:59:56.475435 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9c1e0c01-3615-4acc-93cd-16ec35777578-operator-scripts\") pod \"placement-8468-account-create-update-rmt9s\" (UID: \"9c1e0c01-3615-4acc-93cd-16ec35777578\") " pod="openstack/placement-8468-account-create-update-rmt9s" Feb 03 08:59:56 crc kubenswrapper[4998]: I0203 08:59:56.475528 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84ddm\" (UniqueName: 
\"kubernetes.io/projected/9c1e0c01-3615-4acc-93cd-16ec35777578-kube-api-access-84ddm\") pod \"placement-8468-account-create-update-rmt9s\" (UID: \"9c1e0c01-3615-4acc-93cd-16ec35777578\") " pod="openstack/placement-8468-account-create-update-rmt9s" Feb 03 08:59:56 crc kubenswrapper[4998]: I0203 08:59:56.476099 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9c1e0c01-3615-4acc-93cd-16ec35777578-operator-scripts\") pod \"placement-8468-account-create-update-rmt9s\" (UID: \"9c1e0c01-3615-4acc-93cd-16ec35777578\") " pod="openstack/placement-8468-account-create-update-rmt9s" Feb 03 08:59:56 crc kubenswrapper[4998]: I0203 08:59:56.494712 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-84ddm\" (UniqueName: \"kubernetes.io/projected/9c1e0c01-3615-4acc-93cd-16ec35777578-kube-api-access-84ddm\") pod \"placement-8468-account-create-update-rmt9s\" (UID: \"9c1e0c01-3615-4acc-93cd-16ec35777578\") " pod="openstack/placement-8468-account-create-update-rmt9s" Feb 03 08:59:56 crc kubenswrapper[4998]: I0203 08:59:56.556963 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-9mbqc" Feb 03 08:59:56 crc kubenswrapper[4998]: I0203 08:59:56.651037 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-8468-account-create-update-rmt9s" Feb 03 08:59:57 crc kubenswrapper[4998]: I0203 08:59:57.013736 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-9mbqc"] Feb 03 08:59:57 crc kubenswrapper[4998]: I0203 08:59:57.127994 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-8468-account-create-update-rmt9s"] Feb 03 08:59:57 crc kubenswrapper[4998]: I0203 08:59:57.686315 4998 generic.go:334] "Generic (PLEG): container finished" podID="9c1e0c01-3615-4acc-93cd-16ec35777578" containerID="c4e29ae12016a70ac65c7f04bb6e1ad62b1391bbb823c33996b9f1dbd51ebc3d" exitCode=0 Feb 03 08:59:57 crc kubenswrapper[4998]: I0203 08:59:57.686429 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8468-account-create-update-rmt9s" event={"ID":"9c1e0c01-3615-4acc-93cd-16ec35777578","Type":"ContainerDied","Data":"c4e29ae12016a70ac65c7f04bb6e1ad62b1391bbb823c33996b9f1dbd51ebc3d"} Feb 03 08:59:57 crc kubenswrapper[4998]: I0203 08:59:57.686465 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8468-account-create-update-rmt9s" event={"ID":"9c1e0c01-3615-4acc-93cd-16ec35777578","Type":"ContainerStarted","Data":"5f63712cc61dba3316b70419602b36b7d334ecc73b801679bae5d867e0401b1a"} Feb 03 08:59:57 crc kubenswrapper[4998]: I0203 08:59:57.690317 4998 generic.go:334] "Generic (PLEG): container finished" podID="fd5331f1-33b8-454f-9d08-2ba013f7e25c" containerID="916d6a71c52755cdc0ef2441feab032eab2c50532cb4003e401f9255bda2a0c8" exitCode=0 Feb 03 08:59:57 crc kubenswrapper[4998]: I0203 08:59:57.690374 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-9mbqc" event={"ID":"fd5331f1-33b8-454f-9d08-2ba013f7e25c","Type":"ContainerDied","Data":"916d6a71c52755cdc0ef2441feab032eab2c50532cb4003e401f9255bda2a0c8"} Feb 03 08:59:57 crc kubenswrapper[4998]: I0203 08:59:57.690403 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-9mbqc" 
event={"ID":"fd5331f1-33b8-454f-9d08-2ba013f7e25c","Type":"ContainerStarted","Data":"73947d23000a4997b03fad4a05e8ec913c01fa10ec930d32f7fb7a847cb34198"} Feb 03 08:59:59 crc kubenswrapper[4998]: I0203 08:59:59.158491 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-9mbqc" Feb 03 08:59:59 crc kubenswrapper[4998]: I0203 08:59:59.164306 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-8468-account-create-update-rmt9s" Feb 03 08:59:59 crc kubenswrapper[4998]: I0203 08:59:59.232886 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnd8g\" (UniqueName: \"kubernetes.io/projected/fd5331f1-33b8-454f-9d08-2ba013f7e25c-kube-api-access-rnd8g\") pod \"fd5331f1-33b8-454f-9d08-2ba013f7e25c\" (UID: \"fd5331f1-33b8-454f-9d08-2ba013f7e25c\") " Feb 03 08:59:59 crc kubenswrapper[4998]: I0203 08:59:59.232944 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-84ddm\" (UniqueName: \"kubernetes.io/projected/9c1e0c01-3615-4acc-93cd-16ec35777578-kube-api-access-84ddm\") pod \"9c1e0c01-3615-4acc-93cd-16ec35777578\" (UID: \"9c1e0c01-3615-4acc-93cd-16ec35777578\") " Feb 03 08:59:59 crc kubenswrapper[4998]: I0203 08:59:59.232980 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fd5331f1-33b8-454f-9d08-2ba013f7e25c-operator-scripts\") pod \"fd5331f1-33b8-454f-9d08-2ba013f7e25c\" (UID: \"fd5331f1-33b8-454f-9d08-2ba013f7e25c\") " Feb 03 08:59:59 crc kubenswrapper[4998]: I0203 08:59:59.233051 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9c1e0c01-3615-4acc-93cd-16ec35777578-operator-scripts\") pod \"9c1e0c01-3615-4acc-93cd-16ec35777578\" (UID: \"9c1e0c01-3615-4acc-93cd-16ec35777578\") " Feb 03 08:59:59 crc kubenswrapper[4998]: I0203 08:59:59.233757 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c1e0c01-3615-4acc-93cd-16ec35777578-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9c1e0c01-3615-4acc-93cd-16ec35777578" (UID: "9c1e0c01-3615-4acc-93cd-16ec35777578"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:59:59 crc kubenswrapper[4998]: I0203 08:59:59.234108 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd5331f1-33b8-454f-9d08-2ba013f7e25c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fd5331f1-33b8-454f-9d08-2ba013f7e25c" (UID: "fd5331f1-33b8-454f-9d08-2ba013f7e25c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 08:59:59 crc kubenswrapper[4998]: I0203 08:59:59.246622 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c1e0c01-3615-4acc-93cd-16ec35777578-kube-api-access-84ddm" (OuterVolumeSpecName: "kube-api-access-84ddm") pod "9c1e0c01-3615-4acc-93cd-16ec35777578" (UID: "9c1e0c01-3615-4acc-93cd-16ec35777578"). InnerVolumeSpecName "kube-api-access-84ddm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:59:59 crc kubenswrapper[4998]: I0203 08:59:59.249057 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd5331f1-33b8-454f-9d08-2ba013f7e25c-kube-api-access-rnd8g" (OuterVolumeSpecName: "kube-api-access-rnd8g") pod "fd5331f1-33b8-454f-9d08-2ba013f7e25c" (UID: "fd5331f1-33b8-454f-9d08-2ba013f7e25c"). InnerVolumeSpecName "kube-api-access-rnd8g". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 08:59:59 crc kubenswrapper[4998]: I0203 08:59:59.335507 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9c1e0c01-3615-4acc-93cd-16ec35777578-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 08:59:59 crc kubenswrapper[4998]: I0203 08:59:59.335579 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnd8g\" (UniqueName: \"kubernetes.io/projected/fd5331f1-33b8-454f-9d08-2ba013f7e25c-kube-api-access-rnd8g\") on node \"crc\" DevicePath \"\"" Feb 03 08:59:59 crc kubenswrapper[4998]: I0203 08:59:59.335600 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-84ddm\" (UniqueName: \"kubernetes.io/projected/9c1e0c01-3615-4acc-93cd-16ec35777578-kube-api-access-84ddm\") on node \"crc\" DevicePath \"\"" Feb 03 08:59:59 crc kubenswrapper[4998]: I0203 08:59:59.335616 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fd5331f1-33b8-454f-9d08-2ba013f7e25c-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 08:59:59 crc kubenswrapper[4998]: I0203 08:59:59.709415 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8468-account-create-update-rmt9s" event={"ID":"9c1e0c01-3615-4acc-93cd-16ec35777578","Type":"ContainerDied","Data":"5f63712cc61dba3316b70419602b36b7d334ecc73b801679bae5d867e0401b1a"} Feb 03 08:59:59 crc kubenswrapper[4998]: I0203 08:59:59.709468 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5f63712cc61dba3316b70419602b36b7d334ecc73b801679bae5d867e0401b1a" Feb 03 08:59:59 crc kubenswrapper[4998]: I0203 08:59:59.709402 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-8468-account-create-update-rmt9s" Feb 03 08:59:59 crc kubenswrapper[4998]: I0203 08:59:59.711448 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-9mbqc" event={"ID":"fd5331f1-33b8-454f-9d08-2ba013f7e25c","Type":"ContainerDied","Data":"73947d23000a4997b03fad4a05e8ec913c01fa10ec930d32f7fb7a847cb34198"} Feb 03 08:59:59 crc kubenswrapper[4998]: I0203 08:59:59.711478 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="73947d23000a4997b03fad4a05e8ec913c01fa10ec930d32f7fb7a847cb34198" Feb 03 08:59:59 crc kubenswrapper[4998]: I0203 08:59:59.711503 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-9mbqc" Feb 03 09:00:00 crc kubenswrapper[4998]: I0203 09:00:00.156998 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501820-9xfll"] Feb 03 09:00:00 crc kubenswrapper[4998]: E0203 09:00:00.157521 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd5331f1-33b8-454f-9d08-2ba013f7e25c" containerName="mariadb-database-create" Feb 03 09:00:00 crc kubenswrapper[4998]: I0203 09:00:00.157547 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd5331f1-33b8-454f-9d08-2ba013f7e25c" containerName="mariadb-database-create" Feb 03 09:00:00 crc kubenswrapper[4998]: E0203 09:00:00.157578 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c1e0c01-3615-4acc-93cd-16ec35777578" containerName="mariadb-account-create-update" Feb 03 09:00:00 crc kubenswrapper[4998]: I0203 09:00:00.157586 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c1e0c01-3615-4acc-93cd-16ec35777578" containerName="mariadb-account-create-update" Feb 03 09:00:00 crc kubenswrapper[4998]: I0203 09:00:00.157843 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c1e0c01-3615-4acc-93cd-16ec35777578" containerName="mariadb-account-create-update" Feb 03 09:00:00 crc kubenswrapper[4998]: I0203 09:00:00.157871 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd5331f1-33b8-454f-9d08-2ba013f7e25c" containerName="mariadb-database-create" Feb 03 09:00:00 crc kubenswrapper[4998]: I0203 09:00:00.158672 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501820-9xfll" Feb 03 09:00:00 crc kubenswrapper[4998]: I0203 09:00:00.162318 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 03 09:00:00 crc kubenswrapper[4998]: I0203 09:00:00.166332 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501820-9xfll"] Feb 03 09:00:00 crc kubenswrapper[4998]: I0203 09:00:00.167219 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 03 09:00:00 crc kubenswrapper[4998]: I0203 09:00:00.251116 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4b8z\" (UniqueName: \"kubernetes.io/projected/51a9c42d-e70a-4569-a2e3-cbc6696eef0f-kube-api-access-h4b8z\") pod \"collect-profiles-29501820-9xfll\" (UID: \"51a9c42d-e70a-4569-a2e3-cbc6696eef0f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501820-9xfll" Feb 03 09:00:00 crc kubenswrapper[4998]: I0203 09:00:00.251208 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/51a9c42d-e70a-4569-a2e3-cbc6696eef0f-secret-volume\") pod \"collect-profiles-29501820-9xfll\" (UID: \"51a9c42d-e70a-4569-a2e3-cbc6696eef0f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501820-9xfll" Feb 03 09:00:00 crc kubenswrapper[4998]: I0203 09:00:00.251287 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/51a9c42d-e70a-4569-a2e3-cbc6696eef0f-config-volume\") pod \"collect-profiles-29501820-9xfll\" (UID: 
\"51a9c42d-e70a-4569-a2e3-cbc6696eef0f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501820-9xfll" Feb 03 09:00:00 crc kubenswrapper[4998]: I0203 09:00:00.353897 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/51a9c42d-e70a-4569-a2e3-cbc6696eef0f-secret-volume\") pod \"collect-profiles-29501820-9xfll\" (UID: \"51a9c42d-e70a-4569-a2e3-cbc6696eef0f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501820-9xfll" Feb 03 09:00:00 crc kubenswrapper[4998]: I0203 09:00:00.354488 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/51a9c42d-e70a-4569-a2e3-cbc6696eef0f-config-volume\") pod \"collect-profiles-29501820-9xfll\" (UID: \"51a9c42d-e70a-4569-a2e3-cbc6696eef0f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501820-9xfll" Feb 03 09:00:00 crc kubenswrapper[4998]: I0203 09:00:00.354687 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4b8z\" (UniqueName: \"kubernetes.io/projected/51a9c42d-e70a-4569-a2e3-cbc6696eef0f-kube-api-access-h4b8z\") pod \"collect-profiles-29501820-9xfll\" (UID: \"51a9c42d-e70a-4569-a2e3-cbc6696eef0f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501820-9xfll" Feb 03 09:00:00 crc kubenswrapper[4998]: I0203 09:00:00.355739 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/51a9c42d-e70a-4569-a2e3-cbc6696eef0f-config-volume\") pod \"collect-profiles-29501820-9xfll\" (UID: \"51a9c42d-e70a-4569-a2e3-cbc6696eef0f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501820-9xfll" Feb 03 09:00:00 crc kubenswrapper[4998]: I0203 09:00:00.360977 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/51a9c42d-e70a-4569-a2e3-cbc6696eef0f-secret-volume\") pod \"collect-profiles-29501820-9xfll\" (UID: \"51a9c42d-e70a-4569-a2e3-cbc6696eef0f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501820-9xfll" Feb 03 09:00:00 crc kubenswrapper[4998]: I0203 09:00:00.379149 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4b8z\" (UniqueName: \"kubernetes.io/projected/51a9c42d-e70a-4569-a2e3-cbc6696eef0f-kube-api-access-h4b8z\") pod \"collect-profiles-29501820-9xfll\" (UID: \"51a9c42d-e70a-4569-a2e3-cbc6696eef0f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501820-9xfll" Feb 03 09:00:00 crc kubenswrapper[4998]: I0203 09:00:00.478124 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501820-9xfll" Feb 03 09:00:00 crc kubenswrapper[4998]: I0203 09:00:00.934053 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501820-9xfll"] Feb 03 09:00:00 crc kubenswrapper[4998]: W0203 09:00:00.941387 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod51a9c42d_e70a_4569_a2e3_cbc6696eef0f.slice/crio-c56cbe00121a6936c18ac9efef40056437e65fb874d90490e59815fe87f284f9 WatchSource:0}: Error finding container c56cbe00121a6936c18ac9efef40056437e65fb874d90490e59815fe87f284f9: Status 404 returned error can't find the container with id c56cbe00121a6936c18ac9efef40056437e65fb874d90490e59815fe87f284f9 Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.660192 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-676b6c965c-hdjhr"] Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.662476 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-676b6c965c-hdjhr" Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.676433 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b1901378-6afc-483b-9734-835bb9475ea1-ovsdbserver-sb\") pod \"dnsmasq-dns-676b6c965c-hdjhr\" (UID: \"b1901378-6afc-483b-9734-835bb9475ea1\") " pod="openstack/dnsmasq-dns-676b6c965c-hdjhr" Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.676491 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b1901378-6afc-483b-9734-835bb9475ea1-ovsdbserver-nb\") pod \"dnsmasq-dns-676b6c965c-hdjhr\" (UID: \"b1901378-6afc-483b-9734-835bb9475ea1\") " pod="openstack/dnsmasq-dns-676b6c965c-hdjhr" Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.676547 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mv2tg\" (UniqueName: \"kubernetes.io/projected/b1901378-6afc-483b-9734-835bb9475ea1-kube-api-access-mv2tg\") pod \"dnsmasq-dns-676b6c965c-hdjhr\" (UID: \"b1901378-6afc-483b-9734-835bb9475ea1\") " pod="openstack/dnsmasq-dns-676b6c965c-hdjhr" Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.676654 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b1901378-6afc-483b-9734-835bb9475ea1-dns-svc\") pod \"dnsmasq-dns-676b6c965c-hdjhr\" (UID: \"b1901378-6afc-483b-9734-835bb9475ea1\") " pod="openstack/dnsmasq-dns-676b6c965c-hdjhr" Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.676874 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1901378-6afc-483b-9734-835bb9475ea1-config\") pod \"dnsmasq-dns-676b6c965c-hdjhr\" (UID: \"b1901378-6afc-483b-9734-835bb9475ea1\") " pod="openstack/dnsmasq-dns-676b6c965c-hdjhr" Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.679581 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-676b6c965c-hdjhr"] Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.702074 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-bd5gs"] Feb 03 09:00:01 crc 
kubenswrapper[4998]: I0203 09:00:01.703652 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-bd5gs" Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.705836 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.706937 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.707057 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-khcvf" Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.719416 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-bd5gs"] Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.733357 4998 generic.go:334] "Generic (PLEG): container finished" podID="51a9c42d-e70a-4569-a2e3-cbc6696eef0f" containerID="6bf4a79886b4d07d60e2283fd493beb8577b8f70cf887330e0378bb0f9ee43f3" exitCode=0 Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.733397 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501820-9xfll" event={"ID":"51a9c42d-e70a-4569-a2e3-cbc6696eef0f","Type":"ContainerDied","Data":"6bf4a79886b4d07d60e2283fd493beb8577b8f70cf887330e0378bb0f9ee43f3"} Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.733420 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501820-9xfll" event={"ID":"51a9c42d-e70a-4569-a2e3-cbc6696eef0f","Type":"ContainerStarted","Data":"c56cbe00121a6936c18ac9efef40056437e65fb874d90490e59815fe87f284f9"} Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.778683 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b1901378-6afc-483b-9734-835bb9475ea1-dns-svc\") pod \"dnsmasq-dns-676b6c965c-hdjhr\" (UID: \"b1901378-6afc-483b-9734-835bb9475ea1\") " pod="openstack/dnsmasq-dns-676b6c965c-hdjhr" Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.778731 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c94ec51-b96c-405a-990b-acb3a0f85209-config-data\") pod \"placement-db-sync-bd5gs\" (UID: \"7c94ec51-b96c-405a-990b-acb3a0f85209\") " pod="openstack/placement-db-sync-bd5gs" Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.778803 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1901378-6afc-483b-9734-835bb9475ea1-config\") pod \"dnsmasq-dns-676b6c965c-hdjhr\" (UID: \"b1901378-6afc-483b-9734-835bb9475ea1\") " pod="openstack/dnsmasq-dns-676b6c965c-hdjhr" Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.778837 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9qc42\" (UniqueName: \"kubernetes.io/projected/7c94ec51-b96c-405a-990b-acb3a0f85209-kube-api-access-9qc42\") pod \"placement-db-sync-bd5gs\" (UID: \"7c94ec51-b96c-405a-990b-acb3a0f85209\") " pod="openstack/placement-db-sync-bd5gs" Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.778878 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/7c94ec51-b96c-405a-990b-acb3a0f85209-combined-ca-bundle\") pod \"placement-db-sync-bd5gs\" (UID: \"7c94ec51-b96c-405a-990b-acb3a0f85209\") " pod="openstack/placement-db-sync-bd5gs" Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.778984 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b1901378-6afc-483b-9734-835bb9475ea1-ovsdbserver-sb\") pod \"dnsmasq-dns-676b6c965c-hdjhr\" (UID: \"b1901378-6afc-483b-9734-835bb9475ea1\") " pod="openstack/dnsmasq-dns-676b6c965c-hdjhr" Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.779015 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c94ec51-b96c-405a-990b-acb3a0f85209-scripts\") pod \"placement-db-sync-bd5gs\" (UID: \"7c94ec51-b96c-405a-990b-acb3a0f85209\") " pod="openstack/placement-db-sync-bd5gs" Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.779054 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b1901378-6afc-483b-9734-835bb9475ea1-ovsdbserver-nb\") pod \"dnsmasq-dns-676b6c965c-hdjhr\" (UID: \"b1901378-6afc-483b-9734-835bb9475ea1\") " pod="openstack/dnsmasq-dns-676b6c965c-hdjhr" Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.779080 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7c94ec51-b96c-405a-990b-acb3a0f85209-logs\") pod \"placement-db-sync-bd5gs\" (UID: \"7c94ec51-b96c-405a-990b-acb3a0f85209\") " pod="openstack/placement-db-sync-bd5gs" Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.779804 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b1901378-6afc-483b-9734-835bb9475ea1-ovsdbserver-sb\") pod \"dnsmasq-dns-676b6c965c-hdjhr\" (UID: \"b1901378-6afc-483b-9734-835bb9475ea1\") " pod="openstack/dnsmasq-dns-676b6c965c-hdjhr" Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.779819 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1901378-6afc-483b-9734-835bb9475ea1-config\") pod \"dnsmasq-dns-676b6c965c-hdjhr\" (UID: \"b1901378-6afc-483b-9734-835bb9475ea1\") " pod="openstack/dnsmasq-dns-676b6c965c-hdjhr" Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.779947 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b1901378-6afc-483b-9734-835bb9475ea1-dns-svc\") pod \"dnsmasq-dns-676b6c965c-hdjhr\" (UID: \"b1901378-6afc-483b-9734-835bb9475ea1\") " pod="openstack/dnsmasq-dns-676b6c965c-hdjhr" Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.779959 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b1901378-6afc-483b-9734-835bb9475ea1-ovsdbserver-nb\") pod \"dnsmasq-dns-676b6c965c-hdjhr\" (UID: \"b1901378-6afc-483b-9734-835bb9475ea1\") " pod="openstack/dnsmasq-dns-676b6c965c-hdjhr" Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.780067 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mv2tg\" (UniqueName: \"kubernetes.io/projected/b1901378-6afc-483b-9734-835bb9475ea1-kube-api-access-mv2tg\") pod \"dnsmasq-dns-676b6c965c-hdjhr\" (UID: 
\"b1901378-6afc-483b-9734-835bb9475ea1\") " pod="openstack/dnsmasq-dns-676b6c965c-hdjhr" Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.800079 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mv2tg\" (UniqueName: \"kubernetes.io/projected/b1901378-6afc-483b-9734-835bb9475ea1-kube-api-access-mv2tg\") pod \"dnsmasq-dns-676b6c965c-hdjhr\" (UID: \"b1901378-6afc-483b-9734-835bb9475ea1\") " pod="openstack/dnsmasq-dns-676b6c965c-hdjhr" Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.882434 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c94ec51-b96c-405a-990b-acb3a0f85209-config-data\") pod \"placement-db-sync-bd5gs\" (UID: \"7c94ec51-b96c-405a-990b-acb3a0f85209\") " pod="openstack/placement-db-sync-bd5gs" Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.882566 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9qc42\" (UniqueName: \"kubernetes.io/projected/7c94ec51-b96c-405a-990b-acb3a0f85209-kube-api-access-9qc42\") pod \"placement-db-sync-bd5gs\" (UID: \"7c94ec51-b96c-405a-990b-acb3a0f85209\") " pod="openstack/placement-db-sync-bd5gs" Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.882615 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c94ec51-b96c-405a-990b-acb3a0f85209-combined-ca-bundle\") pod \"placement-db-sync-bd5gs\" (UID: \"7c94ec51-b96c-405a-990b-acb3a0f85209\") " pod="openstack/placement-db-sync-bd5gs" Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.882672 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c94ec51-b96c-405a-990b-acb3a0f85209-scripts\") pod \"placement-db-sync-bd5gs\" (UID: \"7c94ec51-b96c-405a-990b-acb3a0f85209\") " pod="openstack/placement-db-sync-bd5gs" Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.882710 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7c94ec51-b96c-405a-990b-acb3a0f85209-logs\") pod \"placement-db-sync-bd5gs\" (UID: \"7c94ec51-b96c-405a-990b-acb3a0f85209\") " pod="openstack/placement-db-sync-bd5gs" Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.883236 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7c94ec51-b96c-405a-990b-acb3a0f85209-logs\") pod \"placement-db-sync-bd5gs\" (UID: \"7c94ec51-b96c-405a-990b-acb3a0f85209\") " pod="openstack/placement-db-sync-bd5gs" Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.885564 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c94ec51-b96c-405a-990b-acb3a0f85209-combined-ca-bundle\") pod \"placement-db-sync-bd5gs\" (UID: \"7c94ec51-b96c-405a-990b-acb3a0f85209\") " pod="openstack/placement-db-sync-bd5gs" Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.887348 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c94ec51-b96c-405a-990b-acb3a0f85209-config-data\") pod \"placement-db-sync-bd5gs\" (UID: \"7c94ec51-b96c-405a-990b-acb3a0f85209\") " pod="openstack/placement-db-sync-bd5gs" Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.889155 4998 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c94ec51-b96c-405a-990b-acb3a0f85209-scripts\") pod \"placement-db-sync-bd5gs\" (UID: \"7c94ec51-b96c-405a-990b-acb3a0f85209\") " pod="openstack/placement-db-sync-bd5gs" Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.913595 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9qc42\" (UniqueName: \"kubernetes.io/projected/7c94ec51-b96c-405a-990b-acb3a0f85209-kube-api-access-9qc42\") pod \"placement-db-sync-bd5gs\" (UID: \"7c94ec51-b96c-405a-990b-acb3a0f85209\") " pod="openstack/placement-db-sync-bd5gs" Feb 03 09:00:01 crc kubenswrapper[4998]: I0203 09:00:01.994137 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-676b6c965c-hdjhr" Feb 03 09:00:02 crc kubenswrapper[4998]: I0203 09:00:02.027216 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-bd5gs" Feb 03 09:00:02 crc kubenswrapper[4998]: I0203 09:00:02.491649 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-676b6c965c-hdjhr"] Feb 03 09:00:02 crc kubenswrapper[4998]: I0203 09:00:02.581425 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-bd5gs"] Feb 03 09:00:02 crc kubenswrapper[4998]: I0203 09:00:02.743576 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-bd5gs" event={"ID":"7c94ec51-b96c-405a-990b-acb3a0f85209","Type":"ContainerStarted","Data":"29fe61817605699f5d77de9625bcc0e36c30b65edd3152c97c0a3ead3db3c164"} Feb 03 09:00:02 crc kubenswrapper[4998]: I0203 09:00:02.748812 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-676b6c965c-hdjhr" event={"ID":"b1901378-6afc-483b-9734-835bb9475ea1","Type":"ContainerStarted","Data":"c69bb846979ce9003e48939227f91e7b1a12aa6cec4a646df4a5b2eede0c463f"} Feb 03 09:00:03 crc kubenswrapper[4998]: I0203 09:00:02.999971 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501820-9xfll" Feb 03 09:00:03 crc kubenswrapper[4998]: I0203 09:00:03.105019 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h4b8z\" (UniqueName: \"kubernetes.io/projected/51a9c42d-e70a-4569-a2e3-cbc6696eef0f-kube-api-access-h4b8z\") pod \"51a9c42d-e70a-4569-a2e3-cbc6696eef0f\" (UID: \"51a9c42d-e70a-4569-a2e3-cbc6696eef0f\") " Feb 03 09:00:03 crc kubenswrapper[4998]: I0203 09:00:03.105082 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/51a9c42d-e70a-4569-a2e3-cbc6696eef0f-secret-volume\") pod \"51a9c42d-e70a-4569-a2e3-cbc6696eef0f\" (UID: \"51a9c42d-e70a-4569-a2e3-cbc6696eef0f\") " Feb 03 09:00:03 crc kubenswrapper[4998]: I0203 09:00:03.105109 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/51a9c42d-e70a-4569-a2e3-cbc6696eef0f-config-volume\") pod \"51a9c42d-e70a-4569-a2e3-cbc6696eef0f\" (UID: \"51a9c42d-e70a-4569-a2e3-cbc6696eef0f\") " Feb 03 09:00:03 crc kubenswrapper[4998]: I0203 09:00:03.105983 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51a9c42d-e70a-4569-a2e3-cbc6696eef0f-config-volume" (OuterVolumeSpecName: "config-volume") pod "51a9c42d-e70a-4569-a2e3-cbc6696eef0f" (UID: "51a9c42d-e70a-4569-a2e3-cbc6696eef0f"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 09:00:03 crc kubenswrapper[4998]: I0203 09:00:03.111151 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51a9c42d-e70a-4569-a2e3-cbc6696eef0f-kube-api-access-h4b8z" (OuterVolumeSpecName: "kube-api-access-h4b8z") pod "51a9c42d-e70a-4569-a2e3-cbc6696eef0f" (UID: "51a9c42d-e70a-4569-a2e3-cbc6696eef0f"). InnerVolumeSpecName "kube-api-access-h4b8z". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:00:03 crc kubenswrapper[4998]: I0203 09:00:03.111242 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51a9c42d-e70a-4569-a2e3-cbc6696eef0f-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "51a9c42d-e70a-4569-a2e3-cbc6696eef0f" (UID: "51a9c42d-e70a-4569-a2e3-cbc6696eef0f"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:00:03 crc kubenswrapper[4998]: I0203 09:00:03.207868 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h4b8z\" (UniqueName: \"kubernetes.io/projected/51a9c42d-e70a-4569-a2e3-cbc6696eef0f-kube-api-access-h4b8z\") on node \"crc\" DevicePath \"\"" Feb 03 09:00:03 crc kubenswrapper[4998]: I0203 09:00:03.207897 4998 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/51a9c42d-e70a-4569-a2e3-cbc6696eef0f-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 03 09:00:03 crc kubenswrapper[4998]: I0203 09:00:03.207906 4998 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/51a9c42d-e70a-4569-a2e3-cbc6696eef0f-config-volume\") on node \"crc\" DevicePath \"\"" Feb 03 09:00:03 crc kubenswrapper[4998]: I0203 09:00:03.758696 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501820-9xfll" event={"ID":"51a9c42d-e70a-4569-a2e3-cbc6696eef0f","Type":"ContainerDied","Data":"c56cbe00121a6936c18ac9efef40056437e65fb874d90490e59815fe87f284f9"} Feb 03 09:00:03 crc kubenswrapper[4998]: I0203 09:00:03.759176 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c56cbe00121a6936c18ac9efef40056437e65fb874d90490e59815fe87f284f9" Feb 03 09:00:03 crc kubenswrapper[4998]: I0203 09:00:03.758721 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501820-9xfll" Feb 03 09:00:03 crc kubenswrapper[4998]: I0203 09:00:03.760177 4998 generic.go:334] "Generic (PLEG): container finished" podID="b1901378-6afc-483b-9734-835bb9475ea1" containerID="f5ffd74af5ad64e21f8d3a414a3be02d8fe8f1074f19bfac15398695aa2adb29" exitCode=0 Feb 03 09:00:03 crc kubenswrapper[4998]: I0203 09:00:03.760223 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-676b6c965c-hdjhr" event={"ID":"b1901378-6afc-483b-9734-835bb9475ea1","Type":"ContainerDied","Data":"f5ffd74af5ad64e21f8d3a414a3be02d8fe8f1074f19bfac15398695aa2adb29"} Feb 03 09:00:04 crc kubenswrapper[4998]: I0203 09:00:04.081365 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501775-4t7h2"] Feb 03 09:00:04 crc kubenswrapper[4998]: I0203 09:00:04.089563 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501775-4t7h2"] Feb 03 09:00:04 crc kubenswrapper[4998]: I0203 09:00:04.441340 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5980e16-37d6-44ff-83d8-2e37675b6cab" path="/var/lib/kubelet/pods/f5980e16-37d6-44ff-83d8-2e37675b6cab/volumes" Feb 03 09:00:04 crc kubenswrapper[4998]: I0203 09:00:04.770200 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-676b6c965c-hdjhr" event={"ID":"b1901378-6afc-483b-9734-835bb9475ea1","Type":"ContainerStarted","Data":"faf68e58708175c15e4b5bc3e86e1cf4fbda2cad8f0115d6bf7fb2ff8883b10f"} Feb 03 09:00:04 crc kubenswrapper[4998]: I0203 09:00:04.770370 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-676b6c965c-hdjhr" Feb 03 09:00:04 crc kubenswrapper[4998]: I0203 09:00:04.797688 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-676b6c965c-hdjhr" podStartSLOduration=3.797667685 
podStartE2EDuration="3.797667685s" podCreationTimestamp="2026-02-03 09:00:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 09:00:04.787893728 +0000 UTC m=+8043.074587544" watchObservedRunningTime="2026-02-03 09:00:04.797667685 +0000 UTC m=+8043.084361501" Feb 03 09:00:06 crc kubenswrapper[4998]: I0203 09:00:06.795953 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-bd5gs" event={"ID":"7c94ec51-b96c-405a-990b-acb3a0f85209","Type":"ContainerStarted","Data":"74c0ecb2d4a218c66bc96717b1c7e9e91e07bbebe7ec8f12aa6995374242094c"} Feb 03 09:00:06 crc kubenswrapper[4998]: I0203 09:00:06.828332 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-bd5gs" podStartSLOduration=2.47744565 podStartE2EDuration="5.828307586s" podCreationTimestamp="2026-02-03 09:00:01 +0000 UTC" firstStartedPulling="2026-02-03 09:00:02.603197737 +0000 UTC m=+8040.889891543" lastFinishedPulling="2026-02-03 09:00:05.954059673 +0000 UTC m=+8044.240753479" observedRunningTime="2026-02-03 09:00:06.815697459 +0000 UTC m=+8045.102391295" watchObservedRunningTime="2026-02-03 09:00:06.828307586 +0000 UTC m=+8045.115001422" Feb 03 09:00:07 crc kubenswrapper[4998]: I0203 09:00:07.808590 4998 generic.go:334] "Generic (PLEG): container finished" podID="7c94ec51-b96c-405a-990b-acb3a0f85209" containerID="74c0ecb2d4a218c66bc96717b1c7e9e91e07bbebe7ec8f12aa6995374242094c" exitCode=0 Feb 03 09:00:07 crc kubenswrapper[4998]: I0203 09:00:07.808723 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-bd5gs" event={"ID":"7c94ec51-b96c-405a-990b-acb3a0f85209","Type":"ContainerDied","Data":"74c0ecb2d4a218c66bc96717b1c7e9e91e07bbebe7ec8f12aa6995374242094c"} Feb 03 09:00:09 crc kubenswrapper[4998]: I0203 09:00:09.195163 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-bd5gs" Feb 03 09:00:09 crc kubenswrapper[4998]: I0203 09:00:09.333588 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c94ec51-b96c-405a-990b-acb3a0f85209-scripts\") pod \"7c94ec51-b96c-405a-990b-acb3a0f85209\" (UID: \"7c94ec51-b96c-405a-990b-acb3a0f85209\") " Feb 03 09:00:09 crc kubenswrapper[4998]: I0203 09:00:09.333729 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c94ec51-b96c-405a-990b-acb3a0f85209-config-data\") pod \"7c94ec51-b96c-405a-990b-acb3a0f85209\" (UID: \"7c94ec51-b96c-405a-990b-acb3a0f85209\") " Feb 03 09:00:09 crc kubenswrapper[4998]: I0203 09:00:09.333845 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c94ec51-b96c-405a-990b-acb3a0f85209-combined-ca-bundle\") pod \"7c94ec51-b96c-405a-990b-acb3a0f85209\" (UID: \"7c94ec51-b96c-405a-990b-acb3a0f85209\") " Feb 03 09:00:09 crc kubenswrapper[4998]: I0203 09:00:09.333970 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7c94ec51-b96c-405a-990b-acb3a0f85209-logs\") pod \"7c94ec51-b96c-405a-990b-acb3a0f85209\" (UID: \"7c94ec51-b96c-405a-990b-acb3a0f85209\") " Feb 03 09:00:09 crc kubenswrapper[4998]: I0203 09:00:09.334088 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9qc42\" (UniqueName: \"kubernetes.io/projected/7c94ec51-b96c-405a-990b-acb3a0f85209-kube-api-access-9qc42\") pod \"7c94ec51-b96c-405a-990b-acb3a0f85209\" (UID: \"7c94ec51-b96c-405a-990b-acb3a0f85209\") " Feb 03 09:00:09 crc kubenswrapper[4998]: I0203 09:00:09.334520 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c94ec51-b96c-405a-990b-acb3a0f85209-logs" (OuterVolumeSpecName: "logs") pod "7c94ec51-b96c-405a-990b-acb3a0f85209" (UID: "7c94ec51-b96c-405a-990b-acb3a0f85209"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 09:00:09 crc kubenswrapper[4998]: I0203 09:00:09.334925 4998 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7c94ec51-b96c-405a-990b-acb3a0f85209-logs\") on node \"crc\" DevicePath \"\"" Feb 03 09:00:09 crc kubenswrapper[4998]: I0203 09:00:09.340344 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c94ec51-b96c-405a-990b-acb3a0f85209-kube-api-access-9qc42" (OuterVolumeSpecName: "kube-api-access-9qc42") pod "7c94ec51-b96c-405a-990b-acb3a0f85209" (UID: "7c94ec51-b96c-405a-990b-acb3a0f85209"). InnerVolumeSpecName "kube-api-access-9qc42". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:00:09 crc kubenswrapper[4998]: I0203 09:00:09.342992 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c94ec51-b96c-405a-990b-acb3a0f85209-scripts" (OuterVolumeSpecName: "scripts") pod "7c94ec51-b96c-405a-990b-acb3a0f85209" (UID: "7c94ec51-b96c-405a-990b-acb3a0f85209"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:00:09 crc kubenswrapper[4998]: I0203 09:00:09.377373 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c94ec51-b96c-405a-990b-acb3a0f85209-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7c94ec51-b96c-405a-990b-acb3a0f85209" (UID: "7c94ec51-b96c-405a-990b-acb3a0f85209"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:00:09 crc kubenswrapper[4998]: I0203 09:00:09.377881 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c94ec51-b96c-405a-990b-acb3a0f85209-config-data" (OuterVolumeSpecName: "config-data") pod "7c94ec51-b96c-405a-990b-acb3a0f85209" (UID: "7c94ec51-b96c-405a-990b-acb3a0f85209"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:00:09 crc kubenswrapper[4998]: I0203 09:00:09.437943 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c94ec51-b96c-405a-990b-acb3a0f85209-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 09:00:09 crc kubenswrapper[4998]: I0203 09:00:09.437991 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9qc42\" (UniqueName: \"kubernetes.io/projected/7c94ec51-b96c-405a-990b-acb3a0f85209-kube-api-access-9qc42\") on node \"crc\" DevicePath \"\"" Feb 03 09:00:09 crc kubenswrapper[4998]: I0203 09:00:09.438011 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c94ec51-b96c-405a-990b-acb3a0f85209-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 09:00:09 crc kubenswrapper[4998]: I0203 09:00:09.438026 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c94ec51-b96c-405a-990b-acb3a0f85209-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 09:00:09 crc kubenswrapper[4998]: I0203 09:00:09.642889 4998 scope.go:117] "RemoveContainer" containerID="3fdd715487c2f9c409ba5da00903fd1c23189cc9db3450baf3962ec0bed6fbe5" Feb 03 09:00:09 crc kubenswrapper[4998]: I0203 09:00:09.829063 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-bd5gs" event={"ID":"7c94ec51-b96c-405a-990b-acb3a0f85209","Type":"ContainerDied","Data":"29fe61817605699f5d77de9625bcc0e36c30b65edd3152c97c0a3ead3db3c164"} Feb 03 09:00:09 crc kubenswrapper[4998]: I0203 09:00:09.829114 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="29fe61817605699f5d77de9625bcc0e36c30b65edd3152c97c0a3ead3db3c164" Feb 03 09:00:09 crc kubenswrapper[4998]: I0203 09:00:09.829138 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-bd5gs" Feb 03 09:00:09 crc kubenswrapper[4998]: I0203 09:00:09.909580 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-79f9fb6658-bmtrv"] Feb 03 09:00:09 crc kubenswrapper[4998]: E0203 09:00:09.909993 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c94ec51-b96c-405a-990b-acb3a0f85209" containerName="placement-db-sync" Feb 03 09:00:09 crc kubenswrapper[4998]: I0203 09:00:09.910007 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c94ec51-b96c-405a-990b-acb3a0f85209" containerName="placement-db-sync" Feb 03 09:00:09 crc kubenswrapper[4998]: E0203 09:00:09.910042 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51a9c42d-e70a-4569-a2e3-cbc6696eef0f" containerName="collect-profiles" Feb 03 09:00:09 crc kubenswrapper[4998]: I0203 09:00:09.910049 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="51a9c42d-e70a-4569-a2e3-cbc6696eef0f" containerName="collect-profiles" Feb 03 09:00:09 crc kubenswrapper[4998]: I0203 09:00:09.910237 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="51a9c42d-e70a-4569-a2e3-cbc6696eef0f" containerName="collect-profiles" Feb 03 09:00:09 crc kubenswrapper[4998]: I0203 09:00:09.910257 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c94ec51-b96c-405a-990b-acb3a0f85209" containerName="placement-db-sync" Feb 03 09:00:09 crc kubenswrapper[4998]: I0203 09:00:09.911496 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-79f9fb6658-bmtrv" Feb 03 09:00:09 crc kubenswrapper[4998]: I0203 09:00:09.914223 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Feb 03 09:00:09 crc kubenswrapper[4998]: I0203 09:00:09.914515 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Feb 03 09:00:09 crc kubenswrapper[4998]: I0203 09:00:09.914627 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-khcvf" Feb 03 09:00:09 crc kubenswrapper[4998]: I0203 09:00:09.927610 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-79f9fb6658-bmtrv"] Feb 03 09:00:09 crc kubenswrapper[4998]: I0203 09:00:09.946613 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1657ff1a-5ff6-4191-b579-32e9168333d7-logs\") pod \"placement-79f9fb6658-bmtrv\" (UID: \"1657ff1a-5ff6-4191-b579-32e9168333d7\") " pod="openstack/placement-79f9fb6658-bmtrv" Feb 03 09:00:09 crc kubenswrapper[4998]: I0203 09:00:09.946720 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxtbv\" (UniqueName: \"kubernetes.io/projected/1657ff1a-5ff6-4191-b579-32e9168333d7-kube-api-access-pxtbv\") pod \"placement-79f9fb6658-bmtrv\" (UID: \"1657ff1a-5ff6-4191-b579-32e9168333d7\") " pod="openstack/placement-79f9fb6658-bmtrv" Feb 03 09:00:09 crc kubenswrapper[4998]: I0203 09:00:09.946748 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1657ff1a-5ff6-4191-b579-32e9168333d7-config-data\") pod \"placement-79f9fb6658-bmtrv\" (UID: \"1657ff1a-5ff6-4191-b579-32e9168333d7\") " pod="openstack/placement-79f9fb6658-bmtrv" Feb 03 09:00:09 crc kubenswrapper[4998]: I0203 09:00:09.946796 4998 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1657ff1a-5ff6-4191-b579-32e9168333d7-scripts\") pod \"placement-79f9fb6658-bmtrv\" (UID: \"1657ff1a-5ff6-4191-b579-32e9168333d7\") " pod="openstack/placement-79f9fb6658-bmtrv" Feb 03 09:00:09 crc kubenswrapper[4998]: I0203 09:00:09.946850 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1657ff1a-5ff6-4191-b579-32e9168333d7-combined-ca-bundle\") pod \"placement-79f9fb6658-bmtrv\" (UID: \"1657ff1a-5ff6-4191-b579-32e9168333d7\") " pod="openstack/placement-79f9fb6658-bmtrv" Feb 03 09:00:10 crc kubenswrapper[4998]: I0203 09:00:10.048236 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxtbv\" (UniqueName: \"kubernetes.io/projected/1657ff1a-5ff6-4191-b579-32e9168333d7-kube-api-access-pxtbv\") pod \"placement-79f9fb6658-bmtrv\" (UID: \"1657ff1a-5ff6-4191-b579-32e9168333d7\") " pod="openstack/placement-79f9fb6658-bmtrv" Feb 03 09:00:10 crc kubenswrapper[4998]: I0203 09:00:10.048299 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1657ff1a-5ff6-4191-b579-32e9168333d7-config-data\") pod \"placement-79f9fb6658-bmtrv\" (UID: \"1657ff1a-5ff6-4191-b579-32e9168333d7\") " pod="openstack/placement-79f9fb6658-bmtrv" Feb 03 09:00:10 crc kubenswrapper[4998]: I0203 09:00:10.048342 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1657ff1a-5ff6-4191-b579-32e9168333d7-scripts\") pod \"placement-79f9fb6658-bmtrv\" (UID: \"1657ff1a-5ff6-4191-b579-32e9168333d7\") " pod="openstack/placement-79f9fb6658-bmtrv" Feb 03 09:00:10 crc kubenswrapper[4998]: I0203 09:00:10.048367 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1657ff1a-5ff6-4191-b579-32e9168333d7-combined-ca-bundle\") pod \"placement-79f9fb6658-bmtrv\" (UID: \"1657ff1a-5ff6-4191-b579-32e9168333d7\") " pod="openstack/placement-79f9fb6658-bmtrv" Feb 03 09:00:10 crc kubenswrapper[4998]: I0203 09:00:10.048444 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1657ff1a-5ff6-4191-b579-32e9168333d7-logs\") pod \"placement-79f9fb6658-bmtrv\" (UID: \"1657ff1a-5ff6-4191-b579-32e9168333d7\") " pod="openstack/placement-79f9fb6658-bmtrv" Feb 03 09:00:10 crc kubenswrapper[4998]: I0203 09:00:10.048947 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1657ff1a-5ff6-4191-b579-32e9168333d7-logs\") pod \"placement-79f9fb6658-bmtrv\" (UID: \"1657ff1a-5ff6-4191-b579-32e9168333d7\") " pod="openstack/placement-79f9fb6658-bmtrv" Feb 03 09:00:10 crc kubenswrapper[4998]: I0203 09:00:10.053866 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1657ff1a-5ff6-4191-b579-32e9168333d7-scripts\") pod \"placement-79f9fb6658-bmtrv\" (UID: \"1657ff1a-5ff6-4191-b579-32e9168333d7\") " pod="openstack/placement-79f9fb6658-bmtrv" Feb 03 09:00:10 crc kubenswrapper[4998]: I0203 09:00:10.054666 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/1657ff1a-5ff6-4191-b579-32e9168333d7-config-data\") pod \"placement-79f9fb6658-bmtrv\" (UID: \"1657ff1a-5ff6-4191-b579-32e9168333d7\") " pod="openstack/placement-79f9fb6658-bmtrv" Feb 03 09:00:10 crc kubenswrapper[4998]: I0203 09:00:10.055535 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1657ff1a-5ff6-4191-b579-32e9168333d7-combined-ca-bundle\") pod \"placement-79f9fb6658-bmtrv\" (UID: \"1657ff1a-5ff6-4191-b579-32e9168333d7\") " pod="openstack/placement-79f9fb6658-bmtrv" Feb 03 09:00:10 crc kubenswrapper[4998]: I0203 09:00:10.071000 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxtbv\" (UniqueName: \"kubernetes.io/projected/1657ff1a-5ff6-4191-b579-32e9168333d7-kube-api-access-pxtbv\") pod \"placement-79f9fb6658-bmtrv\" (UID: \"1657ff1a-5ff6-4191-b579-32e9168333d7\") " pod="openstack/placement-79f9fb6658-bmtrv" Feb 03 09:00:10 crc kubenswrapper[4998]: I0203 09:00:10.232154 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-79f9fb6658-bmtrv" Feb 03 09:00:10 crc kubenswrapper[4998]: I0203 09:00:10.662572 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-79f9fb6658-bmtrv"] Feb 03 09:00:10 crc kubenswrapper[4998]: W0203 09:00:10.667419 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1657ff1a_5ff6_4191_b579_32e9168333d7.slice/crio-4570c3b8ee7cb3faeb5c965e5922267ef86919f2e54c69188a6645b812c95dd3 WatchSource:0}: Error finding container 4570c3b8ee7cb3faeb5c965e5922267ef86919f2e54c69188a6645b812c95dd3: Status 404 returned error can't find the container with id 4570c3b8ee7cb3faeb5c965e5922267ef86919f2e54c69188a6645b812c95dd3 Feb 03 09:00:10 crc kubenswrapper[4998]: I0203 09:00:10.840359 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-79f9fb6658-bmtrv" event={"ID":"1657ff1a-5ff6-4191-b579-32e9168333d7","Type":"ContainerStarted","Data":"4570c3b8ee7cb3faeb5c965e5922267ef86919f2e54c69188a6645b812c95dd3"} Feb 03 09:00:11 crc kubenswrapper[4998]: I0203 09:00:11.851964 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-79f9fb6658-bmtrv" event={"ID":"1657ff1a-5ff6-4191-b579-32e9168333d7","Type":"ContainerStarted","Data":"99ad57951d0e732bdcb6a301af2df4dc12b63c17f908f108804c179cc2c5bec5"} Feb 03 09:00:11 crc kubenswrapper[4998]: I0203 09:00:11.852024 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-79f9fb6658-bmtrv" event={"ID":"1657ff1a-5ff6-4191-b579-32e9168333d7","Type":"ContainerStarted","Data":"63c488c11ee74e638e2ec9f6c978960d42400c3e09c1b35625ba96a8ae233abe"} Feb 03 09:00:11 crc kubenswrapper[4998]: I0203 09:00:11.852345 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-79f9fb6658-bmtrv" Feb 03 09:00:11 crc kubenswrapper[4998]: I0203 09:00:11.852630 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-79f9fb6658-bmtrv" Feb 03 09:00:11 crc kubenswrapper[4998]: I0203 09:00:11.896977 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-79f9fb6658-bmtrv" podStartSLOduration=2.896952776 podStartE2EDuration="2.896952776s" podCreationTimestamp="2026-02-03 09:00:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2026-02-03 09:00:11.876687821 +0000 UTC m=+8050.163381637" watchObservedRunningTime="2026-02-03 09:00:11.896952776 +0000 UTC m=+8050.183646592" Feb 03 09:00:11 crc kubenswrapper[4998]: I0203 09:00:11.996437 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-676b6c965c-hdjhr" Feb 03 09:00:12 crc kubenswrapper[4998]: I0203 09:00:12.105837 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c77f645c-4sb7w"] Feb 03 09:00:12 crc kubenswrapper[4998]: I0203 09:00:12.106365 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c77f645c-4sb7w" podUID="a08a9a82-c064-43c5-bef3-7f2d85cd18e5" containerName="dnsmasq-dns" containerID="cri-o://f159a68d249149e532b4e23ed202fd8b3e27eeba75935c2c911ae310e321216f" gracePeriod=10 Feb 03 09:00:12 crc kubenswrapper[4998]: I0203 09:00:12.754662 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 09:00:12 crc kubenswrapper[4998]: I0203 09:00:12.755007 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 09:00:13 crc kubenswrapper[4998]: I0203 09:00:13.871167 4998 generic.go:334] "Generic (PLEG): container finished" podID="a08a9a82-c064-43c5-bef3-7f2d85cd18e5" containerID="f159a68d249149e532b4e23ed202fd8b3e27eeba75935c2c911ae310e321216f" exitCode=0 Feb 03 09:00:13 crc kubenswrapper[4998]: I0203 09:00:13.871238 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c77f645c-4sb7w" event={"ID":"a08a9a82-c064-43c5-bef3-7f2d85cd18e5","Type":"ContainerDied","Data":"f159a68d249149e532b4e23ed202fd8b3e27eeba75935c2c911ae310e321216f"} Feb 03 09:00:13 crc kubenswrapper[4998]: I0203 09:00:13.871523 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c77f645c-4sb7w" event={"ID":"a08a9a82-c064-43c5-bef3-7f2d85cd18e5","Type":"ContainerDied","Data":"0b4cf97aa32acbe7c502dde057b5ac0b71a9e338e6e9b3ab65d5c726137ce691"} Feb 03 09:00:13 crc kubenswrapper[4998]: I0203 09:00:13.871548 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0b4cf97aa32acbe7c502dde057b5ac0b71a9e338e6e9b3ab65d5c726137ce691" Feb 03 09:00:13 crc kubenswrapper[4998]: I0203 09:00:13.912301 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c77f645c-4sb7w" Feb 03 09:00:14 crc kubenswrapper[4998]: I0203 09:00:14.044670 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a08a9a82-c064-43c5-bef3-7f2d85cd18e5-dns-svc\") pod \"a08a9a82-c064-43c5-bef3-7f2d85cd18e5\" (UID: \"a08a9a82-c064-43c5-bef3-7f2d85cd18e5\") " Feb 03 09:00:14 crc kubenswrapper[4998]: I0203 09:00:14.044835 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a08a9a82-c064-43c5-bef3-7f2d85cd18e5-ovsdbserver-nb\") pod \"a08a9a82-c064-43c5-bef3-7f2d85cd18e5\" (UID: \"a08a9a82-c064-43c5-bef3-7f2d85cd18e5\") " Feb 03 09:00:14 crc kubenswrapper[4998]: I0203 09:00:14.044962 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a08a9a82-c064-43c5-bef3-7f2d85cd18e5-ovsdbserver-sb\") pod \"a08a9a82-c064-43c5-bef3-7f2d85cd18e5\" (UID: \"a08a9a82-c064-43c5-bef3-7f2d85cd18e5\") " Feb 03 09:00:14 crc kubenswrapper[4998]: I0203 09:00:14.044982 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dl6s6\" (UniqueName: \"kubernetes.io/projected/a08a9a82-c064-43c5-bef3-7f2d85cd18e5-kube-api-access-dl6s6\") pod \"a08a9a82-c064-43c5-bef3-7f2d85cd18e5\" (UID: \"a08a9a82-c064-43c5-bef3-7f2d85cd18e5\") " Feb 03 09:00:14 crc kubenswrapper[4998]: I0203 09:00:14.045038 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a08a9a82-c064-43c5-bef3-7f2d85cd18e5-config\") pod \"a08a9a82-c064-43c5-bef3-7f2d85cd18e5\" (UID: \"a08a9a82-c064-43c5-bef3-7f2d85cd18e5\") " Feb 03 09:00:14 crc kubenswrapper[4998]: I0203 09:00:14.053757 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a08a9a82-c064-43c5-bef3-7f2d85cd18e5-kube-api-access-dl6s6" (OuterVolumeSpecName: "kube-api-access-dl6s6") pod "a08a9a82-c064-43c5-bef3-7f2d85cd18e5" (UID: "a08a9a82-c064-43c5-bef3-7f2d85cd18e5"). InnerVolumeSpecName "kube-api-access-dl6s6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:00:14 crc kubenswrapper[4998]: I0203 09:00:14.096582 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a08a9a82-c064-43c5-bef3-7f2d85cd18e5-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a08a9a82-c064-43c5-bef3-7f2d85cd18e5" (UID: "a08a9a82-c064-43c5-bef3-7f2d85cd18e5"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 09:00:14 crc kubenswrapper[4998]: I0203 09:00:14.100278 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a08a9a82-c064-43c5-bef3-7f2d85cd18e5-config" (OuterVolumeSpecName: "config") pod "a08a9a82-c064-43c5-bef3-7f2d85cd18e5" (UID: "a08a9a82-c064-43c5-bef3-7f2d85cd18e5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 09:00:14 crc kubenswrapper[4998]: I0203 09:00:14.121167 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a08a9a82-c064-43c5-bef3-7f2d85cd18e5-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a08a9a82-c064-43c5-bef3-7f2d85cd18e5" (UID: "a08a9a82-c064-43c5-bef3-7f2d85cd18e5"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 09:00:14 crc kubenswrapper[4998]: I0203 09:00:14.121332 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a08a9a82-c064-43c5-bef3-7f2d85cd18e5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a08a9a82-c064-43c5-bef3-7f2d85cd18e5" (UID: "a08a9a82-c064-43c5-bef3-7f2d85cd18e5"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 09:00:14 crc kubenswrapper[4998]: I0203 09:00:14.146849 4998 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a08a9a82-c064-43c5-bef3-7f2d85cd18e5-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 09:00:14 crc kubenswrapper[4998]: I0203 09:00:14.147712 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a08a9a82-c064-43c5-bef3-7f2d85cd18e5-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 03 09:00:14 crc kubenswrapper[4998]: I0203 09:00:14.147738 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a08a9a82-c064-43c5-bef3-7f2d85cd18e5-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 03 09:00:14 crc kubenswrapper[4998]: I0203 09:00:14.147752 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dl6s6\" (UniqueName: \"kubernetes.io/projected/a08a9a82-c064-43c5-bef3-7f2d85cd18e5-kube-api-access-dl6s6\") on node \"crc\" DevicePath \"\"" Feb 03 09:00:14 crc kubenswrapper[4998]: I0203 09:00:14.147763 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a08a9a82-c064-43c5-bef3-7f2d85cd18e5-config\") on node \"crc\" DevicePath \"\"" Feb 03 09:00:14 crc kubenswrapper[4998]: I0203 09:00:14.879713 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c77f645c-4sb7w" Feb 03 09:00:14 crc kubenswrapper[4998]: I0203 09:00:14.909024 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c77f645c-4sb7w"] Feb 03 09:00:14 crc kubenswrapper[4998]: I0203 09:00:14.917710 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c77f645c-4sb7w"] Feb 03 09:00:16 crc kubenswrapper[4998]: I0203 09:00:16.439455 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a08a9a82-c064-43c5-bef3-7f2d85cd18e5" path="/var/lib/kubelet/pods/a08a9a82-c064-43c5-bef3-7f2d85cd18e5/volumes" Feb 03 09:00:41 crc kubenswrapper[4998]: I0203 09:00:41.228174 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-79f9fb6658-bmtrv" Feb 03 09:00:41 crc kubenswrapper[4998]: I0203 09:00:41.241963 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-79f9fb6658-bmtrv" Feb 03 09:00:42 crc kubenswrapper[4998]: I0203 09:00:42.754668 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 09:00:42 crc kubenswrapper[4998]: I0203 09:00:42.754951 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 09:01:00 crc kubenswrapper[4998]: I0203 09:01:00.145700 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29501821-lp2m4"] Feb 03 09:01:00 crc kubenswrapper[4998]: E0203 09:01:00.146823 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a08a9a82-c064-43c5-bef3-7f2d85cd18e5" containerName="dnsmasq-dns" Feb 03 09:01:00 crc kubenswrapper[4998]: I0203 09:01:00.146840 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="a08a9a82-c064-43c5-bef3-7f2d85cd18e5" containerName="dnsmasq-dns" Feb 03 09:01:00 crc kubenswrapper[4998]: E0203 09:01:00.146887 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a08a9a82-c064-43c5-bef3-7f2d85cd18e5" containerName="init" Feb 03 09:01:00 crc kubenswrapper[4998]: I0203 09:01:00.146896 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="a08a9a82-c064-43c5-bef3-7f2d85cd18e5" containerName="init" Feb 03 09:01:00 crc kubenswrapper[4998]: I0203 09:01:00.147114 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="a08a9a82-c064-43c5-bef3-7f2d85cd18e5" containerName="dnsmasq-dns" Feb 03 09:01:00 crc kubenswrapper[4998]: I0203 09:01:00.147892 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29501821-lp2m4" Feb 03 09:01:00 crc kubenswrapper[4998]: I0203 09:01:00.170403 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29501821-lp2m4"] Feb 03 09:01:00 crc kubenswrapper[4998]: I0203 09:01:00.316503 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hfdvw\" (UniqueName: \"kubernetes.io/projected/b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c-kube-api-access-hfdvw\") pod \"keystone-cron-29501821-lp2m4\" (UID: \"b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c\") " pod="openstack/keystone-cron-29501821-lp2m4" Feb 03 09:01:00 crc kubenswrapper[4998]: I0203 09:01:00.316676 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c-config-data\") pod \"keystone-cron-29501821-lp2m4\" (UID: \"b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c\") " pod="openstack/keystone-cron-29501821-lp2m4" Feb 03 09:01:00 crc kubenswrapper[4998]: I0203 09:01:00.317043 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c-combined-ca-bundle\") pod \"keystone-cron-29501821-lp2m4\" (UID: \"b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c\") " pod="openstack/keystone-cron-29501821-lp2m4" Feb 03 09:01:00 crc kubenswrapper[4998]: I0203 09:01:00.317106 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c-fernet-keys\") pod \"keystone-cron-29501821-lp2m4\" (UID: \"b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c\") " pod="openstack/keystone-cron-29501821-lp2m4" Feb 03 09:01:00 crc kubenswrapper[4998]: I0203 09:01:00.419237 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c-config-data\") pod \"keystone-cron-29501821-lp2m4\" (UID: \"b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c\") " pod="openstack/keystone-cron-29501821-lp2m4" Feb 03 09:01:00 crc kubenswrapper[4998]: I0203 09:01:00.419456 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c-combined-ca-bundle\") pod \"keystone-cron-29501821-lp2m4\" (UID: \"b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c\") " pod="openstack/keystone-cron-29501821-lp2m4" Feb 03 09:01:00 crc kubenswrapper[4998]: I0203 09:01:00.419486 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c-fernet-keys\") pod \"keystone-cron-29501821-lp2m4\" (UID: \"b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c\") " pod="openstack/keystone-cron-29501821-lp2m4" Feb 03 09:01:00 crc kubenswrapper[4998]: I0203 09:01:00.419527 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hfdvw\" (UniqueName: \"kubernetes.io/projected/b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c-kube-api-access-hfdvw\") pod \"keystone-cron-29501821-lp2m4\" (UID: \"b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c\") " pod="openstack/keystone-cron-29501821-lp2m4" Feb 03 09:01:00 crc kubenswrapper[4998]: I0203 09:01:00.425991 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c-fernet-keys\") pod \"keystone-cron-29501821-lp2m4\" (UID: \"b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c\") " pod="openstack/keystone-cron-29501821-lp2m4" Feb 03 09:01:00 crc kubenswrapper[4998]: I0203 09:01:00.426479 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c-config-data\") pod \"keystone-cron-29501821-lp2m4\" (UID: \"b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c\") " pod="openstack/keystone-cron-29501821-lp2m4" Feb 03 09:01:00 crc kubenswrapper[4998]: I0203 09:01:00.445121 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hfdvw\" (UniqueName: \"kubernetes.io/projected/b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c-kube-api-access-hfdvw\") pod \"keystone-cron-29501821-lp2m4\" (UID: \"b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c\") " pod="openstack/keystone-cron-29501821-lp2m4" Feb 03 09:01:00 crc kubenswrapper[4998]: I0203 09:01:00.452952 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c-combined-ca-bundle\") pod \"keystone-cron-29501821-lp2m4\" (UID: \"b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c\") " pod="openstack/keystone-cron-29501821-lp2m4" Feb 03 09:01:00 crc kubenswrapper[4998]: I0203 09:01:00.480329 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29501821-lp2m4" Feb 03 09:01:00 crc kubenswrapper[4998]: I0203 09:01:00.998176 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29501821-lp2m4"] Feb 03 09:01:01 crc kubenswrapper[4998]: I0203 09:01:01.362386 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29501821-lp2m4" event={"ID":"b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c","Type":"ContainerStarted","Data":"ae948091ef8e122f514763d25b2e8c2e7f0d27e122e91adb199fe9cfd810dcac"} Feb 03 09:01:01 crc kubenswrapper[4998]: I0203 09:01:01.362446 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29501821-lp2m4" event={"ID":"b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c","Type":"ContainerStarted","Data":"22a3e5c6d6962cf6b9f27c37a2bd79f88eb7c74b7ae6f3a073210496c5f9e509"} Feb 03 09:01:01 crc kubenswrapper[4998]: I0203 09:01:01.384005 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29501821-lp2m4" podStartSLOduration=1.3839894099999999 podStartE2EDuration="1.38398941s" podCreationTimestamp="2026-02-03 09:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 09:01:01.377521067 +0000 UTC m=+8099.664214873" watchObservedRunningTime="2026-02-03 09:01:01.38398941 +0000 UTC m=+8099.670683216" Feb 03 09:01:03 crc kubenswrapper[4998]: I0203 09:01:03.380823 4998 generic.go:334] "Generic (PLEG): container finished" podID="b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c" containerID="ae948091ef8e122f514763d25b2e8c2e7f0d27e122e91adb199fe9cfd810dcac" exitCode=0 Feb 03 09:01:03 crc kubenswrapper[4998]: I0203 09:01:03.380902 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29501821-lp2m4" event={"ID":"b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c","Type":"ContainerDied","Data":"ae948091ef8e122f514763d25b2e8c2e7f0d27e122e91adb199fe9cfd810dcac"} Feb 03 09:01:04 crc 
Feb 03 09:01:04 crc kubenswrapper[4998]: I0203 09:01:04.758637 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-gc8cr"]
Feb 03 09:01:04 crc kubenswrapper[4998]: I0203 09:01:04.760010 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-gc8cr"
Feb 03 09:01:04 crc kubenswrapper[4998]: I0203 09:01:04.772369 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29501821-lp2m4"
Feb 03 09:01:04 crc kubenswrapper[4998]: I0203 09:01:04.779083 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-gc8cr"]
Feb 03 09:01:04 crc kubenswrapper[4998]: I0203 09:01:04.865231 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-clppj"]
Feb 03 09:01:04 crc kubenswrapper[4998]: E0203 09:01:04.865587 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c" containerName="keystone-cron"
Feb 03 09:01:04 crc kubenswrapper[4998]: I0203 09:01:04.865603 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c" containerName="keystone-cron"
Feb 03 09:01:04 crc kubenswrapper[4998]: I0203 09:01:04.865759 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c" containerName="keystone-cron"
Feb 03 09:01:04 crc kubenswrapper[4998]: I0203 09:01:04.866395 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-clppj"
Feb 03 09:01:04 crc kubenswrapper[4998]: I0203 09:01:04.883620 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-clppj"]
Feb 03 09:01:04 crc kubenswrapper[4998]: I0203 09:01:04.929037 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c-fernet-keys\") pod \"b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c\" (UID: \"b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c\") "
Feb 03 09:01:04 crc kubenswrapper[4998]: I0203 09:01:04.929121 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c-combined-ca-bundle\") pod \"b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c\" (UID: \"b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c\") "
Feb 03 09:01:04 crc kubenswrapper[4998]: I0203 09:01:04.929161 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hfdvw\" (UniqueName: \"kubernetes.io/projected/b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c-kube-api-access-hfdvw\") pod \"b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c\" (UID: \"b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c\") "
Feb 03 09:01:04 crc kubenswrapper[4998]: I0203 09:01:04.929203 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c-config-data\") pod \"b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c\" (UID: \"b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c\") "
Feb 03 09:01:04 crc kubenswrapper[4998]: I0203 09:01:04.929594 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/77ff460f-4a6b-4f54-987d-e3b87003e735-operator-scripts\") pod \"nova-api-db-create-gc8cr\" (UID: \"77ff460f-4a6b-4f54-987d-e3b87003e735\") " pod="openstack/nova-api-db-create-gc8cr"
Feb 03 09:01:04 crc kubenswrapper[4998]: I0203 09:01:04.929674 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43447ea3-026c-476d-a8d0-f44de45d6e67-operator-scripts\") pod \"nova-cell0-db-create-clppj\" (UID: \"43447ea3-026c-476d-a8d0-f44de45d6e67\") " pod="openstack/nova-cell0-db-create-clppj"
Feb 03 09:01:04 crc kubenswrapper[4998]: I0203 09:01:04.929734 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-878wp\" (UniqueName: \"kubernetes.io/projected/77ff460f-4a6b-4f54-987d-e3b87003e735-kube-api-access-878wp\") pod \"nova-api-db-create-gc8cr\" (UID: \"77ff460f-4a6b-4f54-987d-e3b87003e735\") " pod="openstack/nova-api-db-create-gc8cr"
Feb 03 09:01:04 crc kubenswrapper[4998]: I0203 09:01:04.929775 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gbhrq\" (UniqueName: \"kubernetes.io/projected/43447ea3-026c-476d-a8d0-f44de45d6e67-kube-api-access-gbhrq\") pod \"nova-cell0-db-create-clppj\" (UID: \"43447ea3-026c-476d-a8d0-f44de45d6e67\") " pod="openstack/nova-cell0-db-create-clppj"
Feb 03 09:01:04 crc kubenswrapper[4998]: I0203 09:01:04.935339 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c-kube-api-access-hfdvw" (OuterVolumeSpecName: "kube-api-access-hfdvw") pod "b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c" (UID: "b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c"). InnerVolumeSpecName "kube-api-access-hfdvw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 09:01:04 crc kubenswrapper[4998]: I0203 09:01:04.936421 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c" (UID: "b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 09:01:04 crc kubenswrapper[4998]: I0203 09:01:04.993706 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-13d7-account-create-update-jc8qq"]
Feb 03 09:01:04 crc kubenswrapper[4998]: I0203 09:01:04.993918 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c" (UID: "b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Need to start a new one" pod="openstack/nova-api-13d7-account-create-update-jc8qq" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.001737 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.011064 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-13d7-account-create-update-jc8qq"] Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.023242 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c-config-data" (OuterVolumeSpecName: "config-data") pod "b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c" (UID: "b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.031582 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-878wp\" (UniqueName: \"kubernetes.io/projected/77ff460f-4a6b-4f54-987d-e3b87003e735-kube-api-access-878wp\") pod \"nova-api-db-create-gc8cr\" (UID: \"77ff460f-4a6b-4f54-987d-e3b87003e735\") " pod="openstack/nova-api-db-create-gc8cr" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.031642 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dz76b\" (UniqueName: \"kubernetes.io/projected/392cb9be-6c09-4ec6-8615-6d9978b0dfc9-kube-api-access-dz76b\") pod \"nova-api-13d7-account-create-update-jc8qq\" (UID: \"392cb9be-6c09-4ec6-8615-6d9978b0dfc9\") " pod="openstack/nova-api-13d7-account-create-update-jc8qq" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.031665 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gbhrq\" (UniqueName: \"kubernetes.io/projected/43447ea3-026c-476d-a8d0-f44de45d6e67-kube-api-access-gbhrq\") pod \"nova-cell0-db-create-clppj\" (UID: \"43447ea3-026c-476d-a8d0-f44de45d6e67\") " pod="openstack/nova-cell0-db-create-clppj" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.031754 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/392cb9be-6c09-4ec6-8615-6d9978b0dfc9-operator-scripts\") pod \"nova-api-13d7-account-create-update-jc8qq\" (UID: \"392cb9be-6c09-4ec6-8615-6d9978b0dfc9\") " pod="openstack/nova-api-13d7-account-create-update-jc8qq" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.031789 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/77ff460f-4a6b-4f54-987d-e3b87003e735-operator-scripts\") pod \"nova-api-db-create-gc8cr\" (UID: \"77ff460f-4a6b-4f54-987d-e3b87003e735\") " pod="openstack/nova-api-db-create-gc8cr" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.031842 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43447ea3-026c-476d-a8d0-f44de45d6e67-operator-scripts\") pod \"nova-cell0-db-create-clppj\" (UID: \"43447ea3-026c-476d-a8d0-f44de45d6e67\") " pod="openstack/nova-cell0-db-create-clppj" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.031909 4998 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c-fernet-keys\") on node \"crc\" 
DevicePath \"\"" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.031920 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.031938 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hfdvw\" (UniqueName: \"kubernetes.io/projected/b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c-kube-api-access-hfdvw\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.031948 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.032596 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43447ea3-026c-476d-a8d0-f44de45d6e67-operator-scripts\") pod \"nova-cell0-db-create-clppj\" (UID: \"43447ea3-026c-476d-a8d0-f44de45d6e67\") " pod="openstack/nova-cell0-db-create-clppj" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.033512 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/77ff460f-4a6b-4f54-987d-e3b87003e735-operator-scripts\") pod \"nova-api-db-create-gc8cr\" (UID: \"77ff460f-4a6b-4f54-987d-e3b87003e735\") " pod="openstack/nova-api-db-create-gc8cr" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.068740 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-878wp\" (UniqueName: \"kubernetes.io/projected/77ff460f-4a6b-4f54-987d-e3b87003e735-kube-api-access-878wp\") pod \"nova-api-db-create-gc8cr\" (UID: \"77ff460f-4a6b-4f54-987d-e3b87003e735\") " pod="openstack/nova-api-db-create-gc8cr" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.069695 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gbhrq\" (UniqueName: \"kubernetes.io/projected/43447ea3-026c-476d-a8d0-f44de45d6e67-kube-api-access-gbhrq\") pod \"nova-cell0-db-create-clppj\" (UID: \"43447ea3-026c-476d-a8d0-f44de45d6e67\") " pod="openstack/nova-cell0-db-create-clppj" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.077821 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell3-db-create-sdzn8"] Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.079253 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell3-db-create-sdzn8" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.087375 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell3-db-create-sdzn8"] Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.088959 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-gc8cr" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.133246 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7cscr\" (UniqueName: \"kubernetes.io/projected/d3975bdb-b42f-49ee-ac35-00ddcb9760af-kube-api-access-7cscr\") pod \"nova-cell3-db-create-sdzn8\" (UID: \"d3975bdb-b42f-49ee-ac35-00ddcb9760af\") " pod="openstack/nova-cell3-db-create-sdzn8" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.133385 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dz76b\" (UniqueName: \"kubernetes.io/projected/392cb9be-6c09-4ec6-8615-6d9978b0dfc9-kube-api-access-dz76b\") pod \"nova-api-13d7-account-create-update-jc8qq\" (UID: \"392cb9be-6c09-4ec6-8615-6d9978b0dfc9\") " pod="openstack/nova-api-13d7-account-create-update-jc8qq" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.133523 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d3975bdb-b42f-49ee-ac35-00ddcb9760af-operator-scripts\") pod \"nova-cell3-db-create-sdzn8\" (UID: \"d3975bdb-b42f-49ee-ac35-00ddcb9760af\") " pod="openstack/nova-cell3-db-create-sdzn8" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.133549 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/392cb9be-6c09-4ec6-8615-6d9978b0dfc9-operator-scripts\") pod \"nova-api-13d7-account-create-update-jc8qq\" (UID: \"392cb9be-6c09-4ec6-8615-6d9978b0dfc9\") " pod="openstack/nova-api-13d7-account-create-update-jc8qq" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.134498 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/392cb9be-6c09-4ec6-8615-6d9978b0dfc9-operator-scripts\") pod \"nova-api-13d7-account-create-update-jc8qq\" (UID: \"392cb9be-6c09-4ec6-8615-6d9978b0dfc9\") " pod="openstack/nova-api-13d7-account-create-update-jc8qq" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.153451 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dz76b\" (UniqueName: \"kubernetes.io/projected/392cb9be-6c09-4ec6-8615-6d9978b0dfc9-kube-api-access-dz76b\") pod \"nova-api-13d7-account-create-update-jc8qq\" (UID: \"392cb9be-6c09-4ec6-8615-6d9978b0dfc9\") " pod="openstack/nova-api-13d7-account-create-update-jc8qq" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.181077 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-f8dc-account-create-update-442th"] Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.183861 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-f8dc-account-create-update-442th" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.185860 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.189246 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-clppj" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.240225 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c8770d44-dcad-42f2-8637-1cf4213b1358-operator-scripts\") pod \"nova-cell0-f8dc-account-create-update-442th\" (UID: \"c8770d44-dcad-42f2-8637-1cf4213b1358\") " pod="openstack/nova-cell0-f8dc-account-create-update-442th" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.240586 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ngthh\" (UniqueName: \"kubernetes.io/projected/c8770d44-dcad-42f2-8637-1cf4213b1358-kube-api-access-ngthh\") pod \"nova-cell0-f8dc-account-create-update-442th\" (UID: \"c8770d44-dcad-42f2-8637-1cf4213b1358\") " pod="openstack/nova-cell0-f8dc-account-create-update-442th" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.240730 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d3975bdb-b42f-49ee-ac35-00ddcb9760af-operator-scripts\") pod \"nova-cell3-db-create-sdzn8\" (UID: \"d3975bdb-b42f-49ee-ac35-00ddcb9760af\") " pod="openstack/nova-cell3-db-create-sdzn8" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.240869 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7cscr\" (UniqueName: \"kubernetes.io/projected/d3975bdb-b42f-49ee-ac35-00ddcb9760af-kube-api-access-7cscr\") pod \"nova-cell3-db-create-sdzn8\" (UID: \"d3975bdb-b42f-49ee-ac35-00ddcb9760af\") " pod="openstack/nova-cell3-db-create-sdzn8" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.241912 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d3975bdb-b42f-49ee-ac35-00ddcb9760af-operator-scripts\") pod \"nova-cell3-db-create-sdzn8\" (UID: \"d3975bdb-b42f-49ee-ac35-00ddcb9760af\") " pod="openstack/nova-cell3-db-create-sdzn8" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.271995 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-f8dc-account-create-update-442th"] Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.273851 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7cscr\" (UniqueName: \"kubernetes.io/projected/d3975bdb-b42f-49ee-ac35-00ddcb9760af-kube-api-access-7cscr\") pod \"nova-cell3-db-create-sdzn8\" (UID: \"d3975bdb-b42f-49ee-ac35-00ddcb9760af\") " pod="openstack/nova-cell3-db-create-sdzn8" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.302127 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-pz4fh"] Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.303867 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-pz4fh" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.313253 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-pz4fh"] Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.344122 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9mrfz\" (UniqueName: \"kubernetes.io/projected/d3408867-7c55-4f2a-ba1a-d47cc8dd38cb-kube-api-access-9mrfz\") pod \"nova-cell1-db-create-pz4fh\" (UID: \"d3408867-7c55-4f2a-ba1a-d47cc8dd38cb\") " pod="openstack/nova-cell1-db-create-pz4fh" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.344246 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c8770d44-dcad-42f2-8637-1cf4213b1358-operator-scripts\") pod \"nova-cell0-f8dc-account-create-update-442th\" (UID: \"c8770d44-dcad-42f2-8637-1cf4213b1358\") " pod="openstack/nova-cell0-f8dc-account-create-update-442th" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.344274 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ngthh\" (UniqueName: \"kubernetes.io/projected/c8770d44-dcad-42f2-8637-1cf4213b1358-kube-api-access-ngthh\") pod \"nova-cell0-f8dc-account-create-update-442th\" (UID: \"c8770d44-dcad-42f2-8637-1cf4213b1358\") " pod="openstack/nova-cell0-f8dc-account-create-update-442th" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.344318 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d3408867-7c55-4f2a-ba1a-d47cc8dd38cb-operator-scripts\") pod \"nova-cell1-db-create-pz4fh\" (UID: \"d3408867-7c55-4f2a-ba1a-d47cc8dd38cb\") " pod="openstack/nova-cell1-db-create-pz4fh" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.345237 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c8770d44-dcad-42f2-8637-1cf4213b1358-operator-scripts\") pod \"nova-cell0-f8dc-account-create-update-442th\" (UID: \"c8770d44-dcad-42f2-8637-1cf4213b1358\") " pod="openstack/nova-cell0-f8dc-account-create-update-442th" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.380975 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ngthh\" (UniqueName: \"kubernetes.io/projected/c8770d44-dcad-42f2-8637-1cf4213b1358-kube-api-access-ngthh\") pod \"nova-cell0-f8dc-account-create-update-442th\" (UID: \"c8770d44-dcad-42f2-8637-1cf4213b1358\") " pod="openstack/nova-cell0-f8dc-account-create-update-442th" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.386382 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-af76-account-create-update-l4c42"] Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.388099 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-af76-account-create-update-l4c42" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.389356 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-13d7-account-create-update-jc8qq" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.390694 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.396930 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell3-db-create-sdzn8" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.422229 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29501821-lp2m4" event={"ID":"b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c","Type":"ContainerDied","Data":"22a3e5c6d6962cf6b9f27c37a2bd79f88eb7c74b7ae6f3a073210496c5f9e509"} Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.422277 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="22a3e5c6d6962cf6b9f27c37a2bd79f88eb7c74b7ae6f3a073210496c5f9e509" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.422340 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29501821-lp2m4" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.432137 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell2-db-create-dczdx"] Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.433220 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell2-db-create-dczdx" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.453526 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6cj6\" (UniqueName: \"kubernetes.io/projected/f47f17fe-5026-4c73-8b4e-3ac63c890885-kube-api-access-p6cj6\") pod \"nova-cell1-af76-account-create-update-l4c42\" (UID: \"f47f17fe-5026-4c73-8b4e-3ac63c890885\") " pod="openstack/nova-cell1-af76-account-create-update-l4c42" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.453595 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d3408867-7c55-4f2a-ba1a-d47cc8dd38cb-operator-scripts\") pod \"nova-cell1-db-create-pz4fh\" (UID: \"d3408867-7c55-4f2a-ba1a-d47cc8dd38cb\") " pod="openstack/nova-cell1-db-create-pz4fh" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.453833 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f47f17fe-5026-4c73-8b4e-3ac63c890885-operator-scripts\") pod \"nova-cell1-af76-account-create-update-l4c42\" (UID: \"f47f17fe-5026-4c73-8b4e-3ac63c890885\") " pod="openstack/nova-cell1-af76-account-create-update-l4c42" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.454002 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9mrfz\" (UniqueName: \"kubernetes.io/projected/d3408867-7c55-4f2a-ba1a-d47cc8dd38cb-kube-api-access-9mrfz\") pod \"nova-cell1-db-create-pz4fh\" (UID: \"d3408867-7c55-4f2a-ba1a-d47cc8dd38cb\") " pod="openstack/nova-cell1-db-create-pz4fh" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.458944 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d3408867-7c55-4f2a-ba1a-d47cc8dd38cb-operator-scripts\") pod \"nova-cell1-db-create-pz4fh\" (UID: \"d3408867-7c55-4f2a-ba1a-d47cc8dd38cb\") " pod="openstack/nova-cell1-db-create-pz4fh" 
Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.468012 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell2-db-create-dczdx"]
Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.482184 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-af76-account-create-update-l4c42"]
Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.483044 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9mrfz\" (UniqueName: \"kubernetes.io/projected/d3408867-7c55-4f2a-ba1a-d47cc8dd38cb-kube-api-access-9mrfz\") pod \"nova-cell1-db-create-pz4fh\" (UID: \"d3408867-7c55-4f2a-ba1a-d47cc8dd38cb\") " pod="openstack/nova-cell1-db-create-pz4fh"
Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.498158 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-f8dc-account-create-update-442th"
Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.518749 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell2-594c-account-create-update-wpnf2"]
Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.520049 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell2-594c-account-create-update-wpnf2"
Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.524680 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell2-db-secret"
Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.537577 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell2-594c-account-create-update-wpnf2"]
Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.557668 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d14b7d22-8788-441a-9b12-a410d9622e74-operator-scripts\") pod \"nova-cell2-594c-account-create-update-wpnf2\" (UID: \"d14b7d22-8788-441a-9b12-a410d9622e74\") " pod="openstack/nova-cell2-594c-account-create-update-wpnf2"
Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.560308 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6tdt\" (UniqueName: \"kubernetes.io/projected/d14b7d22-8788-441a-9b12-a410d9622e74-kube-api-access-r6tdt\") pod \"nova-cell2-594c-account-create-update-wpnf2\" (UID: \"d14b7d22-8788-441a-9b12-a410d9622e74\") " pod="openstack/nova-cell2-594c-account-create-update-wpnf2"
Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.560698 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6b7abcb2-6609-4cbf-aee3-936f5deba7dd-operator-scripts\") pod \"nova-cell2-db-create-dczdx\" (UID: \"6b7abcb2-6609-4cbf-aee3-936f5deba7dd\") " pod="openstack/nova-cell2-db-create-dczdx"
Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.560907 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6cj6\" (UniqueName: \"kubernetes.io/projected/f47f17fe-5026-4c73-8b4e-3ac63c890885-kube-api-access-p6cj6\") pod \"nova-cell1-af76-account-create-update-l4c42\" (UID: \"f47f17fe-5026-4c73-8b4e-3ac63c890885\") " pod="openstack/nova-cell1-af76-account-create-update-l4c42"
Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.563193 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99w22\" (UniqueName: \"kubernetes.io/projected/6b7abcb2-6609-4cbf-aee3-936f5deba7dd-kube-api-access-99w22\") pod \"nova-cell2-db-create-dczdx\" (UID: \"6b7abcb2-6609-4cbf-aee3-936f5deba7dd\") " pod="openstack/nova-cell2-db-create-dczdx"
Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.563548 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f47f17fe-5026-4c73-8b4e-3ac63c890885-operator-scripts\") pod \"nova-cell1-af76-account-create-update-l4c42\" (UID: \"f47f17fe-5026-4c73-8b4e-3ac63c890885\") " pod="openstack/nova-cell1-af76-account-create-update-l4c42"
Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.567180 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f47f17fe-5026-4c73-8b4e-3ac63c890885-operator-scripts\") pod \"nova-cell1-af76-account-create-update-l4c42\" (UID: \"f47f17fe-5026-4c73-8b4e-3ac63c890885\") " pod="openstack/nova-cell1-af76-account-create-update-l4c42"
Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.587251 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6cj6\" (UniqueName: \"kubernetes.io/projected/f47f17fe-5026-4c73-8b4e-3ac63c890885-kube-api-access-p6cj6\") pod \"nova-cell1-af76-account-create-update-l4c42\" (UID: \"f47f17fe-5026-4c73-8b4e-3ac63c890885\") " pod="openstack/nova-cell1-af76-account-create-update-l4c42"
Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.631830 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-pz4fh"
Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.667853 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d14b7d22-8788-441a-9b12-a410d9622e74-operator-scripts\") pod \"nova-cell2-594c-account-create-update-wpnf2\" (UID: \"d14b7d22-8788-441a-9b12-a410d9622e74\") " pod="openstack/nova-cell2-594c-account-create-update-wpnf2"
Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.667909 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6tdt\" (UniqueName: \"kubernetes.io/projected/d14b7d22-8788-441a-9b12-a410d9622e74-kube-api-access-r6tdt\") pod \"nova-cell2-594c-account-create-update-wpnf2\" (UID: \"d14b7d22-8788-441a-9b12-a410d9622e74\") " pod="openstack/nova-cell2-594c-account-create-update-wpnf2"
Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.667937 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6b7abcb2-6609-4cbf-aee3-936f5deba7dd-operator-scripts\") pod \"nova-cell2-db-create-dczdx\" (UID: \"6b7abcb2-6609-4cbf-aee3-936f5deba7dd\") " pod="openstack/nova-cell2-db-create-dczdx"
Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.667975 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99w22\" (UniqueName: \"kubernetes.io/projected/6b7abcb2-6609-4cbf-aee3-936f5deba7dd-kube-api-access-99w22\") pod \"nova-cell2-db-create-dczdx\" (UID: \"6b7abcb2-6609-4cbf-aee3-936f5deba7dd\") " pod="openstack/nova-cell2-db-create-dczdx"
\"kubernetes.io/configmap/d14b7d22-8788-441a-9b12-a410d9622e74-operator-scripts\") pod \"nova-cell2-594c-account-create-update-wpnf2\" (UID: \"d14b7d22-8788-441a-9b12-a410d9622e74\") " pod="openstack/nova-cell2-594c-account-create-update-wpnf2" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.669529 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6b7abcb2-6609-4cbf-aee3-936f5deba7dd-operator-scripts\") pod \"nova-cell2-db-create-dczdx\" (UID: \"6b7abcb2-6609-4cbf-aee3-936f5deba7dd\") " pod="openstack/nova-cell2-db-create-dczdx" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.695464 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99w22\" (UniqueName: \"kubernetes.io/projected/6b7abcb2-6609-4cbf-aee3-936f5deba7dd-kube-api-access-99w22\") pod \"nova-cell2-db-create-dczdx\" (UID: \"6b7abcb2-6609-4cbf-aee3-936f5deba7dd\") " pod="openstack/nova-cell2-db-create-dczdx" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.729113 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r6tdt\" (UniqueName: \"kubernetes.io/projected/d14b7d22-8788-441a-9b12-a410d9622e74-kube-api-access-r6tdt\") pod \"nova-cell2-594c-account-create-update-wpnf2\" (UID: \"d14b7d22-8788-441a-9b12-a410d9622e74\") " pod="openstack/nova-cell2-594c-account-create-update-wpnf2" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.730948 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-af76-account-create-update-l4c42" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.803635 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell2-db-create-dczdx" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.829524 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell3-a861-account-create-update-4t7rm"] Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.856142 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell2-594c-account-create-update-wpnf2" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.856182 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell3-a861-account-create-update-4t7rm" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.886165 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell3-a861-account-create-update-4t7rm"] Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.889367 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell3-db-secret" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.904382 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-clppj"] Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.917881 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-gc8cr"] Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.987896 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dn6xt\" (UniqueName: \"kubernetes.io/projected/a9526202-62b4-4bfe-8638-b80b72772f9d-kube-api-access-dn6xt\") pod \"nova-cell3-a861-account-create-update-4t7rm\" (UID: \"a9526202-62b4-4bfe-8638-b80b72772f9d\") " pod="openstack/nova-cell3-a861-account-create-update-4t7rm" Feb 03 09:01:05 crc kubenswrapper[4998]: I0203 09:01:05.988085 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9526202-62b4-4bfe-8638-b80b72772f9d-operator-scripts\") pod \"nova-cell3-a861-account-create-update-4t7rm\" (UID: \"a9526202-62b4-4bfe-8638-b80b72772f9d\") " pod="openstack/nova-cell3-a861-account-create-update-4t7rm" Feb 03 09:01:06 crc kubenswrapper[4998]: I0203 09:01:06.089409 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9526202-62b4-4bfe-8638-b80b72772f9d-operator-scripts\") pod \"nova-cell3-a861-account-create-update-4t7rm\" (UID: \"a9526202-62b4-4bfe-8638-b80b72772f9d\") " pod="openstack/nova-cell3-a861-account-create-update-4t7rm" Feb 03 09:01:06 crc kubenswrapper[4998]: I0203 09:01:06.089515 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dn6xt\" (UniqueName: \"kubernetes.io/projected/a9526202-62b4-4bfe-8638-b80b72772f9d-kube-api-access-dn6xt\") pod \"nova-cell3-a861-account-create-update-4t7rm\" (UID: \"a9526202-62b4-4bfe-8638-b80b72772f9d\") " pod="openstack/nova-cell3-a861-account-create-update-4t7rm" Feb 03 09:01:06 crc kubenswrapper[4998]: I0203 09:01:06.090247 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9526202-62b4-4bfe-8638-b80b72772f9d-operator-scripts\") pod \"nova-cell3-a861-account-create-update-4t7rm\" (UID: \"a9526202-62b4-4bfe-8638-b80b72772f9d\") " pod="openstack/nova-cell3-a861-account-create-update-4t7rm" Feb 03 09:01:06 crc kubenswrapper[4998]: I0203 09:01:06.125641 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dn6xt\" (UniqueName: \"kubernetes.io/projected/a9526202-62b4-4bfe-8638-b80b72772f9d-kube-api-access-dn6xt\") pod \"nova-cell3-a861-account-create-update-4t7rm\" (UID: \"a9526202-62b4-4bfe-8638-b80b72772f9d\") " pod="openstack/nova-cell3-a861-account-create-update-4t7rm" Feb 03 09:01:06 crc kubenswrapper[4998]: I0203 09:01:06.144765 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell3-db-create-sdzn8"] Feb 03 09:01:06 crc kubenswrapper[4998]: I0203 
Feb 03 09:01:06 crc kubenswrapper[4998]: I0203 09:01:06.174482 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-13d7-account-create-update-jc8qq"]
Feb 03 09:01:06 crc kubenswrapper[4998]: I0203 09:01:06.267453 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell3-a861-account-create-update-4t7rm"
Feb 03 09:01:06 crc kubenswrapper[4998]: I0203 09:01:06.454758 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-gc8cr" event={"ID":"77ff460f-4a6b-4f54-987d-e3b87003e735","Type":"ContainerStarted","Data":"88b110efb177f72c1415962d3f641262f88abbaf621158a45c7187443cf347b9"}
Feb 03 09:01:06 crc kubenswrapper[4998]: I0203 09:01:06.455045 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-gc8cr" event={"ID":"77ff460f-4a6b-4f54-987d-e3b87003e735","Type":"ContainerStarted","Data":"5364bb6711d5b3204b5a630de1fd95849abca41655afec47503e4e737284cd8e"}
Feb 03 09:01:06 crc kubenswrapper[4998]: I0203 09:01:06.473990 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-clppj" event={"ID":"43447ea3-026c-476d-a8d0-f44de45d6e67","Type":"ContainerStarted","Data":"f4c40facc0946f455ed4775b6d0e5cb263c684cf95b573e2f41a8a7c803deba7"}
Feb 03 09:01:06 crc kubenswrapper[4998]: I0203 09:01:06.474050 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-clppj" event={"ID":"43447ea3-026c-476d-a8d0-f44de45d6e67","Type":"ContainerStarted","Data":"cbb9506363ad989c061c2547151271688ae60828a8393f81183b630b632675d2"}
Feb 03 09:01:06 crc kubenswrapper[4998]: I0203 09:01:06.475531 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-13d7-account-create-update-jc8qq" event={"ID":"392cb9be-6c09-4ec6-8615-6d9978b0dfc9","Type":"ContainerStarted","Data":"4cc896f4ae7ea5bb2bdcf1a4df91918512fef1befddd068a4b5095a1015f4fdd"}
Feb 03 09:01:06 crc kubenswrapper[4998]: I0203 09:01:06.480690 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell3-db-create-sdzn8" event={"ID":"d3975bdb-b42f-49ee-ac35-00ddcb9760af","Type":"ContainerStarted","Data":"44fe65a74703ed7d42bc9a7de8b9d8dd1751724073370364ed70fd611050347f"}
Feb 03 09:01:06 crc kubenswrapper[4998]: I0203 09:01:06.482611 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-f8dc-account-create-update-442th"]
Feb 03 09:01:06 crc kubenswrapper[4998]: I0203 09:01:06.483184 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-db-create-gc8cr" podStartSLOduration=2.483166982 podStartE2EDuration="2.483166982s" podCreationTimestamp="2026-02-03 09:01:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 09:01:06.471182902 +0000 UTC m=+8104.757876708" watchObservedRunningTime="2026-02-03 09:01:06.483166982 +0000 UTC m=+8104.769860788"
Feb 03 09:01:06 crc kubenswrapper[4998]: I0203 09:01:06.507172 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-db-create-clppj" podStartSLOduration=2.507147183 podStartE2EDuration="2.507147183s" podCreationTimestamp="2026-02-03 09:01:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 09:01:06.492681772 +0000 UTC m=+8104.779375578" watchObservedRunningTime="2026-02-03 09:01:06.507147183 +0000 UTC m=+8104.793840989"
Feb 03 09:01:06 crc kubenswrapper[4998]: I0203 09:01:06.564681 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-pz4fh"]
Feb 03 09:01:06 crc kubenswrapper[4998]: I0203 09:01:06.579234 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell2-db-create-dczdx"]
Feb 03 09:01:06 crc kubenswrapper[4998]: W0203 09:01:06.604727 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd3408867_7c55_4f2a_ba1a_d47cc8dd38cb.slice/crio-2b9b246e9790dac12890fa39a0446ae2de8f9f403ebe85c3dc799f3bbec9308a WatchSource:0}: Error finding container 2b9b246e9790dac12890fa39a0446ae2de8f9f403ebe85c3dc799f3bbec9308a: Status 404 returned error can't find the container with id 2b9b246e9790dac12890fa39a0446ae2de8f9f403ebe85c3dc799f3bbec9308a
Feb 03 09:01:06 crc kubenswrapper[4998]: I0203 09:01:06.758812 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-af76-account-create-update-l4c42"]
Feb 03 09:01:06 crc kubenswrapper[4998]: I0203 09:01:06.771108 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell2-594c-account-create-update-wpnf2"]
Feb 03 09:01:06 crc kubenswrapper[4998]: I0203 09:01:06.980494 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell3-a861-account-create-update-4t7rm"]
Feb 03 09:01:07 crc kubenswrapper[4998]: I0203 09:01:07.497289 4998 generic.go:334] "Generic (PLEG): container finished" podID="6b7abcb2-6609-4cbf-aee3-936f5deba7dd" containerID="afcdb300c233945c662f2e1e4e16a6874a957fece7358a837479ee0e15c1076d" exitCode=0
Feb 03 09:01:07 crc kubenswrapper[4998]: I0203 09:01:07.497407 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell2-db-create-dczdx" event={"ID":"6b7abcb2-6609-4cbf-aee3-936f5deba7dd","Type":"ContainerDied","Data":"afcdb300c233945c662f2e1e4e16a6874a957fece7358a837479ee0e15c1076d"}
Feb 03 09:01:07 crc kubenswrapper[4998]: I0203 09:01:07.497440 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell2-db-create-dczdx" event={"ID":"6b7abcb2-6609-4cbf-aee3-936f5deba7dd","Type":"ContainerStarted","Data":"db00958b691227dc89c9d4039c9331ed1f1f459ff8234e9af7730ca9eefcae67"}
Feb 03 09:01:07 crc kubenswrapper[4998]: I0203 09:01:07.500629 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-pz4fh" event={"ID":"d3408867-7c55-4f2a-ba1a-d47cc8dd38cb","Type":"ContainerStarted","Data":"1af80b4f8265d321ee73a6fd821dc4c687f3882fe53e7b77676c2e5bc35ec87c"}
Feb 03 09:01:07 crc kubenswrapper[4998]: I0203 09:01:07.500683 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-pz4fh" event={"ID":"d3408867-7c55-4f2a-ba1a-d47cc8dd38cb","Type":"ContainerStarted","Data":"2b9b246e9790dac12890fa39a0446ae2de8f9f403ebe85c3dc799f3bbec9308a"}
Feb 03 09:01:07 crc kubenswrapper[4998]: I0203 09:01:07.502877 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-f8dc-account-create-update-442th" event={"ID":"c8770d44-dcad-42f2-8637-1cf4213b1358","Type":"ContainerStarted","Data":"e4226a46a622fe93175838739710ae8e71ddbaa0c3ec1f7e2436235e32d82646"}
Feb 03 09:01:07 crc kubenswrapper[4998]: I0203 09:01:07.502917 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-f8dc-account-create-update-442th" event={"ID":"c8770d44-dcad-42f2-8637-1cf4213b1358","Type":"ContainerStarted","Data":"7549f20773e20982c3eaa79e28fd6fccf0d04856e6a6210bb900cfff56311527"}
Feb 03 09:01:07 crc kubenswrapper[4998]: I0203 09:01:07.505226 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell2-594c-account-create-update-wpnf2" event={"ID":"d14b7d22-8788-441a-9b12-a410d9622e74","Type":"ContainerStarted","Data":"50b843722819961839fed788494b0822c527d53d7ef00ed12ed55eaea20bb7cc"}
Feb 03 09:01:07 crc kubenswrapper[4998]: I0203 09:01:07.505259 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell2-594c-account-create-update-wpnf2" event={"ID":"d14b7d22-8788-441a-9b12-a410d9622e74","Type":"ContainerStarted","Data":"5be1533783508fb4b6c3aa41c9ad495f6c5383f968a84f057b763fa2d2093f20"}
Feb 03 09:01:07 crc kubenswrapper[4998]: I0203 09:01:07.507193 4998 generic.go:334] "Generic (PLEG): container finished" podID="392cb9be-6c09-4ec6-8615-6d9978b0dfc9" containerID="0d7192364c123d3ec616888c63408e26da0a4d52e6d9646ecc38484da95a632b" exitCode=0
Feb 03 09:01:07 crc kubenswrapper[4998]: I0203 09:01:07.507259 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-13d7-account-create-update-jc8qq" event={"ID":"392cb9be-6c09-4ec6-8615-6d9978b0dfc9","Type":"ContainerDied","Data":"0d7192364c123d3ec616888c63408e26da0a4d52e6d9646ecc38484da95a632b"}
Feb 03 09:01:07 crc kubenswrapper[4998]: I0203 09:01:07.510135 4998 generic.go:334] "Generic (PLEG): container finished" podID="f47f17fe-5026-4c73-8b4e-3ac63c890885" containerID="3cd98e6664289bf77a93bbf0de8fd9ff2ef9b6a0d7dbcb5239ade7bdc42f355c" exitCode=0
Feb 03 09:01:07 crc kubenswrapper[4998]: I0203 09:01:07.510276 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-af76-account-create-update-l4c42" event={"ID":"f47f17fe-5026-4c73-8b4e-3ac63c890885","Type":"ContainerDied","Data":"3cd98e6664289bf77a93bbf0de8fd9ff2ef9b6a0d7dbcb5239ade7bdc42f355c"}
Feb 03 09:01:07 crc kubenswrapper[4998]: I0203 09:01:07.510308 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-af76-account-create-update-l4c42" event={"ID":"f47f17fe-5026-4c73-8b4e-3ac63c890885","Type":"ContainerStarted","Data":"afc1db19bfb4dd5cae17fd6c6e5698d691cb67260634325519ea5ef8a39a6705"}
Feb 03 09:01:07 crc kubenswrapper[4998]: I0203 09:01:07.518454 4998 generic.go:334] "Generic (PLEG): container finished" podID="d3975bdb-b42f-49ee-ac35-00ddcb9760af" containerID="93d79308ba28993bfcffa86b272cbe1cc9a9bc5a6b1bef7a9801bd7c3ad901cb" exitCode=0
Feb 03 09:01:07 crc kubenswrapper[4998]: I0203 09:01:07.518534 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell3-db-create-sdzn8" event={"ID":"d3975bdb-b42f-49ee-ac35-00ddcb9760af","Type":"ContainerDied","Data":"93d79308ba28993bfcffa86b272cbe1cc9a9bc5a6b1bef7a9801bd7c3ad901cb"}
Feb 03 09:01:07 crc kubenswrapper[4998]: I0203 09:01:07.525606 4998 generic.go:334] "Generic (PLEG): container finished" podID="77ff460f-4a6b-4f54-987d-e3b87003e735" containerID="88b110efb177f72c1415962d3f641262f88abbaf621158a45c7187443cf347b9" exitCode=0
Feb 03 09:01:07 crc kubenswrapper[4998]: I0203 09:01:07.525688 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-gc8cr" event={"ID":"77ff460f-4a6b-4f54-987d-e3b87003e735","Type":"ContainerDied","Data":"88b110efb177f72c1415962d3f641262f88abbaf621158a45c7187443cf347b9"}
Feb 03 09:01:07 crc kubenswrapper[4998]: I0203 09:01:07.532500 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell3-a861-account-create-update-4t7rm" event={"ID":"a9526202-62b4-4bfe-8638-b80b72772f9d","Type":"ContainerStarted","Data":"a6822c74e466c3758a9034b89e0e5332e70a083a17479363b43c3411698aa852"}
Feb 03 09:01:07 crc kubenswrapper[4998]: I0203 09:01:07.532549 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell3-a861-account-create-update-4t7rm" event={"ID":"a9526202-62b4-4bfe-8638-b80b72772f9d","Type":"ContainerStarted","Data":"e4c6ee13528433535809a65f743fcc0cfe4a3a53ec3ab0264345c9ce2f85927d"}
Feb 03 09:01:07 crc kubenswrapper[4998]: I0203 09:01:07.534521 4998 generic.go:334] "Generic (PLEG): container finished" podID="43447ea3-026c-476d-a8d0-f44de45d6e67" containerID="f4c40facc0946f455ed4775b6d0e5cb263c684cf95b573e2f41a8a7c803deba7" exitCode=0
Feb 03 09:01:07 crc kubenswrapper[4998]: I0203 09:01:07.534560 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-clppj" event={"ID":"43447ea3-026c-476d-a8d0-f44de45d6e67","Type":"ContainerDied","Data":"f4c40facc0946f455ed4775b6d0e5cb263c684cf95b573e2f41a8a7c803deba7"}
Feb 03 09:01:07 crc kubenswrapper[4998]: I0203 09:01:07.537023 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-f8dc-account-create-update-442th" podStartSLOduration=2.53700125 podStartE2EDuration="2.53700125s" podCreationTimestamp="2026-02-03 09:01:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 09:01:07.530581468 +0000 UTC m=+8105.817275294" watchObservedRunningTime="2026-02-03 09:01:07.53700125 +0000 UTC m=+8105.823695066"
Feb 03 09:01:08 crc kubenswrapper[4998]: I0203 09:01:08.544829 4998 generic.go:334] "Generic (PLEG): container finished" podID="d14b7d22-8788-441a-9b12-a410d9622e74" containerID="50b843722819961839fed788494b0822c527d53d7ef00ed12ed55eaea20bb7cc" exitCode=0
Feb 03 09:01:08 crc kubenswrapper[4998]: I0203 09:01:08.545053 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell2-594c-account-create-update-wpnf2" event={"ID":"d14b7d22-8788-441a-9b12-a410d9622e74","Type":"ContainerDied","Data":"50b843722819961839fed788494b0822c527d53d7ef00ed12ed55eaea20bb7cc"}
Feb 03 09:01:08 crc kubenswrapper[4998]: I0203 09:01:08.549037 4998 generic.go:334] "Generic (PLEG): container finished" podID="d3408867-7c55-4f2a-ba1a-d47cc8dd38cb" containerID="1af80b4f8265d321ee73a6fd821dc4c687f3882fe53e7b77676c2e5bc35ec87c" exitCode=0
Feb 03 09:01:08 crc kubenswrapper[4998]: I0203 09:01:08.549288 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-pz4fh" event={"ID":"d3408867-7c55-4f2a-ba1a-d47cc8dd38cb","Type":"ContainerDied","Data":"1af80b4f8265d321ee73a6fd821dc4c687f3882fe53e7b77676c2e5bc35ec87c"}
Feb 03 09:01:08 crc kubenswrapper[4998]: I0203 09:01:08.551563 4998 generic.go:334] "Generic (PLEG): container finished" podID="c8770d44-dcad-42f2-8637-1cf4213b1358" containerID="e4226a46a622fe93175838739710ae8e71ddbaa0c3ec1f7e2436235e32d82646" exitCode=0
Feb 03 09:01:08 crc kubenswrapper[4998]: I0203 09:01:08.551935 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-f8dc-account-create-update-442th" event={"ID":"c8770d44-dcad-42f2-8637-1cf4213b1358","Type":"ContainerDied","Data":"e4226a46a622fe93175838739710ae8e71ddbaa0c3ec1f7e2436235e32d82646"}
containerID="a6822c74e466c3758a9034b89e0e5332e70a083a17479363b43c3411698aa852" exitCode=0 Feb 03 09:01:08 crc kubenswrapper[4998]: I0203 09:01:08.563717 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell3-a861-account-create-update-4t7rm" event={"ID":"a9526202-62b4-4bfe-8638-b80b72772f9d","Type":"ContainerDied","Data":"a6822c74e466c3758a9034b89e0e5332e70a083a17479363b43c3411698aa852"} Feb 03 09:01:08 crc kubenswrapper[4998]: I0203 09:01:08.991196 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell2-db-create-dczdx" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.067854 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6b7abcb2-6609-4cbf-aee3-936f5deba7dd-operator-scripts\") pod \"6b7abcb2-6609-4cbf-aee3-936f5deba7dd\" (UID: \"6b7abcb2-6609-4cbf-aee3-936f5deba7dd\") " Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.067954 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-99w22\" (UniqueName: \"kubernetes.io/projected/6b7abcb2-6609-4cbf-aee3-936f5deba7dd-kube-api-access-99w22\") pod \"6b7abcb2-6609-4cbf-aee3-936f5deba7dd\" (UID: \"6b7abcb2-6609-4cbf-aee3-936f5deba7dd\") " Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.068405 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6b7abcb2-6609-4cbf-aee3-936f5deba7dd-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6b7abcb2-6609-4cbf-aee3-936f5deba7dd" (UID: "6b7abcb2-6609-4cbf-aee3-936f5deba7dd"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.068803 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6b7abcb2-6609-4cbf-aee3-936f5deba7dd-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.086030 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b7abcb2-6609-4cbf-aee3-936f5deba7dd-kube-api-access-99w22" (OuterVolumeSpecName: "kube-api-access-99w22") pod "6b7abcb2-6609-4cbf-aee3-936f5deba7dd" (UID: "6b7abcb2-6609-4cbf-aee3-936f5deba7dd"). InnerVolumeSpecName "kube-api-access-99w22". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.171022 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-99w22\" (UniqueName: \"kubernetes.io/projected/6b7abcb2-6609-4cbf-aee3-936f5deba7dd-kube-api-access-99w22\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.268802 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell3-db-create-sdzn8" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.277603 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-af76-account-create-update-l4c42" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.284385 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-clppj" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.296514 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-gc8cr" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.301767 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-13d7-account-create-update-jc8qq" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.307844 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell2-594c-account-create-update-wpnf2" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.329451 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-pz4fh" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.335800 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell3-a861-account-create-update-4t7rm" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.373665 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dz76b\" (UniqueName: \"kubernetes.io/projected/392cb9be-6c09-4ec6-8615-6d9978b0dfc9-kube-api-access-dz76b\") pod \"392cb9be-6c09-4ec6-8615-6d9978b0dfc9\" (UID: \"392cb9be-6c09-4ec6-8615-6d9978b0dfc9\") " Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.373705 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d3408867-7c55-4f2a-ba1a-d47cc8dd38cb-operator-scripts\") pod \"d3408867-7c55-4f2a-ba1a-d47cc8dd38cb\" (UID: \"d3408867-7c55-4f2a-ba1a-d47cc8dd38cb\") " Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.373749 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9mrfz\" (UniqueName: \"kubernetes.io/projected/d3408867-7c55-4f2a-ba1a-d47cc8dd38cb-kube-api-access-9mrfz\") pod \"d3408867-7c55-4f2a-ba1a-d47cc8dd38cb\" (UID: \"d3408867-7c55-4f2a-ba1a-d47cc8dd38cb\") " Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.373771 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7cscr\" (UniqueName: \"kubernetes.io/projected/d3975bdb-b42f-49ee-ac35-00ddcb9760af-kube-api-access-7cscr\") pod \"d3975bdb-b42f-49ee-ac35-00ddcb9760af\" (UID: \"d3975bdb-b42f-49ee-ac35-00ddcb9760af\") " Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.373897 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r6tdt\" (UniqueName: \"kubernetes.io/projected/d14b7d22-8788-441a-9b12-a410d9622e74-kube-api-access-r6tdt\") pod \"d14b7d22-8788-441a-9b12-a410d9622e74\" (UID: \"d14b7d22-8788-441a-9b12-a410d9622e74\") " Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.373918 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d14b7d22-8788-441a-9b12-a410d9622e74-operator-scripts\") pod \"d14b7d22-8788-441a-9b12-a410d9622e74\" (UID: \"d14b7d22-8788-441a-9b12-a410d9622e74\") " Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.373959 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9526202-62b4-4bfe-8638-b80b72772f9d-operator-scripts\") pod \"a9526202-62b4-4bfe-8638-b80b72772f9d\" (UID: \"a9526202-62b4-4bfe-8638-b80b72772f9d\") " Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.373975 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"kube-api-access-gbhrq\" (UniqueName: \"kubernetes.io/projected/43447ea3-026c-476d-a8d0-f44de45d6e67-kube-api-access-gbhrq\") pod \"43447ea3-026c-476d-a8d0-f44de45d6e67\" (UID: \"43447ea3-026c-476d-a8d0-f44de45d6e67\") " Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.373989 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-878wp\" (UniqueName: \"kubernetes.io/projected/77ff460f-4a6b-4f54-987d-e3b87003e735-kube-api-access-878wp\") pod \"77ff460f-4a6b-4f54-987d-e3b87003e735\" (UID: \"77ff460f-4a6b-4f54-987d-e3b87003e735\") " Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.374006 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f47f17fe-5026-4c73-8b4e-3ac63c890885-operator-scripts\") pod \"f47f17fe-5026-4c73-8b4e-3ac63c890885\" (UID: \"f47f17fe-5026-4c73-8b4e-3ac63c890885\") " Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.374025 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/392cb9be-6c09-4ec6-8615-6d9978b0dfc9-operator-scripts\") pod \"392cb9be-6c09-4ec6-8615-6d9978b0dfc9\" (UID: \"392cb9be-6c09-4ec6-8615-6d9978b0dfc9\") " Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.374043 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d3975bdb-b42f-49ee-ac35-00ddcb9760af-operator-scripts\") pod \"d3975bdb-b42f-49ee-ac35-00ddcb9760af\" (UID: \"d3975bdb-b42f-49ee-ac35-00ddcb9760af\") " Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.374078 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/77ff460f-4a6b-4f54-987d-e3b87003e735-operator-scripts\") pod \"77ff460f-4a6b-4f54-987d-e3b87003e735\" (UID: \"77ff460f-4a6b-4f54-987d-e3b87003e735\") " Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.374294 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dn6xt\" (UniqueName: \"kubernetes.io/projected/a9526202-62b4-4bfe-8638-b80b72772f9d-kube-api-access-dn6xt\") pod \"a9526202-62b4-4bfe-8638-b80b72772f9d\" (UID: \"a9526202-62b4-4bfe-8638-b80b72772f9d\") " Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.374332 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p6cj6\" (UniqueName: \"kubernetes.io/projected/f47f17fe-5026-4c73-8b4e-3ac63c890885-kube-api-access-p6cj6\") pod \"f47f17fe-5026-4c73-8b4e-3ac63c890885\" (UID: \"f47f17fe-5026-4c73-8b4e-3ac63c890885\") " Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.374392 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43447ea3-026c-476d-a8d0-f44de45d6e67-operator-scripts\") pod \"43447ea3-026c-476d-a8d0-f44de45d6e67\" (UID: \"43447ea3-026c-476d-a8d0-f44de45d6e67\") " Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.375032 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3975bdb-b42f-49ee-ac35-00ddcb9760af-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d3975bdb-b42f-49ee-ac35-00ddcb9760af" (UID: "d3975bdb-b42f-49ee-ac35-00ddcb9760af"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.375130 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43447ea3-026c-476d-a8d0-f44de45d6e67-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "43447ea3-026c-476d-a8d0-f44de45d6e67" (UID: "43447ea3-026c-476d-a8d0-f44de45d6e67"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.375500 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/77ff460f-4a6b-4f54-987d-e3b87003e735-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "77ff460f-4a6b-4f54-987d-e3b87003e735" (UID: "77ff460f-4a6b-4f54-987d-e3b87003e735"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.377989 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f47f17fe-5026-4c73-8b4e-3ac63c890885-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f47f17fe-5026-4c73-8b4e-3ac63c890885" (UID: "f47f17fe-5026-4c73-8b4e-3ac63c890885"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.378069 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d14b7d22-8788-441a-9b12-a410d9622e74-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d14b7d22-8788-441a-9b12-a410d9622e74" (UID: "d14b7d22-8788-441a-9b12-a410d9622e74"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.378075 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/392cb9be-6c09-4ec6-8615-6d9978b0dfc9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "392cb9be-6c09-4ec6-8615-6d9978b0dfc9" (UID: "392cb9be-6c09-4ec6-8615-6d9978b0dfc9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.378159 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9526202-62b4-4bfe-8638-b80b72772f9d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a9526202-62b4-4bfe-8638-b80b72772f9d" (UID: "a9526202-62b4-4bfe-8638-b80b72772f9d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.378376 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3408867-7c55-4f2a-ba1a-d47cc8dd38cb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d3408867-7c55-4f2a-ba1a-d47cc8dd38cb" (UID: "d3408867-7c55-4f2a-ba1a-d47cc8dd38cb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.378473 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43447ea3-026c-476d-a8d0-f44de45d6e67-kube-api-access-gbhrq" (OuterVolumeSpecName: "kube-api-access-gbhrq") pod "43447ea3-026c-476d-a8d0-f44de45d6e67" (UID: "43447ea3-026c-476d-a8d0-f44de45d6e67"). 
InnerVolumeSpecName "kube-api-access-gbhrq". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.379468 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77ff460f-4a6b-4f54-987d-e3b87003e735-kube-api-access-878wp" (OuterVolumeSpecName: "kube-api-access-878wp") pod "77ff460f-4a6b-4f54-987d-e3b87003e735" (UID: "77ff460f-4a6b-4f54-987d-e3b87003e735"). InnerVolumeSpecName "kube-api-access-878wp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.381048 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f47f17fe-5026-4c73-8b4e-3ac63c890885-kube-api-access-p6cj6" (OuterVolumeSpecName: "kube-api-access-p6cj6") pod "f47f17fe-5026-4c73-8b4e-3ac63c890885" (UID: "f47f17fe-5026-4c73-8b4e-3ac63c890885"). InnerVolumeSpecName "kube-api-access-p6cj6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.381085 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9526202-62b4-4bfe-8638-b80b72772f9d-kube-api-access-dn6xt" (OuterVolumeSpecName: "kube-api-access-dn6xt") pod "a9526202-62b4-4bfe-8638-b80b72772f9d" (UID: "a9526202-62b4-4bfe-8638-b80b72772f9d"). InnerVolumeSpecName "kube-api-access-dn6xt". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.383550 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3975bdb-b42f-49ee-ac35-00ddcb9760af-kube-api-access-7cscr" (OuterVolumeSpecName: "kube-api-access-7cscr") pod "d3975bdb-b42f-49ee-ac35-00ddcb9760af" (UID: "d3975bdb-b42f-49ee-ac35-00ddcb9760af"). InnerVolumeSpecName "kube-api-access-7cscr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.383630 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3408867-7c55-4f2a-ba1a-d47cc8dd38cb-kube-api-access-9mrfz" (OuterVolumeSpecName: "kube-api-access-9mrfz") pod "d3408867-7c55-4f2a-ba1a-d47cc8dd38cb" (UID: "d3408867-7c55-4f2a-ba1a-d47cc8dd38cb"). InnerVolumeSpecName "kube-api-access-9mrfz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.387016 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d14b7d22-8788-441a-9b12-a410d9622e74-kube-api-access-r6tdt" (OuterVolumeSpecName: "kube-api-access-r6tdt") pod "d14b7d22-8788-441a-9b12-a410d9622e74" (UID: "d14b7d22-8788-441a-9b12-a410d9622e74"). InnerVolumeSpecName "kube-api-access-r6tdt". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.391005 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/392cb9be-6c09-4ec6-8615-6d9978b0dfc9-kube-api-access-dz76b" (OuterVolumeSpecName: "kube-api-access-dz76b") pod "392cb9be-6c09-4ec6-8615-6d9978b0dfc9" (UID: "392cb9be-6c09-4ec6-8615-6d9978b0dfc9"). InnerVolumeSpecName "kube-api-access-dz76b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.476598 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r6tdt\" (UniqueName: \"kubernetes.io/projected/d14b7d22-8788-441a-9b12-a410d9622e74-kube-api-access-r6tdt\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.476645 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d14b7d22-8788-441a-9b12-a410d9622e74-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.476690 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9526202-62b4-4bfe-8638-b80b72772f9d-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.476704 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gbhrq\" (UniqueName: \"kubernetes.io/projected/43447ea3-026c-476d-a8d0-f44de45d6e67-kube-api-access-gbhrq\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.476719 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-878wp\" (UniqueName: \"kubernetes.io/projected/77ff460f-4a6b-4f54-987d-e3b87003e735-kube-api-access-878wp\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.476730 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f47f17fe-5026-4c73-8b4e-3ac63c890885-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.476769 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/392cb9be-6c09-4ec6-8615-6d9978b0dfc9-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.476811 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d3975bdb-b42f-49ee-ac35-00ddcb9760af-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.476823 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/77ff460f-4a6b-4f54-987d-e3b87003e735-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.476888 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dn6xt\" (UniqueName: \"kubernetes.io/projected/a9526202-62b4-4bfe-8638-b80b72772f9d-kube-api-access-dn6xt\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.476901 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p6cj6\" (UniqueName: \"kubernetes.io/projected/f47f17fe-5026-4c73-8b4e-3ac63c890885-kube-api-access-p6cj6\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.476912 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43447ea3-026c-476d-a8d0-f44de45d6e67-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.476925 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dz76b\" (UniqueName: 
\"kubernetes.io/projected/392cb9be-6c09-4ec6-8615-6d9978b0dfc9-kube-api-access-dz76b\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.476938 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d3408867-7c55-4f2a-ba1a-d47cc8dd38cb-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.476972 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9mrfz\" (UniqueName: \"kubernetes.io/projected/d3408867-7c55-4f2a-ba1a-d47cc8dd38cb-kube-api-access-9mrfz\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.476983 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7cscr\" (UniqueName: \"kubernetes.io/projected/d3975bdb-b42f-49ee-ac35-00ddcb9760af-kube-api-access-7cscr\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.573744 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-pz4fh" event={"ID":"d3408867-7c55-4f2a-ba1a-d47cc8dd38cb","Type":"ContainerDied","Data":"2b9b246e9790dac12890fa39a0446ae2de8f9f403ebe85c3dc799f3bbec9308a"} Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.573772 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-pz4fh" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.573799 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2b9b246e9790dac12890fa39a0446ae2de8f9f403ebe85c3dc799f3bbec9308a" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.575167 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell2-594c-account-create-update-wpnf2" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.575193 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell2-594c-account-create-update-wpnf2" event={"ID":"d14b7d22-8788-441a-9b12-a410d9622e74","Type":"ContainerDied","Data":"5be1533783508fb4b6c3aa41c9ad495f6c5383f968a84f057b763fa2d2093f20"} Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.575222 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5be1533783508fb4b6c3aa41c9ad495f6c5383f968a84f057b763fa2d2093f20" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.576670 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-af76-account-create-update-l4c42" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.576666 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-af76-account-create-update-l4c42" event={"ID":"f47f17fe-5026-4c73-8b4e-3ac63c890885","Type":"ContainerDied","Data":"afc1db19bfb4dd5cae17fd6c6e5698d691cb67260634325519ea5ef8a39a6705"} Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.576776 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="afc1db19bfb4dd5cae17fd6c6e5698d691cb67260634325519ea5ef8a39a6705" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.579544 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell2-db-create-dczdx" event={"ID":"6b7abcb2-6609-4cbf-aee3-936f5deba7dd","Type":"ContainerDied","Data":"db00958b691227dc89c9d4039c9331ed1f1f459ff8234e9af7730ca9eefcae67"} Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.579566 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="db00958b691227dc89c9d4039c9331ed1f1f459ff8234e9af7730ca9eefcae67" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.579611 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell2-db-create-dczdx" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.583648 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell3-a861-account-create-update-4t7rm" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.583651 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell3-a861-account-create-update-4t7rm" event={"ID":"a9526202-62b4-4bfe-8638-b80b72772f9d","Type":"ContainerDied","Data":"e4c6ee13528433535809a65f743fcc0cfe4a3a53ec3ab0264345c9ce2f85927d"} Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.583811 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e4c6ee13528433535809a65f743fcc0cfe4a3a53ec3ab0264345c9ce2f85927d" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.586630 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-clppj" event={"ID":"43447ea3-026c-476d-a8d0-f44de45d6e67","Type":"ContainerDied","Data":"cbb9506363ad989c061c2547151271688ae60828a8393f81183b630b632675d2"} Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.586660 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cbb9506363ad989c061c2547151271688ae60828a8393f81183b630b632675d2" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.586631 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-clppj" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.592325 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-13d7-account-create-update-jc8qq" event={"ID":"392cb9be-6c09-4ec6-8615-6d9978b0dfc9","Type":"ContainerDied","Data":"4cc896f4ae7ea5bb2bdcf1a4df91918512fef1befddd068a4b5095a1015f4fdd"} Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.592373 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4cc896f4ae7ea5bb2bdcf1a4df91918512fef1befddd068a4b5095a1015f4fdd" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.592441 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-13d7-account-create-update-jc8qq" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.594137 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell3-db-create-sdzn8" event={"ID":"d3975bdb-b42f-49ee-ac35-00ddcb9760af","Type":"ContainerDied","Data":"44fe65a74703ed7d42bc9a7de8b9d8dd1751724073370364ed70fd611050347f"} Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.594169 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="44fe65a74703ed7d42bc9a7de8b9d8dd1751724073370364ed70fd611050347f" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.599551 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell3-db-create-sdzn8" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.606649 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-gc8cr" event={"ID":"77ff460f-4a6b-4f54-987d-e3b87003e735","Type":"ContainerDied","Data":"5364bb6711d5b3204b5a630de1fd95849abca41655afec47503e4e737284cd8e"} Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.606699 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5364bb6711d5b3204b5a630de1fd95849abca41655afec47503e4e737284cd8e" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.606669 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-gc8cr" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.864359 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-f8dc-account-create-update-442th" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.987688 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c8770d44-dcad-42f2-8637-1cf4213b1358-operator-scripts\") pod \"c8770d44-dcad-42f2-8637-1cf4213b1358\" (UID: \"c8770d44-dcad-42f2-8637-1cf4213b1358\") " Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.987918 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngthh\" (UniqueName: \"kubernetes.io/projected/c8770d44-dcad-42f2-8637-1cf4213b1358-kube-api-access-ngthh\") pod \"c8770d44-dcad-42f2-8637-1cf4213b1358\" (UID: \"c8770d44-dcad-42f2-8637-1cf4213b1358\") " Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.988590 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c8770d44-dcad-42f2-8637-1cf4213b1358-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c8770d44-dcad-42f2-8637-1cf4213b1358" (UID: "c8770d44-dcad-42f2-8637-1cf4213b1358"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 09:01:09 crc kubenswrapper[4998]: I0203 09:01:09.992592 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8770d44-dcad-42f2-8637-1cf4213b1358-kube-api-access-ngthh" (OuterVolumeSpecName: "kube-api-access-ngthh") pod "c8770d44-dcad-42f2-8637-1cf4213b1358" (UID: "c8770d44-dcad-42f2-8637-1cf4213b1358"). InnerVolumeSpecName "kube-api-access-ngthh". 
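The util.go:48 message repeats once per completed job pod while its sandbox is gone. A quick Go sketch (assumes the log on stdin) that tallies how often each pod hits "No ready sandbox for pod can be found":

// sandbox_tally.go - illustrative sketch: counts the "No ready sandbox"
// (and "No sandbox") restart messages per pod in a kubelet log stream.
package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
	"sort"
)

var sandboxRe = regexp.MustCompile(`No (?:ready )?sandbox for pod can be found\..*?pod="([^"]+)"`)

func main() {
	counts := map[string]int{}
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024)
	for sc.Scan() {
		if m := sandboxRe.FindStringSubmatch(sc.Text()); m != nil {
			counts[m[1]]++
		}
	}
	pods := make([]string, 0, len(counts))
	for p := range counts {
		pods = append(pods, p)
	}
	sort.Strings(pods) // stable, readable output
	for _, p := range pods {
		fmt.Printf("%3d  %s\n", counts[p], p)
	}
}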
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:01:10 crc kubenswrapper[4998]: I0203 09:01:10.089950 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngthh\" (UniqueName: \"kubernetes.io/projected/c8770d44-dcad-42f2-8637-1cf4213b1358-kube-api-access-ngthh\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:10 crc kubenswrapper[4998]: I0203 09:01:10.090191 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c8770d44-dcad-42f2-8637-1cf4213b1358-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:10 crc kubenswrapper[4998]: I0203 09:01:10.615877 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-f8dc-account-create-update-442th" event={"ID":"c8770d44-dcad-42f2-8637-1cf4213b1358","Type":"ContainerDied","Data":"7549f20773e20982c3eaa79e28fd6fccf0d04856e6a6210bb900cfff56311527"} Feb 03 09:01:10 crc kubenswrapper[4998]: I0203 09:01:10.615913 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7549f20773e20982c3eaa79e28fd6fccf0d04856e6a6210bb900cfff56311527" Feb 03 09:01:10 crc kubenswrapper[4998]: I0203 09:01:10.615964 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-f8dc-account-create-update-442th" Feb 03 09:01:12 crc kubenswrapper[4998]: I0203 09:01:12.754928 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 09:01:12 crc kubenswrapper[4998]: I0203 09:01:12.754985 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 09:01:12 crc kubenswrapper[4998]: I0203 09:01:12.755022 4998 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" Feb 03 09:01:12 crc kubenswrapper[4998]: I0203 09:01:12.755676 4998 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f0960bf93408fbd80b1af606d0b0b825518c12f926ca6f6b8748be736ddfda77"} pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 03 09:01:12 crc kubenswrapper[4998]: I0203 09:01:12.755737 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" containerID="cri-o://f0960bf93408fbd80b1af606d0b0b825518c12f926ca6f6b8748be736ddfda77" gracePeriod=600 Feb 03 09:01:12 crc kubenswrapper[4998]: E0203 09:01:12.886006 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 09:01:13 crc kubenswrapper[4998]: I0203 09:01:13.646041 4998 generic.go:334] "Generic (PLEG): container finished" podID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerID="f0960bf93408fbd80b1af606d0b0b825518c12f926ca6f6b8748be736ddfda77" exitCode=0 Feb 03 09:01:13 crc kubenswrapper[4998]: I0203 09:01:13.646090 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerDied","Data":"f0960bf93408fbd80b1af606d0b0b825518c12f926ca6f6b8748be736ddfda77"} Feb 03 09:01:13 crc kubenswrapper[4998]: I0203 09:01:13.646162 4998 scope.go:117] "RemoveContainer" containerID="904a99122771df02837e79d9f49d4714e829d6f397087b6959b9dca5b129d115" Feb 03 09:01:13 crc kubenswrapper[4998]: I0203 09:01:13.646801 4998 scope.go:117] "RemoveContainer" containerID="f0960bf93408fbd80b1af606d0b0b825518c12f926ca6f6b8748be736ddfda77" Feb 03 09:01:13 crc kubenswrapper[4998]: E0203 09:01:13.647056 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.566046 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-rr8jz"] Feb 03 09:01:15 crc kubenswrapper[4998]: E0203 09:01:15.566868 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3408867-7c55-4f2a-ba1a-d47cc8dd38cb" containerName="mariadb-database-create" Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.566886 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3408867-7c55-4f2a-ba1a-d47cc8dd38cb" containerName="mariadb-database-create" Feb 03 09:01:15 crc kubenswrapper[4998]: E0203 09:01:15.566899 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3975bdb-b42f-49ee-ac35-00ddcb9760af" containerName="mariadb-database-create" Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.566906 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3975bdb-b42f-49ee-ac35-00ddcb9760af" containerName="mariadb-database-create" Feb 03 09:01:15 crc kubenswrapper[4998]: E0203 09:01:15.566926 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b7abcb2-6609-4cbf-aee3-936f5deba7dd" containerName="mariadb-database-create" Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.566933 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b7abcb2-6609-4cbf-aee3-936f5deba7dd" containerName="mariadb-database-create" Feb 03 09:01:15 crc kubenswrapper[4998]: E0203 09:01:15.566948 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9526202-62b4-4bfe-8638-b80b72772f9d" containerName="mariadb-account-create-update" Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.566955 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9526202-62b4-4bfe-8638-b80b72772f9d" containerName="mariadb-account-create-update" Feb 03 09:01:15 crc kubenswrapper[4998]: E0203 09:01:15.566981 4998 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="43447ea3-026c-476d-a8d0-f44de45d6e67" containerName="mariadb-database-create" Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.566987 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="43447ea3-026c-476d-a8d0-f44de45d6e67" containerName="mariadb-database-create" Feb 03 09:01:15 crc kubenswrapper[4998]: E0203 09:01:15.567006 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f47f17fe-5026-4c73-8b4e-3ac63c890885" containerName="mariadb-account-create-update" Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.567014 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="f47f17fe-5026-4c73-8b4e-3ac63c890885" containerName="mariadb-account-create-update" Feb 03 09:01:15 crc kubenswrapper[4998]: E0203 09:01:15.567032 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="392cb9be-6c09-4ec6-8615-6d9978b0dfc9" containerName="mariadb-account-create-update" Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.567042 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="392cb9be-6c09-4ec6-8615-6d9978b0dfc9" containerName="mariadb-account-create-update" Feb 03 09:01:15 crc kubenswrapper[4998]: E0203 09:01:15.567057 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8770d44-dcad-42f2-8637-1cf4213b1358" containerName="mariadb-account-create-update" Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.567066 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8770d44-dcad-42f2-8637-1cf4213b1358" containerName="mariadb-account-create-update" Feb 03 09:01:15 crc kubenswrapper[4998]: E0203 09:01:15.567086 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d14b7d22-8788-441a-9b12-a410d9622e74" containerName="mariadb-account-create-update" Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.567096 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="d14b7d22-8788-441a-9b12-a410d9622e74" containerName="mariadb-account-create-update" Feb 03 09:01:15 crc kubenswrapper[4998]: E0203 09:01:15.567111 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77ff460f-4a6b-4f54-987d-e3b87003e735" containerName="mariadb-database-create" Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.567119 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="77ff460f-4a6b-4f54-987d-e3b87003e735" containerName="mariadb-database-create" Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.567338 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3408867-7c55-4f2a-ba1a-d47cc8dd38cb" containerName="mariadb-database-create" Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.567357 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="392cb9be-6c09-4ec6-8615-6d9978b0dfc9" containerName="mariadb-account-create-update" Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.567372 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="d14b7d22-8788-441a-9b12-a410d9622e74" containerName="mariadb-account-create-update" Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.567388 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b7abcb2-6609-4cbf-aee3-936f5deba7dd" containerName="mariadb-database-create" Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.567403 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9526202-62b4-4bfe-8638-b80b72772f9d" containerName="mariadb-account-create-update" Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.567411 4998 
memory_manager.go:354] "RemoveStaleState removing state" podUID="d3975bdb-b42f-49ee-ac35-00ddcb9760af" containerName="mariadb-database-create" Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.567426 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="77ff460f-4a6b-4f54-987d-e3b87003e735" containerName="mariadb-database-create" Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.567436 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="43447ea3-026c-476d-a8d0-f44de45d6e67" containerName="mariadb-database-create" Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.567453 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="f47f17fe-5026-4c73-8b4e-3ac63c890885" containerName="mariadb-account-create-update" Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.567466 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8770d44-dcad-42f2-8637-1cf4213b1358" containerName="mariadb-account-create-update" Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.568278 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-rr8jz" Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.572876 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.573148 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-qsdpf" Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.573268 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.586186 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-rr8jz"] Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.689431 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cffdc337-9f79-401a-9af2-8f319a5ed5fb-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-rr8jz\" (UID: \"cffdc337-9f79-401a-9af2-8f319a5ed5fb\") " pod="openstack/nova-cell0-conductor-db-sync-rr8jz" Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.689668 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cffdc337-9f79-401a-9af2-8f319a5ed5fb-config-data\") pod \"nova-cell0-conductor-db-sync-rr8jz\" (UID: \"cffdc337-9f79-401a-9af2-8f319a5ed5fb\") " pod="openstack/nova-cell0-conductor-db-sync-rr8jz" Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.689798 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rrnt9\" (UniqueName: \"kubernetes.io/projected/cffdc337-9f79-401a-9af2-8f319a5ed5fb-kube-api-access-rrnt9\") pod \"nova-cell0-conductor-db-sync-rr8jz\" (UID: \"cffdc337-9f79-401a-9af2-8f319a5ed5fb\") " pod="openstack/nova-cell0-conductor-db-sync-rr8jz" Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.689891 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cffdc337-9f79-401a-9af2-8f319a5ed5fb-scripts\") pod \"nova-cell0-conductor-db-sync-rr8jz\" (UID: \"cffdc337-9f79-401a-9af2-8f319a5ed5fb\") " pod="openstack/nova-cell0-conductor-db-sync-rr8jz" 
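The cpu_manager.go:410 / state_mem.go:107 / memory_manager.go:354 triplets above show kubelet dropping cached resource assignments for pods that no longer exist before admitting the new one. A toy Go sketch of that pruning pattern (the types and map layout are our own, not kubelet's internals):

// stale_state.go - illustrative sketch of the RemoveStaleState pattern:
// drop per-container resource assignments whose pod UID is not in the
// currently-active pod set.
package main

import "fmt"

type key struct{ podUID, container string }

func removeStaleState(assignments map[key]string, active map[string]bool) {
	for k := range assignments {
		if !active[k.podUID] {
			fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n", k.podUID, k.container)
			delete(assignments, k) // deleting during range is safe in Go
		}
	}
}

func main() {
	// UIDs taken from the log above; the cpuset values are placeholders.
	assignments := map[key]string{
		{"d3408867-7c55-4f2a-ba1a-d47cc8dd38cb", "mariadb-database-create"}:      "cpuset 0-1",
		{"cffdc337-9f79-401a-9af2-8f319a5ed5fb", "nova-cell0-conductor-db-sync"}: "cpuset 2-3",
	}
	active := map[string]bool{"cffdc337-9f79-401a-9af2-8f319a5ed5fb": true}
	removeStaleState(assignments, active)
	fmt.Println("remaining assignments:", len(assignments))
}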
Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.792407 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cffdc337-9f79-401a-9af2-8f319a5ed5fb-scripts\") pod \"nova-cell0-conductor-db-sync-rr8jz\" (UID: \"cffdc337-9f79-401a-9af2-8f319a5ed5fb\") " pod="openstack/nova-cell0-conductor-db-sync-rr8jz"
Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.792551 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cffdc337-9f79-401a-9af2-8f319a5ed5fb-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-rr8jz\" (UID: \"cffdc337-9f79-401a-9af2-8f319a5ed5fb\") " pod="openstack/nova-cell0-conductor-db-sync-rr8jz"
Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.792623 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cffdc337-9f79-401a-9af2-8f319a5ed5fb-config-data\") pod \"nova-cell0-conductor-db-sync-rr8jz\" (UID: \"cffdc337-9f79-401a-9af2-8f319a5ed5fb\") " pod="openstack/nova-cell0-conductor-db-sync-rr8jz"
Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.792680 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rrnt9\" (UniqueName: \"kubernetes.io/projected/cffdc337-9f79-401a-9af2-8f319a5ed5fb-kube-api-access-rrnt9\") pod \"nova-cell0-conductor-db-sync-rr8jz\" (UID: \"cffdc337-9f79-401a-9af2-8f319a5ed5fb\") " pod="openstack/nova-cell0-conductor-db-sync-rr8jz"
Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.798821 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cffdc337-9f79-401a-9af2-8f319a5ed5fb-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-rr8jz\" (UID: \"cffdc337-9f79-401a-9af2-8f319a5ed5fb\") " pod="openstack/nova-cell0-conductor-db-sync-rr8jz"
Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.799767 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cffdc337-9f79-401a-9af2-8f319a5ed5fb-config-data\") pod \"nova-cell0-conductor-db-sync-rr8jz\" (UID: \"cffdc337-9f79-401a-9af2-8f319a5ed5fb\") " pod="openstack/nova-cell0-conductor-db-sync-rr8jz"
Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.807749 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cffdc337-9f79-401a-9af2-8f319a5ed5fb-scripts\") pod \"nova-cell0-conductor-db-sync-rr8jz\" (UID: \"cffdc337-9f79-401a-9af2-8f319a5ed5fb\") " pod="openstack/nova-cell0-conductor-db-sync-rr8jz"
Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.813849 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rrnt9\" (UniqueName: \"kubernetes.io/projected/cffdc337-9f79-401a-9af2-8f319a5ed5fb-kube-api-access-rrnt9\") pod \"nova-cell0-conductor-db-sync-rr8jz\" (UID: \"cffdc337-9f79-401a-9af2-8f319a5ed5fb\") " pod="openstack/nova-cell0-conductor-db-sync-rr8jz"
Feb 03 09:01:15 crc kubenswrapper[4998]: I0203 09:01:15.906143 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-rr8jz"
Feb 03 09:01:16 crc kubenswrapper[4998]: I0203 09:01:16.361909 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-rr8jz"]
Feb 03 09:01:16 crc kubenswrapper[4998]: I0203 09:01:16.365709 4998 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Feb 03 09:01:16 crc kubenswrapper[4998]: I0203 09:01:16.672440 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-rr8jz" event={"ID":"cffdc337-9f79-401a-9af2-8f319a5ed5fb","Type":"ContainerStarted","Data":"a356c2d19e7755e8d9504af4321a348cb1356acef9e12a7249f0d9da8296f964"}
Feb 03 09:01:26 crc kubenswrapper[4998]: I0203 09:01:26.428687 4998 scope.go:117] "RemoveContainer" containerID="f0960bf93408fbd80b1af606d0b0b825518c12f926ca6f6b8748be736ddfda77"
Feb 03 09:01:26 crc kubenswrapper[4998]: E0203 09:01:26.429911 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 09:01:26 crc kubenswrapper[4998]: I0203 09:01:26.770411 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-rr8jz" event={"ID":"cffdc337-9f79-401a-9af2-8f319a5ed5fb","Type":"ContainerStarted","Data":"aa8f6ff9d22019cb55c266a5894253e9509846bd5c8842794f8205872537ef05"}
Feb 03 09:01:26 crc kubenswrapper[4998]: I0203 09:01:26.799742 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-rr8jz" podStartSLOduration=2.486742105 podStartE2EDuration="11.799724556s" podCreationTimestamp="2026-02-03 09:01:15 +0000 UTC" firstStartedPulling="2026-02-03 09:01:16.365518385 +0000 UTC m=+8114.652212191" lastFinishedPulling="2026-02-03 09:01:25.678500836 +0000 UTC m=+8123.965194642" observedRunningTime="2026-02-03 09:01:26.789976139 +0000 UTC m=+8125.076669965" watchObservedRunningTime="2026-02-03 09:01:26.799724556 +0000 UTC m=+8125.086418362"
Feb 03 09:01:31 crc kubenswrapper[4998]: I0203 09:01:31.817918 4998 generic.go:334] "Generic (PLEG): container finished" podID="cffdc337-9f79-401a-9af2-8f319a5ed5fb" containerID="aa8f6ff9d22019cb55c266a5894253e9509846bd5c8842794f8205872537ef05" exitCode=0
Feb 03 09:01:31 crc kubenswrapper[4998]: I0203 09:01:31.818008 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-rr8jz" event={"ID":"cffdc337-9f79-401a-9af2-8f319a5ed5fb","Type":"ContainerDied","Data":"aa8f6ff9d22019cb55c266a5894253e9509846bd5c8842794f8205872537ef05"}
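The pod_startup_latency_tracker line above is internally consistent: podStartSLOduration equals podStartE2EDuration minus the image-pull window (lastFinishedPulling − firstStartedPulling), i.e. 11.799724556s − 9.312982451s = 2.486742105s. A short Go sketch reproducing that arithmetic from the logged timestamps:

// startup_slo.go - illustrative sketch: recomputes the SLO duration in
// the "Observed pod startup duration" line above as end-to-end startup
// time minus the image-pull window.
package main

import (
	"fmt"
	"time"
)

// Layout matching Go's default time.Time formatting used in the log
// (monotonic "m=+..." suffixes stripped).
const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

func mustParse(s string) time.Time {
	t, err := time.Parse(layout, s)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	created := mustParse("2026-02-03 09:01:15 +0000 UTC")
	firstPull := mustParse("2026-02-03 09:01:16.365518385 +0000 UTC")
	lastPull := mustParse("2026-02-03 09:01:25.678500836 +0000 UTC")
	running := mustParse("2026-02-03 09:01:26.799724556 +0000 UTC") // watchObservedRunningTime

	e2e := running.Sub(created)
	slo := e2e - lastPull.Sub(firstPull)
	fmt.Println("podStartE2EDuration:", e2e) // 11.799724556s
	fmt.Println("podStartSLOduration:", slo) // 2.486742105s
}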
Feb 03 09:01:33 crc kubenswrapper[4998]: I0203 09:01:33.223604 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-rr8jz"
Feb 03 09:01:33 crc kubenswrapper[4998]: I0203 09:01:33.419711 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cffdc337-9f79-401a-9af2-8f319a5ed5fb-config-data\") pod \"cffdc337-9f79-401a-9af2-8f319a5ed5fb\" (UID: \"cffdc337-9f79-401a-9af2-8f319a5ed5fb\") "
Feb 03 09:01:33 crc kubenswrapper[4998]: I0203 09:01:33.419855 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rrnt9\" (UniqueName: \"kubernetes.io/projected/cffdc337-9f79-401a-9af2-8f319a5ed5fb-kube-api-access-rrnt9\") pod \"cffdc337-9f79-401a-9af2-8f319a5ed5fb\" (UID: \"cffdc337-9f79-401a-9af2-8f319a5ed5fb\") "
Feb 03 09:01:33 crc kubenswrapper[4998]: I0203 09:01:33.419921 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cffdc337-9f79-401a-9af2-8f319a5ed5fb-scripts\") pod \"cffdc337-9f79-401a-9af2-8f319a5ed5fb\" (UID: \"cffdc337-9f79-401a-9af2-8f319a5ed5fb\") "
Feb 03 09:01:33 crc kubenswrapper[4998]: I0203 09:01:33.419981 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cffdc337-9f79-401a-9af2-8f319a5ed5fb-combined-ca-bundle\") pod \"cffdc337-9f79-401a-9af2-8f319a5ed5fb\" (UID: \"cffdc337-9f79-401a-9af2-8f319a5ed5fb\") "
Feb 03 09:01:33 crc kubenswrapper[4998]: I0203 09:01:33.426124 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cffdc337-9f79-401a-9af2-8f319a5ed5fb-scripts" (OuterVolumeSpecName: "scripts") pod "cffdc337-9f79-401a-9af2-8f319a5ed5fb" (UID: "cffdc337-9f79-401a-9af2-8f319a5ed5fb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 09:01:33 crc kubenswrapper[4998]: I0203 09:01:33.427470 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cffdc337-9f79-401a-9af2-8f319a5ed5fb-kube-api-access-rrnt9" (OuterVolumeSpecName: "kube-api-access-rrnt9") pod "cffdc337-9f79-401a-9af2-8f319a5ed5fb" (UID: "cffdc337-9f79-401a-9af2-8f319a5ed5fb"). InnerVolumeSpecName "kube-api-access-rrnt9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 09:01:33 crc kubenswrapper[4998]: E0203 09:01:33.446610 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cffdc337-9f79-401a-9af2-8f319a5ed5fb-combined-ca-bundle podName:cffdc337-9f79-401a-9af2-8f319a5ed5fb nodeName:}" failed. No retries permitted until 2026-02-03 09:01:33.946578177 +0000 UTC m=+8132.233271993 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/cffdc337-9f79-401a-9af2-8f319a5ed5fb-combined-ca-bundle") pod "cffdc337-9f79-401a-9af2-8f319a5ed5fb" (UID: "cffdc337-9f79-401a-9af2-8f319a5ed5fb") : error deleting /var/lib/kubelet/pods/cffdc337-9f79-401a-9af2-8f319a5ed5fb/volume-subpaths: remove /var/lib/kubelet/pods/cffdc337-9f79-401a-9af2-8f319a5ed5fb/volume-subpaths: no such file or directory
Feb 03 09:01:33 crc kubenswrapper[4998]: I0203 09:01:33.450833 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cffdc337-9f79-401a-9af2-8f319a5ed5fb-config-data" (OuterVolumeSpecName: "config-data") pod "cffdc337-9f79-401a-9af2-8f319a5ed5fb" (UID: "cffdc337-9f79-401a-9af2-8f319a5ed5fb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 09:01:33 crc kubenswrapper[4998]: I0203 09:01:33.522439 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rrnt9\" (UniqueName: \"kubernetes.io/projected/cffdc337-9f79-401a-9af2-8f319a5ed5fb-kube-api-access-rrnt9\") on node \"crc\" DevicePath \"\""
Feb 03 09:01:33 crc kubenswrapper[4998]: I0203 09:01:33.522473 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cffdc337-9f79-401a-9af2-8f319a5ed5fb-scripts\") on node \"crc\" DevicePath \"\""
Feb 03 09:01:33 crc kubenswrapper[4998]: I0203 09:01:33.522487 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cffdc337-9f79-401a-9af2-8f319a5ed5fb-config-data\") on node \"crc\" DevicePath \"\""
Feb 03 09:01:33 crc kubenswrapper[4998]: I0203 09:01:33.838310 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-rr8jz" event={"ID":"cffdc337-9f79-401a-9af2-8f319a5ed5fb","Type":"ContainerDied","Data":"a356c2d19e7755e8d9504af4321a348cb1356acef9e12a7249f0d9da8296f964"}
Feb 03 09:01:33 crc kubenswrapper[4998]: I0203 09:01:33.838385 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a356c2d19e7755e8d9504af4321a348cb1356acef9e12a7249f0d9da8296f964"
Feb 03 09:01:33 crc kubenswrapper[4998]: I0203 09:01:33.838555 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-rr8jz"
Feb 03 09:01:33 crc kubenswrapper[4998]: I0203 09:01:33.923897 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"]
Feb 03 09:01:33 crc kubenswrapper[4998]: E0203 09:01:33.924435 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cffdc337-9f79-401a-9af2-8f319a5ed5fb" containerName="nova-cell0-conductor-db-sync"
Feb 03 09:01:33 crc kubenswrapper[4998]: I0203 09:01:33.924458 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="cffdc337-9f79-401a-9af2-8f319a5ed5fb" containerName="nova-cell0-conductor-db-sync"
Feb 03 09:01:33 crc kubenswrapper[4998]: I0203 09:01:33.924712 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="cffdc337-9f79-401a-9af2-8f319a5ed5fb" containerName="nova-cell0-conductor-db-sync"
Feb 03 09:01:33 crc kubenswrapper[4998]: I0203 09:01:33.925564 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Feb 03 09:01:33 crc kubenswrapper[4998]: I0203 09:01:33.934100 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Feb 03 09:01:34 crc kubenswrapper[4998]: I0203 09:01:34.030871 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cffdc337-9f79-401a-9af2-8f319a5ed5fb-combined-ca-bundle\") pod \"cffdc337-9f79-401a-9af2-8f319a5ed5fb\" (UID: \"cffdc337-9f79-401a-9af2-8f319a5ed5fb\") "
Feb 03 09:01:34 crc kubenswrapper[4998]: I0203 09:01:34.031177 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fncc\" (UniqueName: \"kubernetes.io/projected/57819439-d059-440d-a0ea-2d224cd27173-kube-api-access-5fncc\") pod \"nova-cell0-conductor-0\" (UID: \"57819439-d059-440d-a0ea-2d224cd27173\") " pod="openstack/nova-cell0-conductor-0"
Feb 03 09:01:34 crc kubenswrapper[4998]: I0203 09:01:34.031311 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57819439-d059-440d-a0ea-2d224cd27173-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"57819439-d059-440d-a0ea-2d224cd27173\") " pod="openstack/nova-cell0-conductor-0"
Feb 03 09:01:34 crc kubenswrapper[4998]: I0203 09:01:34.031387 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57819439-d059-440d-a0ea-2d224cd27173-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"57819439-d059-440d-a0ea-2d224cd27173\") " pod="openstack/nova-cell0-conductor-0"
Feb 03 09:01:34 crc kubenswrapper[4998]: I0203 09:01:34.034134 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cffdc337-9f79-401a-9af2-8f319a5ed5fb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cffdc337-9f79-401a-9af2-8f319a5ed5fb" (UID: "cffdc337-9f79-401a-9af2-8f319a5ed5fb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 09:01:34 crc kubenswrapper[4998]: I0203 09:01:34.133367 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57819439-d059-440d-a0ea-2d224cd27173-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"57819439-d059-440d-a0ea-2d224cd27173\") " pod="openstack/nova-cell0-conductor-0"
Feb 03 09:01:34 crc kubenswrapper[4998]: I0203 09:01:34.133835 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57819439-d059-440d-a0ea-2d224cd27173-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"57819439-d059-440d-a0ea-2d224cd27173\") " pod="openstack/nova-cell0-conductor-0"
Feb 03 09:01:34 crc kubenswrapper[4998]: I0203 09:01:34.133893 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fncc\" (UniqueName: \"kubernetes.io/projected/57819439-d059-440d-a0ea-2d224cd27173-kube-api-access-5fncc\") pod \"nova-cell0-conductor-0\" (UID: \"57819439-d059-440d-a0ea-2d224cd27173\") " pod="openstack/nova-cell0-conductor-0"
Feb 03 09:01:34 crc kubenswrapper[4998]: I0203 09:01:34.134054 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cffdc337-9f79-401a-9af2-8f319a5ed5fb-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 03 09:01:34 crc kubenswrapper[4998]: I0203 09:01:34.137413 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57819439-d059-440d-a0ea-2d224cd27173-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"57819439-d059-440d-a0ea-2d224cd27173\") " pod="openstack/nova-cell0-conductor-0"
Feb 03 09:01:34 crc kubenswrapper[4998]: I0203 09:01:34.139659 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57819439-d059-440d-a0ea-2d224cd27173-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"57819439-d059-440d-a0ea-2d224cd27173\") " pod="openstack/nova-cell0-conductor-0"
Feb 03 09:01:34 crc kubenswrapper[4998]: I0203 09:01:34.153804 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5fncc\" (UniqueName: \"kubernetes.io/projected/57819439-d059-440d-a0ea-2d224cd27173-kube-api-access-5fncc\") pod \"nova-cell0-conductor-0\" (UID: \"57819439-d059-440d-a0ea-2d224cd27173\") " pod="openstack/nova-cell0-conductor-0"
Feb 03 09:01:34 crc kubenswrapper[4998]: I0203 09:01:34.249275 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Feb 03 09:01:34 crc kubenswrapper[4998]: I0203 09:01:34.675158 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Feb 03 09:01:34 crc kubenswrapper[4998]: W0203 09:01:34.688556 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod57819439_d059_440d_a0ea_2d224cd27173.slice/crio-fe771b970586593577c6d94083a86adffd030ca5771bb3cf4a5a02fdc2cbf432 WatchSource:0}: Error finding container fe771b970586593577c6d94083a86adffd030ca5771bb3cf4a5a02fdc2cbf432: Status 404 returned error can't find the container with id fe771b970586593577c6d94083a86adffd030ca5771bb3cf4a5a02fdc2cbf432
Feb 03 09:01:34 crc kubenswrapper[4998]: I0203 09:01:34.848547 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"57819439-d059-440d-a0ea-2d224cd27173","Type":"ContainerStarted","Data":"69872b6f5b79e2f0ba0d5abe8e8b7f983e96d36f42609d79bfd166d12f6b5311"}
Feb 03 09:01:34 crc kubenswrapper[4998]: I0203 09:01:34.848593 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"57819439-d059-440d-a0ea-2d224cd27173","Type":"ContainerStarted","Data":"fe771b970586593577c6d94083a86adffd030ca5771bb3cf4a5a02fdc2cbf432"}
Feb 03 09:01:34 crc kubenswrapper[4998]: I0203 09:01:34.848710 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0"
Feb 03 09:01:34 crc kubenswrapper[4998]: I0203 09:01:34.868304 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=1.868278798 podStartE2EDuration="1.868278798s" podCreationTimestamp="2026-02-03 09:01:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 09:01:34.863286816 +0000 UTC m=+8133.149980622" watchObservedRunningTime="2026-02-03 09:01:34.868278798 +0000 UTC m=+8133.154972604"
Feb 03 09:01:37 crc kubenswrapper[4998]: I0203 09:01:37.427797 4998 scope.go:117] "RemoveContainer" containerID="f0960bf93408fbd80b1af606d0b0b825518c12f926ca6f6b8748be736ddfda77"
Feb 03 09:01:37 crc kubenswrapper[4998]: E0203 09:01:37.428410 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 09:01:39 crc kubenswrapper[4998]: I0203 09:01:39.273471 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0"
Feb 03 09:01:39 crc kubenswrapper[4998]: I0203 09:01:39.694489 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-gq6wg"]
Feb 03 09:01:39 crc kubenswrapper[4998]: I0203 09:01:39.695868 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-gq6wg"
Feb 03 09:01:39 crc kubenswrapper[4998]: I0203 09:01:39.701084 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data"
Feb 03 09:01:39 crc kubenswrapper[4998]: I0203 09:01:39.701603 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts"
Feb 03 09:01:39 crc kubenswrapper[4998]: I0203 09:01:39.720998 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-gq6wg"]
Feb 03 09:01:39 crc kubenswrapper[4998]: I0203 09:01:39.849155 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Feb 03 09:01:39 crc kubenswrapper[4998]: I0203 09:01:39.850456 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Feb 03 09:01:39 crc kubenswrapper[4998]: I0203 09:01:39.853478 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data"
Feb 03 09:01:39 crc kubenswrapper[4998]: I0203 09:01:39.859895 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gcndh\" (UniqueName: \"kubernetes.io/projected/ee0c9709-6a32-4f5b-8458-cdb322580330-kube-api-access-gcndh\") pod \"nova-cell0-cell-mapping-gq6wg\" (UID: \"ee0c9709-6a32-4f5b-8458-cdb322580330\") " pod="openstack/nova-cell0-cell-mapping-gq6wg"
Feb 03 09:01:39 crc kubenswrapper[4998]: I0203 09:01:39.859968 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee0c9709-6a32-4f5b-8458-cdb322580330-scripts\") pod \"nova-cell0-cell-mapping-gq6wg\" (UID: \"ee0c9709-6a32-4f5b-8458-cdb322580330\") " pod="openstack/nova-cell0-cell-mapping-gq6wg"
Feb 03 09:01:39 crc kubenswrapper[4998]: I0203 09:01:39.860074 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee0c9709-6a32-4f5b-8458-cdb322580330-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-gq6wg\" (UID: \"ee0c9709-6a32-4f5b-8458-cdb322580330\") " pod="openstack/nova-cell0-cell-mapping-gq6wg"
Feb 03 09:01:39 crc kubenswrapper[4998]: I0203 09:01:39.860119 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee0c9709-6a32-4f5b-8458-cdb322580330-config-data\") pod \"nova-cell0-cell-mapping-gq6wg\" (UID: \"ee0c9709-6a32-4f5b-8458-cdb322580330\") " pod="openstack/nova-cell0-cell-mapping-gq6wg"
Feb 03 09:01:39 crc kubenswrapper[4998]: I0203 09:01:39.877240 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Feb 03 09:01:39 crc kubenswrapper[4998]: I0203 09:01:39.900154 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Feb 03 09:01:39 crc kubenswrapper[4998]: I0203 09:01:39.901852 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Feb 03 09:01:39 crc kubenswrapper[4998]: I0203 09:01:39.912354 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Feb 03 09:01:39 crc kubenswrapper[4998]: I0203 09:01:39.935868 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Feb 03 09:01:39 crc kubenswrapper[4998]: I0203 09:01:39.961888 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/033d6aaa-cca8-4ec3-af93-1b03bfb969ee-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"033d6aaa-cca8-4ec3-af93-1b03bfb969ee\") " pod="openstack/nova-cell1-novncproxy-0"
Feb 03 09:01:39 crc kubenswrapper[4998]: I0203 09:01:39.961999 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee0c9709-6a32-4f5b-8458-cdb322580330-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-gq6wg\" (UID: \"ee0c9709-6a32-4f5b-8458-cdb322580330\") " pod="openstack/nova-cell0-cell-mapping-gq6wg"
Feb 03 09:01:39 crc kubenswrapper[4998]: I0203 09:01:39.962058 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee0c9709-6a32-4f5b-8458-cdb322580330-config-data\") pod \"nova-cell0-cell-mapping-gq6wg\" (UID: \"ee0c9709-6a32-4f5b-8458-cdb322580330\") " pod="openstack/nova-cell0-cell-mapping-gq6wg"
Feb 03 09:01:39 crc kubenswrapper[4998]: I0203 09:01:39.962137 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/033d6aaa-cca8-4ec3-af93-1b03bfb969ee-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"033d6aaa-cca8-4ec3-af93-1b03bfb969ee\") " pod="openstack/nova-cell1-novncproxy-0"
Feb 03 09:01:39 crc kubenswrapper[4998]: I0203 09:01:39.962168 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gcndh\" (UniqueName: \"kubernetes.io/projected/ee0c9709-6a32-4f5b-8458-cdb322580330-kube-api-access-gcndh\") pod \"nova-cell0-cell-mapping-gq6wg\" (UID: \"ee0c9709-6a32-4f5b-8458-cdb322580330\") " pod="openstack/nova-cell0-cell-mapping-gq6wg"
Feb 03 09:01:39 crc kubenswrapper[4998]: I0203 09:01:39.962196 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkrfb\" (UniqueName: \"kubernetes.io/projected/033d6aaa-cca8-4ec3-af93-1b03bfb969ee-kube-api-access-qkrfb\") pod \"nova-cell1-novncproxy-0\" (UID: \"033d6aaa-cca8-4ec3-af93-1b03bfb969ee\") " pod="openstack/nova-cell1-novncproxy-0"
Feb 03 09:01:39 crc kubenswrapper[4998]: I0203 09:01:39.962239 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee0c9709-6a32-4f5b-8458-cdb322580330-scripts\") pod \"nova-cell0-cell-mapping-gq6wg\" (UID: \"ee0c9709-6a32-4f5b-8458-cdb322580330\") " pod="openstack/nova-cell0-cell-mapping-gq6wg"
Feb 03 09:01:39 crc kubenswrapper[4998]: I0203 09:01:39.967509 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee0c9709-6a32-4f5b-8458-cdb322580330-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-gq6wg\" (UID: \"ee0c9709-6a32-4f5b-8458-cdb322580330\") " pod="openstack/nova-cell0-cell-mapping-gq6wg"
Feb 03 09:01:39 crc kubenswrapper[4998]:
I0203 09:01:39.982341 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee0c9709-6a32-4f5b-8458-cdb322580330-scripts\") pod \"nova-cell0-cell-mapping-gq6wg\" (UID: \"ee0c9709-6a32-4f5b-8458-cdb322580330\") " pod="openstack/nova-cell0-cell-mapping-gq6wg" Feb 03 09:01:39 crc kubenswrapper[4998]: I0203 09:01:39.989619 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee0c9709-6a32-4f5b-8458-cdb322580330-config-data\") pod \"nova-cell0-cell-mapping-gq6wg\" (UID: \"ee0c9709-6a32-4f5b-8458-cdb322580330\") " pod="openstack/nova-cell0-cell-mapping-gq6wg" Feb 03 09:01:39 crc kubenswrapper[4998]: I0203 09:01:39.992102 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gcndh\" (UniqueName: \"kubernetes.io/projected/ee0c9709-6a32-4f5b-8458-cdb322580330-kube-api-access-gcndh\") pod \"nova-cell0-cell-mapping-gq6wg\" (UID: \"ee0c9709-6a32-4f5b-8458-cdb322580330\") " pod="openstack/nova-cell0-cell-mapping-gq6wg" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.036229 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-gq6wg" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.040419 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.042339 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.048255 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.058879 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.060933 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.072106 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.073770 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f52b100-501d-42ad-8ecd-f674dab208e5-logs\") pod \"nova-api-0\" (UID: \"0f52b100-501d-42ad-8ecd-f674dab208e5\") " pod="openstack/nova-api-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.073929 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sq498\" (UniqueName: \"kubernetes.io/projected/0f52b100-501d-42ad-8ecd-f674dab208e5-kube-api-access-sq498\") pod \"nova-api-0\" (UID: \"0f52b100-501d-42ad-8ecd-f674dab208e5\") " pod="openstack/nova-api-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.074006 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/033d6aaa-cca8-4ec3-af93-1b03bfb969ee-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"033d6aaa-cca8-4ec3-af93-1b03bfb969ee\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.074276 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f52b100-501d-42ad-8ecd-f674dab208e5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0f52b100-501d-42ad-8ecd-f674dab208e5\") " pod="openstack/nova-api-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.074399 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/033d6aaa-cca8-4ec3-af93-1b03bfb969ee-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"033d6aaa-cca8-4ec3-af93-1b03bfb969ee\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.074440 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkrfb\" (UniqueName: \"kubernetes.io/projected/033d6aaa-cca8-4ec3-af93-1b03bfb969ee-kube-api-access-qkrfb\") pod \"nova-cell1-novncproxy-0\" (UID: \"033d6aaa-cca8-4ec3-af93-1b03bfb969ee\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.074528 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f52b100-501d-42ad-8ecd-f674dab208e5-config-data\") pod \"nova-api-0\" (UID: \"0f52b100-501d-42ad-8ecd-f674dab208e5\") " pod="openstack/nova-api-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.091674 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/033d6aaa-cca8-4ec3-af93-1b03bfb969ee-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"033d6aaa-cca8-4ec3-af93-1b03bfb969ee\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.091756 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.095533 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/033d6aaa-cca8-4ec3-af93-1b03bfb969ee-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"033d6aaa-cca8-4ec3-af93-1b03bfb969ee\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.130928 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.140421 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkrfb\" (UniqueName: \"kubernetes.io/projected/033d6aaa-cca8-4ec3-af93-1b03bfb969ee-kube-api-access-qkrfb\") pod \"nova-cell1-novncproxy-0\" (UID: \"033d6aaa-cca8-4ec3-af93-1b03bfb969ee\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.170035 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.193964 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f52b100-501d-42ad-8ecd-f674dab208e5-config-data\") pod \"nova-api-0\" (UID: \"0f52b100-501d-42ad-8ecd-f674dab208e5\") " pod="openstack/nova-api-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.194038 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d3ff0e2-3700-4bea-beef-1cedd521227a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0d3ff0e2-3700-4bea-beef-1cedd521227a\") " pod="openstack/nova-scheduler-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.194072 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0604ddb2-60bd-4ce3-aa61-1d1203572bc4-logs\") pod \"nova-metadata-0\" (UID: \"0604ddb2-60bd-4ce3-aa61-1d1203572bc4\") " pod="openstack/nova-metadata-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.194100 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2x8z7\" (UniqueName: \"kubernetes.io/projected/0d3ff0e2-3700-4bea-beef-1cedd521227a-kube-api-access-2x8z7\") pod \"nova-scheduler-0\" (UID: \"0d3ff0e2-3700-4bea-beef-1cedd521227a\") " pod="openstack/nova-scheduler-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.194135 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f52b100-501d-42ad-8ecd-f674dab208e5-logs\") pod \"nova-api-0\" (UID: \"0f52b100-501d-42ad-8ecd-f674dab208e5\") " pod="openstack/nova-api-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.194162 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sq498\" (UniqueName: \"kubernetes.io/projected/0f52b100-501d-42ad-8ecd-f674dab208e5-kube-api-access-sq498\") pod \"nova-api-0\" (UID: \"0f52b100-501d-42ad-8ecd-f674dab208e5\") " pod="openstack/nova-api-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.194254 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0604ddb2-60bd-4ce3-aa61-1d1203572bc4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"0604ddb2-60bd-4ce3-aa61-1d1203572bc4\") " pod="openstack/nova-metadata-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 
09:01:40.194286 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvbnc\" (UniqueName: \"kubernetes.io/projected/0604ddb2-60bd-4ce3-aa61-1d1203572bc4-kube-api-access-bvbnc\") pod \"nova-metadata-0\" (UID: \"0604ddb2-60bd-4ce3-aa61-1d1203572bc4\") " pod="openstack/nova-metadata-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.194325 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f52b100-501d-42ad-8ecd-f674dab208e5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0f52b100-501d-42ad-8ecd-f674dab208e5\") " pod="openstack/nova-api-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.194359 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d3ff0e2-3700-4bea-beef-1cedd521227a-config-data\") pod \"nova-scheduler-0\" (UID: \"0d3ff0e2-3700-4bea-beef-1cedd521227a\") " pod="openstack/nova-scheduler-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.194386 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0604ddb2-60bd-4ce3-aa61-1d1203572bc4-config-data\") pod \"nova-metadata-0\" (UID: \"0604ddb2-60bd-4ce3-aa61-1d1203572bc4\") " pod="openstack/nova-metadata-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.195349 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f52b100-501d-42ad-8ecd-f674dab208e5-logs\") pod \"nova-api-0\" (UID: \"0f52b100-501d-42ad-8ecd-f674dab208e5\") " pod="openstack/nova-api-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.198510 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f52b100-501d-42ad-8ecd-f674dab208e5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0f52b100-501d-42ad-8ecd-f674dab208e5\") " pod="openstack/nova-api-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.210288 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f52b100-501d-42ad-8ecd-f674dab208e5-config-data\") pod \"nova-api-0\" (UID: \"0f52b100-501d-42ad-8ecd-f674dab208e5\") " pod="openstack/nova-api-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.222084 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sq498\" (UniqueName: \"kubernetes.io/projected/0f52b100-501d-42ad-8ecd-f674dab208e5-kube-api-access-sq498\") pod \"nova-api-0\" (UID: \"0f52b100-501d-42ad-8ecd-f674dab208e5\") " pod="openstack/nova-api-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.234459 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.258048 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d9d87dc5-tsm89"] Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.259869 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d9d87dc5-tsm89" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.282863 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell2-novncproxy-0"] Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.284398 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell2-novncproxy-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.287316 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell2-novncproxy-config-data" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.300555 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvbnc\" (UniqueName: \"kubernetes.io/projected/0604ddb2-60bd-4ce3-aa61-1d1203572bc4-kube-api-access-bvbnc\") pod \"nova-metadata-0\" (UID: \"0604ddb2-60bd-4ce3-aa61-1d1203572bc4\") " pod="openstack/nova-metadata-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.300633 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d3ff0e2-3700-4bea-beef-1cedd521227a-config-data\") pod \"nova-scheduler-0\" (UID: \"0d3ff0e2-3700-4bea-beef-1cedd521227a\") " pod="openstack/nova-scheduler-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.300655 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0604ddb2-60bd-4ce3-aa61-1d1203572bc4-config-data\") pod \"nova-metadata-0\" (UID: \"0604ddb2-60bd-4ce3-aa61-1d1203572bc4\") " pod="openstack/nova-metadata-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.300761 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d3ff0e2-3700-4bea-beef-1cedd521227a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0d3ff0e2-3700-4bea-beef-1cedd521227a\") " pod="openstack/nova-scheduler-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.300812 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0604ddb2-60bd-4ce3-aa61-1d1203572bc4-logs\") pod \"nova-metadata-0\" (UID: \"0604ddb2-60bd-4ce3-aa61-1d1203572bc4\") " pod="openstack/nova-metadata-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.300835 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2x8z7\" (UniqueName: \"kubernetes.io/projected/0d3ff0e2-3700-4bea-beef-1cedd521227a-kube-api-access-2x8z7\") pod \"nova-scheduler-0\" (UID: \"0d3ff0e2-3700-4bea-beef-1cedd521227a\") " pod="openstack/nova-scheduler-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.300964 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0604ddb2-60bd-4ce3-aa61-1d1203572bc4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"0604ddb2-60bd-4ce3-aa61-1d1203572bc4\") " pod="openstack/nova-metadata-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.301843 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0604ddb2-60bd-4ce3-aa61-1d1203572bc4-logs\") pod \"nova-metadata-0\" (UID: \"0604ddb2-60bd-4ce3-aa61-1d1203572bc4\") " pod="openstack/nova-metadata-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.306115 4998 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0604ddb2-60bd-4ce3-aa61-1d1203572bc4-config-data\") pod \"nova-metadata-0\" (UID: \"0604ddb2-60bd-4ce3-aa61-1d1203572bc4\") " pod="openstack/nova-metadata-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.309869 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d3ff0e2-3700-4bea-beef-1cedd521227a-config-data\") pod \"nova-scheduler-0\" (UID: \"0d3ff0e2-3700-4bea-beef-1cedd521227a\") " pod="openstack/nova-scheduler-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.317106 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d3ff0e2-3700-4bea-beef-1cedd521227a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0d3ff0e2-3700-4bea-beef-1cedd521227a\") " pod="openstack/nova-scheduler-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.317675 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0604ddb2-60bd-4ce3-aa61-1d1203572bc4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"0604ddb2-60bd-4ce3-aa61-1d1203572bc4\") " pod="openstack/nova-metadata-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.329036 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d9d87dc5-tsm89"] Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.338809 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvbnc\" (UniqueName: \"kubernetes.io/projected/0604ddb2-60bd-4ce3-aa61-1d1203572bc4-kube-api-access-bvbnc\") pod \"nova-metadata-0\" (UID: \"0604ddb2-60bd-4ce3-aa61-1d1203572bc4\") " pod="openstack/nova-metadata-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.338935 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell2-novncproxy-0"] Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.347585 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2x8z7\" (UniqueName: \"kubernetes.io/projected/0d3ff0e2-3700-4bea-beef-1cedd521227a-kube-api-access-2x8z7\") pod \"nova-scheduler-0\" (UID: \"0d3ff0e2-3700-4bea-beef-1cedd521227a\") " pod="openstack/nova-scheduler-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.363024 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell3-novncproxy-0"] Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.364731 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell3-novncproxy-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.368576 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell3-novncproxy-config-data" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.397748 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell3-novncproxy-0"] Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.404951 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffd397d0-f09c-4352-bdeb-2be3b4d6102f-config-data\") pod \"nova-cell2-novncproxy-0\" (UID: \"ffd397d0-f09c-4352-bdeb-2be3b4d6102f\") " pod="openstack/nova-cell2-novncproxy-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.405035 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fmb7z\" (UniqueName: \"kubernetes.io/projected/2dae9d3a-7f7b-42de-bbf0-020df791db8d-kube-api-access-fmb7z\") pod \"dnsmasq-dns-57d9d87dc5-tsm89\" (UID: \"2dae9d3a-7f7b-42de-bbf0-020df791db8d\") " pod="openstack/dnsmasq-dns-57d9d87dc5-tsm89" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.406013 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2dae9d3a-7f7b-42de-bbf0-020df791db8d-ovsdbserver-sb\") pod \"dnsmasq-dns-57d9d87dc5-tsm89\" (UID: \"2dae9d3a-7f7b-42de-bbf0-020df791db8d\") " pod="openstack/dnsmasq-dns-57d9d87dc5-tsm89" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.406048 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffd397d0-f09c-4352-bdeb-2be3b4d6102f-combined-ca-bundle\") pod \"nova-cell2-novncproxy-0\" (UID: \"ffd397d0-f09c-4352-bdeb-2be3b4d6102f\") " pod="openstack/nova-cell2-novncproxy-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.406108 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2dae9d3a-7f7b-42de-bbf0-020df791db8d-dns-svc\") pod \"dnsmasq-dns-57d9d87dc5-tsm89\" (UID: \"2dae9d3a-7f7b-42de-bbf0-020df791db8d\") " pod="openstack/dnsmasq-dns-57d9d87dc5-tsm89" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.406521 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2dae9d3a-7f7b-42de-bbf0-020df791db8d-ovsdbserver-nb\") pod \"dnsmasq-dns-57d9d87dc5-tsm89\" (UID: \"2dae9d3a-7f7b-42de-bbf0-020df791db8d\") " pod="openstack/dnsmasq-dns-57d9d87dc5-tsm89" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.406562 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rb47r\" (UniqueName: \"kubernetes.io/projected/ffd397d0-f09c-4352-bdeb-2be3b4d6102f-kube-api-access-rb47r\") pod \"nova-cell2-novncproxy-0\" (UID: \"ffd397d0-f09c-4352-bdeb-2be3b4d6102f\") " pod="openstack/nova-cell2-novncproxy-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.406760 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2dae9d3a-7f7b-42de-bbf0-020df791db8d-config\") pod \"dnsmasq-dns-57d9d87dc5-tsm89\" (UID: 
\"2dae9d3a-7f7b-42de-bbf0-020df791db8d\") " pod="openstack/dnsmasq-dns-57d9d87dc5-tsm89" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.508969 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2dae9d3a-7f7b-42de-bbf0-020df791db8d-dns-svc\") pod \"dnsmasq-dns-57d9d87dc5-tsm89\" (UID: \"2dae9d3a-7f7b-42de-bbf0-020df791db8d\") " pod="openstack/dnsmasq-dns-57d9d87dc5-tsm89" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.509083 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2dae9d3a-7f7b-42de-bbf0-020df791db8d-ovsdbserver-nb\") pod \"dnsmasq-dns-57d9d87dc5-tsm89\" (UID: \"2dae9d3a-7f7b-42de-bbf0-020df791db8d\") " pod="openstack/dnsmasq-dns-57d9d87dc5-tsm89" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.509123 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rb47r\" (UniqueName: \"kubernetes.io/projected/ffd397d0-f09c-4352-bdeb-2be3b4d6102f-kube-api-access-rb47r\") pod \"nova-cell2-novncproxy-0\" (UID: \"ffd397d0-f09c-4352-bdeb-2be3b4d6102f\") " pod="openstack/nova-cell2-novncproxy-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.509189 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7cefac34-9996-45f6-9093-5fdf673be4ab-config-data\") pod \"nova-cell3-novncproxy-0\" (UID: \"7cefac34-9996-45f6-9093-5fdf673be4ab\") " pod="openstack/nova-cell3-novncproxy-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.509214 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2dae9d3a-7f7b-42de-bbf0-020df791db8d-config\") pod \"dnsmasq-dns-57d9d87dc5-tsm89\" (UID: \"2dae9d3a-7f7b-42de-bbf0-020df791db8d\") " pod="openstack/dnsmasq-dns-57d9d87dc5-tsm89" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.509237 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cefac34-9996-45f6-9093-5fdf673be4ab-combined-ca-bundle\") pod \"nova-cell3-novncproxy-0\" (UID: \"7cefac34-9996-45f6-9093-5fdf673be4ab\") " pod="openstack/nova-cell3-novncproxy-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.509269 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffd397d0-f09c-4352-bdeb-2be3b4d6102f-config-data\") pod \"nova-cell2-novncproxy-0\" (UID: \"ffd397d0-f09c-4352-bdeb-2be3b4d6102f\") " pod="openstack/nova-cell2-novncproxy-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.509309 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fmb7z\" (UniqueName: \"kubernetes.io/projected/2dae9d3a-7f7b-42de-bbf0-020df791db8d-kube-api-access-fmb7z\") pod \"dnsmasq-dns-57d9d87dc5-tsm89\" (UID: \"2dae9d3a-7f7b-42de-bbf0-020df791db8d\") " pod="openstack/dnsmasq-dns-57d9d87dc5-tsm89" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.512317 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fd2lj\" (UniqueName: \"kubernetes.io/projected/7cefac34-9996-45f6-9093-5fdf673be4ab-kube-api-access-fd2lj\") pod \"nova-cell3-novncproxy-0\" (UID: \"7cefac34-9996-45f6-9093-5fdf673be4ab\") 
" pod="openstack/nova-cell3-novncproxy-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.512359 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2dae9d3a-7f7b-42de-bbf0-020df791db8d-ovsdbserver-sb\") pod \"dnsmasq-dns-57d9d87dc5-tsm89\" (UID: \"2dae9d3a-7f7b-42de-bbf0-020df791db8d\") " pod="openstack/dnsmasq-dns-57d9d87dc5-tsm89" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.512391 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffd397d0-f09c-4352-bdeb-2be3b4d6102f-combined-ca-bundle\") pod \"nova-cell2-novncproxy-0\" (UID: \"ffd397d0-f09c-4352-bdeb-2be3b4d6102f\") " pod="openstack/nova-cell2-novncproxy-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.512447 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2dae9d3a-7f7b-42de-bbf0-020df791db8d-config\") pod \"dnsmasq-dns-57d9d87dc5-tsm89\" (UID: \"2dae9d3a-7f7b-42de-bbf0-020df791db8d\") " pod="openstack/dnsmasq-dns-57d9d87dc5-tsm89" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.512752 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2dae9d3a-7f7b-42de-bbf0-020df791db8d-dns-svc\") pod \"dnsmasq-dns-57d9d87dc5-tsm89\" (UID: \"2dae9d3a-7f7b-42de-bbf0-020df791db8d\") " pod="openstack/dnsmasq-dns-57d9d87dc5-tsm89" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.513525 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2dae9d3a-7f7b-42de-bbf0-020df791db8d-ovsdbserver-nb\") pod \"dnsmasq-dns-57d9d87dc5-tsm89\" (UID: \"2dae9d3a-7f7b-42de-bbf0-020df791db8d\") " pod="openstack/dnsmasq-dns-57d9d87dc5-tsm89" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.513956 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2dae9d3a-7f7b-42de-bbf0-020df791db8d-ovsdbserver-sb\") pod \"dnsmasq-dns-57d9d87dc5-tsm89\" (UID: \"2dae9d3a-7f7b-42de-bbf0-020df791db8d\") " pod="openstack/dnsmasq-dns-57d9d87dc5-tsm89" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.518322 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffd397d0-f09c-4352-bdeb-2be3b4d6102f-combined-ca-bundle\") pod \"nova-cell2-novncproxy-0\" (UID: \"ffd397d0-f09c-4352-bdeb-2be3b4d6102f\") " pod="openstack/nova-cell2-novncproxy-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.531878 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffd397d0-f09c-4352-bdeb-2be3b4d6102f-config-data\") pod \"nova-cell2-novncproxy-0\" (UID: \"ffd397d0-f09c-4352-bdeb-2be3b4d6102f\") " pod="openstack/nova-cell2-novncproxy-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.533373 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rb47r\" (UniqueName: \"kubernetes.io/projected/ffd397d0-f09c-4352-bdeb-2be3b4d6102f-kube-api-access-rb47r\") pod \"nova-cell2-novncproxy-0\" (UID: \"ffd397d0-f09c-4352-bdeb-2be3b4d6102f\") " pod="openstack/nova-cell2-novncproxy-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.535355 4998 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-fmb7z\" (UniqueName: \"kubernetes.io/projected/2dae9d3a-7f7b-42de-bbf0-020df791db8d-kube-api-access-fmb7z\") pod \"dnsmasq-dns-57d9d87dc5-tsm89\" (UID: \"2dae9d3a-7f7b-42de-bbf0-020df791db8d\") " pod="openstack/dnsmasq-dns-57d9d87dc5-tsm89" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.552430 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.596556 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.621406 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7cefac34-9996-45f6-9093-5fdf673be4ab-config-data\") pod \"nova-cell3-novncproxy-0\" (UID: \"7cefac34-9996-45f6-9093-5fdf673be4ab\") " pod="openstack/nova-cell3-novncproxy-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.621451 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cefac34-9996-45f6-9093-5fdf673be4ab-combined-ca-bundle\") pod \"nova-cell3-novncproxy-0\" (UID: \"7cefac34-9996-45f6-9093-5fdf673be4ab\") " pod="openstack/nova-cell3-novncproxy-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.621577 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fd2lj\" (UniqueName: \"kubernetes.io/projected/7cefac34-9996-45f6-9093-5fdf673be4ab-kube-api-access-fd2lj\") pod \"nova-cell3-novncproxy-0\" (UID: \"7cefac34-9996-45f6-9093-5fdf673be4ab\") " pod="openstack/nova-cell3-novncproxy-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.626307 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cefac34-9996-45f6-9093-5fdf673be4ab-combined-ca-bundle\") pod \"nova-cell3-novncproxy-0\" (UID: \"7cefac34-9996-45f6-9093-5fdf673be4ab\") " pod="openstack/nova-cell3-novncproxy-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.626468 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7cefac34-9996-45f6-9093-5fdf673be4ab-config-data\") pod \"nova-cell3-novncproxy-0\" (UID: \"7cefac34-9996-45f6-9093-5fdf673be4ab\") " pod="openstack/nova-cell3-novncproxy-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.639551 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d9d87dc5-tsm89" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.650352 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fd2lj\" (UniqueName: \"kubernetes.io/projected/7cefac34-9996-45f6-9093-5fdf673be4ab-kube-api-access-fd2lj\") pod \"nova-cell3-novncproxy-0\" (UID: \"7cefac34-9996-45f6-9093-5fdf673be4ab\") " pod="openstack/nova-cell3-novncproxy-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.651985 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell2-novncproxy-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.692703 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell3-novncproxy-0" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.768135 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-gq6wg"] Feb 03 09:01:40 crc kubenswrapper[4998]: W0203 09:01:40.784715 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podee0c9709_6a32_4f5b_8458_cdb322580330.slice/crio-797603bf44dcadc4f0b493cab8ddcd3ce5943159785b1045086cba2c87fa9f0c WatchSource:0}: Error finding container 797603bf44dcadc4f0b493cab8ddcd3ce5943159785b1045086cba2c87fa9f0c: Status 404 returned error can't find the container with id 797603bf44dcadc4f0b493cab8ddcd3ce5943159785b1045086cba2c87fa9f0c Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.870686 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-46zpr"] Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.871902 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-46zpr" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.878246 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.878293 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.931164 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-46zpr"] Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.958797 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-gq6wg" event={"ID":"ee0c9709-6a32-4f5b-8458-cdb322580330","Type":"ContainerStarted","Data":"797603bf44dcadc4f0b493cab8ddcd3ce5943159785b1045086cba2c87fa9f0c"} Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.975403 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell2-conductor-db-sync-v9jd9"] Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.976634 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell2-conductor-db-sync-v9jd9" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.979402 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell2-conductor-config-data" Feb 03 09:01:40 crc kubenswrapper[4998]: I0203 09:01:40.979576 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell2-conductor-scripts" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.031243 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell2-conductor-db-sync-v9jd9"] Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.037275 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1fb98f13-657a-4351-a9f6-11dcfd10d016-scripts\") pod \"nova-cell1-conductor-db-sync-46zpr\" (UID: \"1fb98f13-657a-4351-a9f6-11dcfd10d016\") " pod="openstack/nova-cell1-conductor-db-sync-46zpr" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.037851 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvchh\" (UniqueName: \"kubernetes.io/projected/1fb98f13-657a-4351-a9f6-11dcfd10d016-kube-api-access-jvchh\") pod \"nova-cell1-conductor-db-sync-46zpr\" (UID: \"1fb98f13-657a-4351-a9f6-11dcfd10d016\") " pod="openstack/nova-cell1-conductor-db-sync-46zpr" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.038485 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fb98f13-657a-4351-a9f6-11dcfd10d016-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-46zpr\" (UID: \"1fb98f13-657a-4351-a9f6-11dcfd10d016\") " pod="openstack/nova-cell1-conductor-db-sync-46zpr" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.038647 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fb98f13-657a-4351-a9f6-11dcfd10d016-config-data\") pod \"nova-cell1-conductor-db-sync-46zpr\" (UID: \"1fb98f13-657a-4351-a9f6-11dcfd10d016\") " pod="openstack/nova-cell1-conductor-db-sync-46zpr" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.043314 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.062946 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.095370 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell3-conductor-db-sync-jk9gl"] Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.096876 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell3-conductor-db-sync-jk9gl" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.100216 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell3-conductor-scripts" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.100288 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell3-conductor-config-data" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.142358 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1fb98f13-657a-4351-a9f6-11dcfd10d016-scripts\") pod \"nova-cell1-conductor-db-sync-46zpr\" (UID: \"1fb98f13-657a-4351-a9f6-11dcfd10d016\") " pod="openstack/nova-cell1-conductor-db-sync-46zpr" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.142430 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvchh\" (UniqueName: \"kubernetes.io/projected/1fb98f13-657a-4351-a9f6-11dcfd10d016-kube-api-access-jvchh\") pod \"nova-cell1-conductor-db-sync-46zpr\" (UID: \"1fb98f13-657a-4351-a9f6-11dcfd10d016\") " pod="openstack/nova-cell1-conductor-db-sync-46zpr" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.142460 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0b45c128-79bc-48cf-81f8-de28ca277c36-scripts\") pod \"nova-cell2-conductor-db-sync-v9jd9\" (UID: \"0b45c128-79bc-48cf-81f8-de28ca277c36\") " pod="openstack/nova-cell2-conductor-db-sync-v9jd9" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.142516 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b45c128-79bc-48cf-81f8-de28ca277c36-combined-ca-bundle\") pod \"nova-cell2-conductor-db-sync-v9jd9\" (UID: \"0b45c128-79bc-48cf-81f8-de28ca277c36\") " pod="openstack/nova-cell2-conductor-db-sync-v9jd9" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.142544 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prmm4\" (UniqueName: \"kubernetes.io/projected/0b45c128-79bc-48cf-81f8-de28ca277c36-kube-api-access-prmm4\") pod \"nova-cell2-conductor-db-sync-v9jd9\" (UID: \"0b45c128-79bc-48cf-81f8-de28ca277c36\") " pod="openstack/nova-cell2-conductor-db-sync-v9jd9" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.142616 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b45c128-79bc-48cf-81f8-de28ca277c36-config-data\") pod \"nova-cell2-conductor-db-sync-v9jd9\" (UID: \"0b45c128-79bc-48cf-81f8-de28ca277c36\") " pod="openstack/nova-cell2-conductor-db-sync-v9jd9" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.144253 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fb98f13-657a-4351-a9f6-11dcfd10d016-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-46zpr\" (UID: \"1fb98f13-657a-4351-a9f6-11dcfd10d016\") " pod="openstack/nova-cell1-conductor-db-sync-46zpr" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.144362 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fb98f13-657a-4351-a9f6-11dcfd10d016-config-data\") 
pod \"nova-cell1-conductor-db-sync-46zpr\" (UID: \"1fb98f13-657a-4351-a9f6-11dcfd10d016\") " pod="openstack/nova-cell1-conductor-db-sync-46zpr" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.149798 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell3-conductor-db-sync-jk9gl"] Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.152554 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fb98f13-657a-4351-a9f6-11dcfd10d016-config-data\") pod \"nova-cell1-conductor-db-sync-46zpr\" (UID: \"1fb98f13-657a-4351-a9f6-11dcfd10d016\") " pod="openstack/nova-cell1-conductor-db-sync-46zpr" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.152882 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1fb98f13-657a-4351-a9f6-11dcfd10d016-scripts\") pod \"nova-cell1-conductor-db-sync-46zpr\" (UID: \"1fb98f13-657a-4351-a9f6-11dcfd10d016\") " pod="openstack/nova-cell1-conductor-db-sync-46zpr" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.160940 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvchh\" (UniqueName: \"kubernetes.io/projected/1fb98f13-657a-4351-a9f6-11dcfd10d016-kube-api-access-jvchh\") pod \"nova-cell1-conductor-db-sync-46zpr\" (UID: \"1fb98f13-657a-4351-a9f6-11dcfd10d016\") " pod="openstack/nova-cell1-conductor-db-sync-46zpr" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.185079 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fb98f13-657a-4351-a9f6-11dcfd10d016-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-46zpr\" (UID: \"1fb98f13-657a-4351-a9f6-11dcfd10d016\") " pod="openstack/nova-cell1-conductor-db-sync-46zpr" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.202632 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.259338 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7243f69-0c30-44db-b97a-c287bca3afff-combined-ca-bundle\") pod \"nova-cell3-conductor-db-sync-jk9gl\" (UID: \"a7243f69-0c30-44db-b97a-c287bca3afff\") " pod="openstack/nova-cell3-conductor-db-sync-jk9gl" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.260242 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0b45c128-79bc-48cf-81f8-de28ca277c36-scripts\") pod \"nova-cell2-conductor-db-sync-v9jd9\" (UID: \"0b45c128-79bc-48cf-81f8-de28ca277c36\") " pod="openstack/nova-cell2-conductor-db-sync-v9jd9" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.260644 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7243f69-0c30-44db-b97a-c287bca3afff-scripts\") pod \"nova-cell3-conductor-db-sync-jk9gl\" (UID: \"a7243f69-0c30-44db-b97a-c287bca3afff\") " pod="openstack/nova-cell3-conductor-db-sync-jk9gl" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.260823 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mnk5x\" (UniqueName: \"kubernetes.io/projected/a7243f69-0c30-44db-b97a-c287bca3afff-kube-api-access-mnk5x\") pod 
\"nova-cell3-conductor-db-sync-jk9gl\" (UID: \"a7243f69-0c30-44db-b97a-c287bca3afff\") " pod="openstack/nova-cell3-conductor-db-sync-jk9gl" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.261201 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7243f69-0c30-44db-b97a-c287bca3afff-config-data\") pod \"nova-cell3-conductor-db-sync-jk9gl\" (UID: \"a7243f69-0c30-44db-b97a-c287bca3afff\") " pod="openstack/nova-cell3-conductor-db-sync-jk9gl" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.261284 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b45c128-79bc-48cf-81f8-de28ca277c36-combined-ca-bundle\") pod \"nova-cell2-conductor-db-sync-v9jd9\" (UID: \"0b45c128-79bc-48cf-81f8-de28ca277c36\") " pod="openstack/nova-cell2-conductor-db-sync-v9jd9" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.261313 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prmm4\" (UniqueName: \"kubernetes.io/projected/0b45c128-79bc-48cf-81f8-de28ca277c36-kube-api-access-prmm4\") pod \"nova-cell2-conductor-db-sync-v9jd9\" (UID: \"0b45c128-79bc-48cf-81f8-de28ca277c36\") " pod="openstack/nova-cell2-conductor-db-sync-v9jd9" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.261473 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b45c128-79bc-48cf-81f8-de28ca277c36-config-data\") pod \"nova-cell2-conductor-db-sync-v9jd9\" (UID: \"0b45c128-79bc-48cf-81f8-de28ca277c36\") " pod="openstack/nova-cell2-conductor-db-sync-v9jd9" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.261646 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-46zpr" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.266735 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0b45c128-79bc-48cf-81f8-de28ca277c36-scripts\") pod \"nova-cell2-conductor-db-sync-v9jd9\" (UID: \"0b45c128-79bc-48cf-81f8-de28ca277c36\") " pod="openstack/nova-cell2-conductor-db-sync-v9jd9" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.267284 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b45c128-79bc-48cf-81f8-de28ca277c36-config-data\") pod \"nova-cell2-conductor-db-sync-v9jd9\" (UID: \"0b45c128-79bc-48cf-81f8-de28ca277c36\") " pod="openstack/nova-cell2-conductor-db-sync-v9jd9" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.281950 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b45c128-79bc-48cf-81f8-de28ca277c36-combined-ca-bundle\") pod \"nova-cell2-conductor-db-sync-v9jd9\" (UID: \"0b45c128-79bc-48cf-81f8-de28ca277c36\") " pod="openstack/nova-cell2-conductor-db-sync-v9jd9" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.303906 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prmm4\" (UniqueName: \"kubernetes.io/projected/0b45c128-79bc-48cf-81f8-de28ca277c36-kube-api-access-prmm4\") pod \"nova-cell2-conductor-db-sync-v9jd9\" (UID: \"0b45c128-79bc-48cf-81f8-de28ca277c36\") " pod="openstack/nova-cell2-conductor-db-sync-v9jd9" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.363577 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7243f69-0c30-44db-b97a-c287bca3afff-combined-ca-bundle\") pod \"nova-cell3-conductor-db-sync-jk9gl\" (UID: \"a7243f69-0c30-44db-b97a-c287bca3afff\") " pod="openstack/nova-cell3-conductor-db-sync-jk9gl" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.363700 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7243f69-0c30-44db-b97a-c287bca3afff-scripts\") pod \"nova-cell3-conductor-db-sync-jk9gl\" (UID: \"a7243f69-0c30-44db-b97a-c287bca3afff\") " pod="openstack/nova-cell3-conductor-db-sync-jk9gl" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.363736 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mnk5x\" (UniqueName: \"kubernetes.io/projected/a7243f69-0c30-44db-b97a-c287bca3afff-kube-api-access-mnk5x\") pod \"nova-cell3-conductor-db-sync-jk9gl\" (UID: \"a7243f69-0c30-44db-b97a-c287bca3afff\") " pod="openstack/nova-cell3-conductor-db-sync-jk9gl" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.363765 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7243f69-0c30-44db-b97a-c287bca3afff-config-data\") pod \"nova-cell3-conductor-db-sync-jk9gl\" (UID: \"a7243f69-0c30-44db-b97a-c287bca3afff\") " pod="openstack/nova-cell3-conductor-db-sync-jk9gl" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.367745 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7243f69-0c30-44db-b97a-c287bca3afff-combined-ca-bundle\") pod \"nova-cell3-conductor-db-sync-jk9gl\" (UID: 
\"a7243f69-0c30-44db-b97a-c287bca3afff\") " pod="openstack/nova-cell3-conductor-db-sync-jk9gl" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.370315 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7243f69-0c30-44db-b97a-c287bca3afff-scripts\") pod \"nova-cell3-conductor-db-sync-jk9gl\" (UID: \"a7243f69-0c30-44db-b97a-c287bca3afff\") " pod="openstack/nova-cell3-conductor-db-sync-jk9gl" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.374307 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7243f69-0c30-44db-b97a-c287bca3afff-config-data\") pod \"nova-cell3-conductor-db-sync-jk9gl\" (UID: \"a7243f69-0c30-44db-b97a-c287bca3afff\") " pod="openstack/nova-cell3-conductor-db-sync-jk9gl" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.392545 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mnk5x\" (UniqueName: \"kubernetes.io/projected/a7243f69-0c30-44db-b97a-c287bca3afff-kube-api-access-mnk5x\") pod \"nova-cell3-conductor-db-sync-jk9gl\" (UID: \"a7243f69-0c30-44db-b97a-c287bca3afff\") " pod="openstack/nova-cell3-conductor-db-sync-jk9gl" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.408001 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.430955 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell2-conductor-db-sync-v9jd9" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.625518 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell3-conductor-db-sync-jk9gl" Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.681116 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell2-novncproxy-0"] Feb 03 09:01:41 crc kubenswrapper[4998]: W0203 09:01:41.686420 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podffd397d0_f09c_4352_bdeb_2be3b4d6102f.slice/crio-8c718fc8196151466b09393f249f6ab64d4fb6545b55fdec6148d075498a15c6 WatchSource:0}: Error finding container 8c718fc8196151466b09393f249f6ab64d4fb6545b55fdec6148d075498a15c6: Status 404 returned error can't find the container with id 8c718fc8196151466b09393f249f6ab64d4fb6545b55fdec6148d075498a15c6 Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.692027 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d9d87dc5-tsm89"] Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.700137 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell3-novncproxy-0"] Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.948216 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-46zpr"] Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.973459 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d9d87dc5-tsm89" event={"ID":"2dae9d3a-7f7b-42de-bbf0-020df791db8d","Type":"ContainerStarted","Data":"de9c6dc3daa85f62d459f2a8df84687c1a60df3310441a887a47b994f62ec73f"} Feb 03 09:01:41 crc kubenswrapper[4998]: W0203 09:01:41.973935 4998 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1fb98f13_657a_4351_a9f6_11dcfd10d016.slice/crio-e2b1bdbb26801b9316585f9cef83ccab844740821189e5ed2928a50d0b75e340 WatchSource:0}: Error finding container e2b1bdbb26801b9316585f9cef83ccab844740821189e5ed2928a50d0b75e340: Status 404 returned error can't find the container with id e2b1bdbb26801b9316585f9cef83ccab844740821189e5ed2928a50d0b75e340 Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.975327 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell2-novncproxy-0" event={"ID":"ffd397d0-f09c-4352-bdeb-2be3b4d6102f","Type":"ContainerStarted","Data":"8c718fc8196151466b09393f249f6ab64d4fb6545b55fdec6148d075498a15c6"} Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.976742 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-gq6wg" event={"ID":"ee0c9709-6a32-4f5b-8458-cdb322580330","Type":"ContainerStarted","Data":"d5d68ae0a0a8463547b89e40bc5c0a61fabc5d245b996ead04f399aaa14afeea"} Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.979648 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"033d6aaa-cca8-4ec3-af93-1b03bfb969ee","Type":"ContainerStarted","Data":"a48fa5f7fbabe7c1ea89e65c3016a97bfa923ddb20fc85596758a1ff8b3920c2"} Feb 03 09:01:41 crc kubenswrapper[4998]: I0203 09:01:41.985881 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell3-novncproxy-0" event={"ID":"7cefac34-9996-45f6-9093-5fdf673be4ab","Type":"ContainerStarted","Data":"dc3b1e8a558710f10401ccdd41659abdf77ee40ce9ad2b3ffc770e2126aae20e"} Feb 03 09:01:42 crc kubenswrapper[4998]: I0203 09:01:41.997944 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-gq6wg" podStartSLOduration=2.997912393 podStartE2EDuration="2.997912393s" podCreationTimestamp="2026-02-03 09:01:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 09:01:41.994305991 +0000 UTC m=+8140.280999797" watchObservedRunningTime="2026-02-03 09:01:41.997912393 +0000 UTC m=+8140.284606199" Feb 03 09:01:42 crc kubenswrapper[4998]: I0203 09:01:42.010441 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0d3ff0e2-3700-4bea-beef-1cedd521227a","Type":"ContainerStarted","Data":"db3c73dffecf780d58267d808387878bec3f08f35eb5ed47a40cecc573b2a196"} Feb 03 09:01:42 crc kubenswrapper[4998]: I0203 09:01:42.030544 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0f52b100-501d-42ad-8ecd-f674dab208e5","Type":"ContainerStarted","Data":"e0c22d6624bb7a4bfae06218e5bce8a6d1ef1793d70286e57088d27a4df77971"} Feb 03 09:01:42 crc kubenswrapper[4998]: I0203 09:01:42.032451 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"0604ddb2-60bd-4ce3-aa61-1d1203572bc4","Type":"ContainerStarted","Data":"9a3f6f7e4b26d0176e20e7e19f00e2cd8f3ca346e74853b71309c74662b4b4be"} Feb 03 09:01:42 crc kubenswrapper[4998]: I0203 09:01:42.032619 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell2-conductor-db-sync-v9jd9"] Feb 03 09:01:42 crc kubenswrapper[4998]: W0203 09:01:42.069547 4998 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0b45c128_79bc_48cf_81f8_de28ca277c36.slice/crio-2d399662a5124b0db9cab9bb590a67dbf2406a1aa765f49506fbe2021f40857d WatchSource:0}: Error finding container 2d399662a5124b0db9cab9bb590a67dbf2406a1aa765f49506fbe2021f40857d: Status 404 returned error can't find the container with id 2d399662a5124b0db9cab9bb590a67dbf2406a1aa765f49506fbe2021f40857d Feb 03 09:01:42 crc kubenswrapper[4998]: I0203 09:01:42.179012 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell3-conductor-db-sync-jk9gl"] Feb 03 09:01:43 crc kubenswrapper[4998]: I0203 09:01:43.066753 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-46zpr" event={"ID":"1fb98f13-657a-4351-a9f6-11dcfd10d016","Type":"ContainerStarted","Data":"f30c12123cdef6d56802a25fd5057466ad4d93fbf457729f54850cf52fed68b1"} Feb 03 09:01:43 crc kubenswrapper[4998]: I0203 09:01:43.067074 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-46zpr" event={"ID":"1fb98f13-657a-4351-a9f6-11dcfd10d016","Type":"ContainerStarted","Data":"e2b1bdbb26801b9316585f9cef83ccab844740821189e5ed2928a50d0b75e340"} Feb 03 09:01:43 crc kubenswrapper[4998]: I0203 09:01:43.078126 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d9d87dc5-tsm89" event={"ID":"2dae9d3a-7f7b-42de-bbf0-020df791db8d","Type":"ContainerDied","Data":"3c5d5c7355cb33b7aaf41f341d08618393148899646e19c5c7980b727d0bceaa"} Feb 03 09:01:43 crc kubenswrapper[4998]: I0203 09:01:43.078193 4998 generic.go:334] "Generic (PLEG): container finished" podID="2dae9d3a-7f7b-42de-bbf0-020df791db8d" containerID="3c5d5c7355cb33b7aaf41f341d08618393148899646e19c5c7980b727d0bceaa" exitCode=0 Feb 03 09:01:43 crc kubenswrapper[4998]: I0203 09:01:43.083161 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell3-conductor-db-sync-jk9gl" event={"ID":"a7243f69-0c30-44db-b97a-c287bca3afff","Type":"ContainerStarted","Data":"b713d56296ee7e37e353d41b8aff00784e66f07f95f6ca6b422f4aecda0cf555"} Feb 03 09:01:43 crc kubenswrapper[4998]: I0203 09:01:43.083255 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell3-conductor-db-sync-jk9gl" event={"ID":"a7243f69-0c30-44db-b97a-c287bca3afff","Type":"ContainerStarted","Data":"903d3054fc626c7b5b24ed961b92094b2d8fb240b11c2101da0ad236cb57ccf6"} Feb 03 09:01:43 crc kubenswrapper[4998]: I0203 09:01:43.099969 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-46zpr" podStartSLOduration=3.09994571 podStartE2EDuration="3.09994571s" podCreationTimestamp="2026-02-03 09:01:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 09:01:43.095570095 +0000 UTC m=+8141.382263921" watchObservedRunningTime="2026-02-03 09:01:43.09994571 +0000 UTC m=+8141.386639526" Feb 03 09:01:43 crc kubenswrapper[4998]: I0203 09:01:43.113843 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell2-conductor-db-sync-v9jd9" event={"ID":"0b45c128-79bc-48cf-81f8-de28ca277c36","Type":"ContainerStarted","Data":"6cb8f305e387edff74c0c071ac1c6002c4737f6e428a47a4c2108146da230744"} Feb 03 09:01:43 crc kubenswrapper[4998]: I0203 09:01:43.114230 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell2-conductor-db-sync-v9jd9" 
event={"ID":"0b45c128-79bc-48cf-81f8-de28ca277c36","Type":"ContainerStarted","Data":"2d399662a5124b0db9cab9bb590a67dbf2406a1aa765f49506fbe2021f40857d"} Feb 03 09:01:43 crc kubenswrapper[4998]: I0203 09:01:43.125937 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell3-conductor-db-sync-jk9gl" podStartSLOduration=3.125915947 podStartE2EDuration="3.125915947s" podCreationTimestamp="2026-02-03 09:01:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 09:01:43.113556426 +0000 UTC m=+8141.400250242" watchObservedRunningTime="2026-02-03 09:01:43.125915947 +0000 UTC m=+8141.412609743" Feb 03 09:01:43 crc kubenswrapper[4998]: I0203 09:01:43.155232 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell2-conductor-db-sync-v9jd9" podStartSLOduration=3.155210639 podStartE2EDuration="3.155210639s" podCreationTimestamp="2026-02-03 09:01:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 09:01:43.149072864 +0000 UTC m=+8141.435766680" watchObservedRunningTime="2026-02-03 09:01:43.155210639 +0000 UTC m=+8141.441904455" Feb 03 09:01:45 crc kubenswrapper[4998]: I0203 09:01:45.160122 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0d3ff0e2-3700-4bea-beef-1cedd521227a","Type":"ContainerStarted","Data":"79c52810ab443d057fb91e347ae7e4846255adfdefe5eb1770a94d406cabf591"} Feb 03 09:01:45 crc kubenswrapper[4998]: I0203 09:01:45.167952 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0f52b100-501d-42ad-8ecd-f674dab208e5","Type":"ContainerStarted","Data":"713846ac5b861dc001688f4a1db1e1c17cb811356f2bc8660005ab65ce23da0d"} Feb 03 09:01:45 crc kubenswrapper[4998]: I0203 09:01:45.169923 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"0604ddb2-60bd-4ce3-aa61-1d1203572bc4","Type":"ContainerStarted","Data":"32377f5c180c6e7fb9874bce889810007262a1ad994b3e8064a73bc4a819f2a2"} Feb 03 09:01:45 crc kubenswrapper[4998]: I0203 09:01:45.174178 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d9d87dc5-tsm89" event={"ID":"2dae9d3a-7f7b-42de-bbf0-020df791db8d","Type":"ContainerStarted","Data":"6e64bc42bd236ea05aadeb079eda3fec383c2033a241078022e0193cceda7ad0"} Feb 03 09:01:45 crc kubenswrapper[4998]: I0203 09:01:45.175029 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d9d87dc5-tsm89" Feb 03 09:01:45 crc kubenswrapper[4998]: I0203 09:01:45.180507 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell2-novncproxy-0" event={"ID":"ffd397d0-f09c-4352-bdeb-2be3b4d6102f","Type":"ContainerStarted","Data":"66127255a4f469ee4c0f471bac85babc54ede92da54b669498e29e7edb3e8abe"} Feb 03 09:01:45 crc kubenswrapper[4998]: I0203 09:01:45.181643 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.746513747 podStartE2EDuration="6.181624577s" podCreationTimestamp="2026-02-03 09:01:39 +0000 UTC" firstStartedPulling="2026-02-03 09:01:41.187203498 +0000 UTC m=+8139.473897304" lastFinishedPulling="2026-02-03 09:01:44.622314328 +0000 UTC m=+8142.909008134" observedRunningTime="2026-02-03 09:01:45.178831288 +0000 UTC m=+8143.465525104" 
watchObservedRunningTime="2026-02-03 09:01:45.181624577 +0000 UTC m=+8143.468318373" Feb 03 09:01:45 crc kubenswrapper[4998]: I0203 09:01:45.182368 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"033d6aaa-cca8-4ec3-af93-1b03bfb969ee","Type":"ContainerStarted","Data":"c3b3b81e043f7da9f699e2f89f48d5286c564814a582067a22917bfc508171db"} Feb 03 09:01:45 crc kubenswrapper[4998]: I0203 09:01:45.186970 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell3-novncproxy-0" event={"ID":"7cefac34-9996-45f6-9093-5fdf673be4ab","Type":"ContainerStarted","Data":"2e7dc38716e262f282405c999bc6e7346577a8830301830bc2e12fcae9406586"} Feb 03 09:01:45 crc kubenswrapper[4998]: I0203 09:01:45.210392 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d9d87dc5-tsm89" podStartSLOduration=5.210371643 podStartE2EDuration="5.210371643s" podCreationTimestamp="2026-02-03 09:01:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 09:01:45.202281733 +0000 UTC m=+8143.488975549" watchObservedRunningTime="2026-02-03 09:01:45.210371643 +0000 UTC m=+8143.497065449" Feb 03 09:01:45 crc kubenswrapper[4998]: I0203 09:01:45.232874 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell3-novncproxy-0" podStartSLOduration=2.306993108 podStartE2EDuration="5.232852361s" podCreationTimestamp="2026-02-03 09:01:40 +0000 UTC" firstStartedPulling="2026-02-03 09:01:41.696445545 +0000 UTC m=+8139.983139351" lastFinishedPulling="2026-02-03 09:01:44.622304798 +0000 UTC m=+8142.908998604" observedRunningTime="2026-02-03 09:01:45.216708213 +0000 UTC m=+8143.503402019" watchObservedRunningTime="2026-02-03 09:01:45.232852361 +0000 UTC m=+8143.519546167" Feb 03 09:01:45 crc kubenswrapper[4998]: I0203 09:01:45.242043 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell2-novncproxy-0" podStartSLOduration=2.293801184 podStartE2EDuration="5.242023172s" podCreationTimestamp="2026-02-03 09:01:40 +0000 UTC" firstStartedPulling="2026-02-03 09:01:41.688491099 +0000 UTC m=+8139.975184905" lastFinishedPulling="2026-02-03 09:01:44.636713087 +0000 UTC m=+8142.923406893" observedRunningTime="2026-02-03 09:01:45.235681671 +0000 UTC m=+8143.522375508" watchObservedRunningTime="2026-02-03 09:01:45.242023172 +0000 UTC m=+8143.528716978" Feb 03 09:01:45 crc kubenswrapper[4998]: I0203 09:01:45.553551 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Feb 03 09:01:45 crc kubenswrapper[4998]: I0203 09:01:45.654925 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell2-novncproxy-0" Feb 03 09:01:45 crc kubenswrapper[4998]: I0203 09:01:45.693767 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell3-novncproxy-0" Feb 03 09:01:46 crc kubenswrapper[4998]: I0203 09:01:46.196738 4998 generic.go:334] "Generic (PLEG): container finished" podID="0b45c128-79bc-48cf-81f8-de28ca277c36" containerID="6cb8f305e387edff74c0c071ac1c6002c4737f6e428a47a4c2108146da230744" exitCode=0 Feb 03 09:01:46 crc kubenswrapper[4998]: I0203 09:01:46.196932 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell2-conductor-db-sync-v9jd9" 
event={"ID":"0b45c128-79bc-48cf-81f8-de28ca277c36","Type":"ContainerDied","Data":"6cb8f305e387edff74c0c071ac1c6002c4737f6e428a47a4c2108146da230744"} Feb 03 09:01:46 crc kubenswrapper[4998]: I0203 09:01:46.200227 4998 generic.go:334] "Generic (PLEG): container finished" podID="1fb98f13-657a-4351-a9f6-11dcfd10d016" containerID="f30c12123cdef6d56802a25fd5057466ad4d93fbf457729f54850cf52fed68b1" exitCode=0 Feb 03 09:01:46 crc kubenswrapper[4998]: I0203 09:01:46.200363 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-46zpr" event={"ID":"1fb98f13-657a-4351-a9f6-11dcfd10d016","Type":"ContainerDied","Data":"f30c12123cdef6d56802a25fd5057466ad4d93fbf457729f54850cf52fed68b1"} Feb 03 09:01:46 crc kubenswrapper[4998]: I0203 09:01:46.203021 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0f52b100-501d-42ad-8ecd-f674dab208e5","Type":"ContainerStarted","Data":"ee786f77630c616427dcd4e6be17001210418e94bd63d38e0eef14b4b9998d6a"} Feb 03 09:01:46 crc kubenswrapper[4998]: I0203 09:01:46.204911 4998 generic.go:334] "Generic (PLEG): container finished" podID="a7243f69-0c30-44db-b97a-c287bca3afff" containerID="b713d56296ee7e37e353d41b8aff00784e66f07f95f6ca6b422f4aecda0cf555" exitCode=0 Feb 03 09:01:46 crc kubenswrapper[4998]: I0203 09:01:46.205041 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell3-conductor-db-sync-jk9gl" event={"ID":"a7243f69-0c30-44db-b97a-c287bca3afff","Type":"ContainerDied","Data":"b713d56296ee7e37e353d41b8aff00784e66f07f95f6ca6b422f4aecda0cf555"} Feb 03 09:01:46 crc kubenswrapper[4998]: I0203 09:01:46.207584 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"0604ddb2-60bd-4ce3-aa61-1d1203572bc4","Type":"ContainerStarted","Data":"44fd951aa5b51cbc30ad1efa6eda401e0ddbe832e4632b85f84fbb28d55aa23a"} Feb 03 09:01:46 crc kubenswrapper[4998]: I0203 09:01:46.238220 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=3.609841876 podStartE2EDuration="7.238197443s" podCreationTimestamp="2026-02-03 09:01:39 +0000 UTC" firstStartedPulling="2026-02-03 09:01:41.008377521 +0000 UTC m=+8139.295071327" lastFinishedPulling="2026-02-03 09:01:44.636733088 +0000 UTC m=+8142.923426894" observedRunningTime="2026-02-03 09:01:45.261541006 +0000 UTC m=+8143.548234832" watchObservedRunningTime="2026-02-03 09:01:46.238197443 +0000 UTC m=+8144.524891259" Feb 03 09:01:46 crc kubenswrapper[4998]: I0203 09:01:46.308158 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.6909096569999997 podStartE2EDuration="7.308140848s" podCreationTimestamp="2026-02-03 09:01:39 +0000 UTC" firstStartedPulling="2026-02-03 09:01:41.020110984 +0000 UTC m=+8139.306804790" lastFinishedPulling="2026-02-03 09:01:44.637342165 +0000 UTC m=+8142.924035981" observedRunningTime="2026-02-03 09:01:46.29339719 +0000 UTC m=+8144.580091016" watchObservedRunningTime="2026-02-03 09:01:46.308140848 +0000 UTC m=+8144.594834654" Feb 03 09:01:46 crc kubenswrapper[4998]: I0203 09:01:46.332261 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=4.131478566 podStartE2EDuration="7.332242183s" podCreationTimestamp="2026-02-03 09:01:39 +0000 UTC" firstStartedPulling="2026-02-03 09:01:41.436095864 +0000 UTC m=+8139.722789670" lastFinishedPulling="2026-02-03 09:01:44.636859451 
+0000 UTC m=+8142.923553287" observedRunningTime="2026-02-03 09:01:46.319407238 +0000 UTC m=+8144.606101084" watchObservedRunningTime="2026-02-03 09:01:46.332242183 +0000 UTC m=+8144.618935989" Feb 03 09:01:47 crc kubenswrapper[4998]: I0203 09:01:47.215816 4998 generic.go:334] "Generic (PLEG): container finished" podID="ee0c9709-6a32-4f5b-8458-cdb322580330" containerID="d5d68ae0a0a8463547b89e40bc5c0a61fabc5d245b996ead04f399aaa14afeea" exitCode=0 Feb 03 09:01:47 crc kubenswrapper[4998]: I0203 09:01:47.215947 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-gq6wg" event={"ID":"ee0c9709-6a32-4f5b-8458-cdb322580330","Type":"ContainerDied","Data":"d5d68ae0a0a8463547b89e40bc5c0a61fabc5d245b996ead04f399aaa14afeea"} Feb 03 09:01:47 crc kubenswrapper[4998]: I0203 09:01:47.736844 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-46zpr" Feb 03 09:01:47 crc kubenswrapper[4998]: I0203 09:01:47.743511 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell3-conductor-db-sync-jk9gl" Feb 03 09:01:47 crc kubenswrapper[4998]: I0203 09:01:47.750410 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell2-conductor-db-sync-v9jd9" Feb 03 09:01:47 crc kubenswrapper[4998]: I0203 09:01:47.919310 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7243f69-0c30-44db-b97a-c287bca3afff-config-data\") pod \"a7243f69-0c30-44db-b97a-c287bca3afff\" (UID: \"a7243f69-0c30-44db-b97a-c287bca3afff\") " Feb 03 09:01:47 crc kubenswrapper[4998]: I0203 09:01:47.919753 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7243f69-0c30-44db-b97a-c287bca3afff-scripts\") pod \"a7243f69-0c30-44db-b97a-c287bca3afff\" (UID: \"a7243f69-0c30-44db-b97a-c287bca3afff\") " Feb 03 09:01:47 crc kubenswrapper[4998]: I0203 09:01:47.919928 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7243f69-0c30-44db-b97a-c287bca3afff-combined-ca-bundle\") pod \"a7243f69-0c30-44db-b97a-c287bca3afff\" (UID: \"a7243f69-0c30-44db-b97a-c287bca3afff\") " Feb 03 09:01:47 crc kubenswrapper[4998]: I0203 09:01:47.920056 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b45c128-79bc-48cf-81f8-de28ca277c36-combined-ca-bundle\") pod \"0b45c128-79bc-48cf-81f8-de28ca277c36\" (UID: \"0b45c128-79bc-48cf-81f8-de28ca277c36\") " Feb 03 09:01:47 crc kubenswrapper[4998]: I0203 09:01:47.920204 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fb98f13-657a-4351-a9f6-11dcfd10d016-combined-ca-bundle\") pod \"1fb98f13-657a-4351-a9f6-11dcfd10d016\" (UID: \"1fb98f13-657a-4351-a9f6-11dcfd10d016\") " Feb 03 09:01:47 crc kubenswrapper[4998]: I0203 09:01:47.920369 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b45c128-79bc-48cf-81f8-de28ca277c36-config-data\") pod \"0b45c128-79bc-48cf-81f8-de28ca277c36\" (UID: \"0b45c128-79bc-48cf-81f8-de28ca277c36\") " Feb 03 09:01:47 crc kubenswrapper[4998]: I0203 09:01:47.920542 4998 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-prmm4\" (UniqueName: \"kubernetes.io/projected/0b45c128-79bc-48cf-81f8-de28ca277c36-kube-api-access-prmm4\") pod \"0b45c128-79bc-48cf-81f8-de28ca277c36\" (UID: \"0b45c128-79bc-48cf-81f8-de28ca277c36\") " Feb 03 09:01:47 crc kubenswrapper[4998]: I0203 09:01:47.920708 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnk5x\" (UniqueName: \"kubernetes.io/projected/a7243f69-0c30-44db-b97a-c287bca3afff-kube-api-access-mnk5x\") pod \"a7243f69-0c30-44db-b97a-c287bca3afff\" (UID: \"a7243f69-0c30-44db-b97a-c287bca3afff\") " Feb 03 09:01:47 crc kubenswrapper[4998]: I0203 09:01:47.920854 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fb98f13-657a-4351-a9f6-11dcfd10d016-config-data\") pod \"1fb98f13-657a-4351-a9f6-11dcfd10d016\" (UID: \"1fb98f13-657a-4351-a9f6-11dcfd10d016\") " Feb 03 09:01:47 crc kubenswrapper[4998]: I0203 09:01:47.921045 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jvchh\" (UniqueName: \"kubernetes.io/projected/1fb98f13-657a-4351-a9f6-11dcfd10d016-kube-api-access-jvchh\") pod \"1fb98f13-657a-4351-a9f6-11dcfd10d016\" (UID: \"1fb98f13-657a-4351-a9f6-11dcfd10d016\") " Feb 03 09:01:47 crc kubenswrapper[4998]: I0203 09:01:47.921208 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0b45c128-79bc-48cf-81f8-de28ca277c36-scripts\") pod \"0b45c128-79bc-48cf-81f8-de28ca277c36\" (UID: \"0b45c128-79bc-48cf-81f8-de28ca277c36\") " Feb 03 09:01:47 crc kubenswrapper[4998]: I0203 09:01:47.921396 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1fb98f13-657a-4351-a9f6-11dcfd10d016-scripts\") pod \"1fb98f13-657a-4351-a9f6-11dcfd10d016\" (UID: \"1fb98f13-657a-4351-a9f6-11dcfd10d016\") " Feb 03 09:01:47 crc kubenswrapper[4998]: I0203 09:01:47.926035 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7243f69-0c30-44db-b97a-c287bca3afff-kube-api-access-mnk5x" (OuterVolumeSpecName: "kube-api-access-mnk5x") pod "a7243f69-0c30-44db-b97a-c287bca3afff" (UID: "a7243f69-0c30-44db-b97a-c287bca3afff"). InnerVolumeSpecName "kube-api-access-mnk5x". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:01:47 crc kubenswrapper[4998]: I0203 09:01:47.926517 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fb98f13-657a-4351-a9f6-11dcfd10d016-kube-api-access-jvchh" (OuterVolumeSpecName: "kube-api-access-jvchh") pod "1fb98f13-657a-4351-a9f6-11dcfd10d016" (UID: "1fb98f13-657a-4351-a9f6-11dcfd10d016"). InnerVolumeSpecName "kube-api-access-jvchh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:01:47 crc kubenswrapper[4998]: I0203 09:01:47.927000 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fb98f13-657a-4351-a9f6-11dcfd10d016-scripts" (OuterVolumeSpecName: "scripts") pod "1fb98f13-657a-4351-a9f6-11dcfd10d016" (UID: "1fb98f13-657a-4351-a9f6-11dcfd10d016"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:01:47 crc kubenswrapper[4998]: I0203 09:01:47.927846 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7243f69-0c30-44db-b97a-c287bca3afff-scripts" (OuterVolumeSpecName: "scripts") pod "a7243f69-0c30-44db-b97a-c287bca3afff" (UID: "a7243f69-0c30-44db-b97a-c287bca3afff"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:01:47 crc kubenswrapper[4998]: I0203 09:01:47.937901 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b45c128-79bc-48cf-81f8-de28ca277c36-kube-api-access-prmm4" (OuterVolumeSpecName: "kube-api-access-prmm4") pod "0b45c128-79bc-48cf-81f8-de28ca277c36" (UID: "0b45c128-79bc-48cf-81f8-de28ca277c36"). InnerVolumeSpecName "kube-api-access-prmm4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:01:47 crc kubenswrapper[4998]: I0203 09:01:47.939940 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b45c128-79bc-48cf-81f8-de28ca277c36-scripts" (OuterVolumeSpecName: "scripts") pod "0b45c128-79bc-48cf-81f8-de28ca277c36" (UID: "0b45c128-79bc-48cf-81f8-de28ca277c36"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:01:47 crc kubenswrapper[4998]: I0203 09:01:47.949925 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fb98f13-657a-4351-a9f6-11dcfd10d016-config-data" (OuterVolumeSpecName: "config-data") pod "1fb98f13-657a-4351-a9f6-11dcfd10d016" (UID: "1fb98f13-657a-4351-a9f6-11dcfd10d016"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:01:47 crc kubenswrapper[4998]: I0203 09:01:47.958038 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b45c128-79bc-48cf-81f8-de28ca277c36-config-data" (OuterVolumeSpecName: "config-data") pod "0b45c128-79bc-48cf-81f8-de28ca277c36" (UID: "0b45c128-79bc-48cf-81f8-de28ca277c36"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:01:47 crc kubenswrapper[4998]: I0203 09:01:47.961212 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b45c128-79bc-48cf-81f8-de28ca277c36-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0b45c128-79bc-48cf-81f8-de28ca277c36" (UID: "0b45c128-79bc-48cf-81f8-de28ca277c36"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:01:47 crc kubenswrapper[4998]: I0203 09:01:47.961823 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7243f69-0c30-44db-b97a-c287bca3afff-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a7243f69-0c30-44db-b97a-c287bca3afff" (UID: "a7243f69-0c30-44db-b97a-c287bca3afff"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:01:47 crc kubenswrapper[4998]: I0203 09:01:47.980229 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7243f69-0c30-44db-b97a-c287bca3afff-config-data" (OuterVolumeSpecName: "config-data") pod "a7243f69-0c30-44db-b97a-c287bca3afff" (UID: "a7243f69-0c30-44db-b97a-c287bca3afff"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:01:47 crc kubenswrapper[4998]: I0203 09:01:47.981375 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fb98f13-657a-4351-a9f6-11dcfd10d016-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1fb98f13-657a-4351-a9f6-11dcfd10d016" (UID: "1fb98f13-657a-4351-a9f6-11dcfd10d016"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.024092 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnk5x\" (UniqueName: \"kubernetes.io/projected/a7243f69-0c30-44db-b97a-c287bca3afff-kube-api-access-mnk5x\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.024142 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fb98f13-657a-4351-a9f6-11dcfd10d016-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.024162 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jvchh\" (UniqueName: \"kubernetes.io/projected/1fb98f13-657a-4351-a9f6-11dcfd10d016-kube-api-access-jvchh\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.024180 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0b45c128-79bc-48cf-81f8-de28ca277c36-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.024198 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1fb98f13-657a-4351-a9f6-11dcfd10d016-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.024215 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7243f69-0c30-44db-b97a-c287bca3afff-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.024232 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7243f69-0c30-44db-b97a-c287bca3afff-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.024248 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7243f69-0c30-44db-b97a-c287bca3afff-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.024269 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b45c128-79bc-48cf-81f8-de28ca277c36-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.024287 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fb98f13-657a-4351-a9f6-11dcfd10d016-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.024302 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b45c128-79bc-48cf-81f8-de28ca277c36-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.024319 4998 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-prmm4\" (UniqueName: \"kubernetes.io/projected/0b45c128-79bc-48cf-81f8-de28ca277c36-kube-api-access-prmm4\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.225705 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-46zpr" event={"ID":"1fb98f13-657a-4351-a9f6-11dcfd10d016","Type":"ContainerDied","Data":"e2b1bdbb26801b9316585f9cef83ccab844740821189e5ed2928a50d0b75e340"} Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.225746 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e2b1bdbb26801b9316585f9cef83ccab844740821189e5ed2928a50d0b75e340" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.225807 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-46zpr" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.231919 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell3-conductor-db-sync-jk9gl" event={"ID":"a7243f69-0c30-44db-b97a-c287bca3afff","Type":"ContainerDied","Data":"903d3054fc626c7b5b24ed961b92094b2d8fb240b11c2101da0ad236cb57ccf6"} Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.231993 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="903d3054fc626c7b5b24ed961b92094b2d8fb240b11c2101da0ad236cb57ccf6" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.232823 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell3-conductor-db-sync-jk9gl" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.234908 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell2-conductor-db-sync-v9jd9" event={"ID":"0b45c128-79bc-48cf-81f8-de28ca277c36","Type":"ContainerDied","Data":"2d399662a5124b0db9cab9bb590a67dbf2406a1aa765f49506fbe2021f40857d"} Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.234969 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2d399662a5124b0db9cab9bb590a67dbf2406a1aa765f49506fbe2021f40857d" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.235106 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell2-conductor-db-sync-v9jd9" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.301714 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell2-conductor-0"] Feb 03 09:01:48 crc kubenswrapper[4998]: E0203 09:01:48.302153 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7243f69-0c30-44db-b97a-c287bca3afff" containerName="nova-cell3-conductor-db-sync" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.302171 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7243f69-0c30-44db-b97a-c287bca3afff" containerName="nova-cell3-conductor-db-sync" Feb 03 09:01:48 crc kubenswrapper[4998]: E0203 09:01:48.302193 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b45c128-79bc-48cf-81f8-de28ca277c36" containerName="nova-cell2-conductor-db-sync" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.302199 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b45c128-79bc-48cf-81f8-de28ca277c36" containerName="nova-cell2-conductor-db-sync" Feb 03 09:01:48 crc kubenswrapper[4998]: E0203 09:01:48.302207 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fb98f13-657a-4351-a9f6-11dcfd10d016" containerName="nova-cell1-conductor-db-sync" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.302213 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fb98f13-657a-4351-a9f6-11dcfd10d016" containerName="nova-cell1-conductor-db-sync" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.302523 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fb98f13-657a-4351-a9f6-11dcfd10d016" containerName="nova-cell1-conductor-db-sync" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.302552 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7243f69-0c30-44db-b97a-c287bca3afff" containerName="nova-cell3-conductor-db-sync" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.302564 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b45c128-79bc-48cf-81f8-de28ca277c36" containerName="nova-cell2-conductor-db-sync" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.303185 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell2-conductor-0" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.304823 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell2-conductor-config-data" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.313926 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell2-conductor-0"] Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.393959 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.396541 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.401519 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.416868 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.438038 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell3-conductor-0"] Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.439303 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell3-conductor-0" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.441520 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell3-conductor-config-data" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.452509 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89c6925b-18c7-4964-a604-d66158e99e54-combined-ca-bundle\") pod \"nova-cell2-conductor-0\" (UID: \"89c6925b-18c7-4964-a604-d66158e99e54\") " pod="openstack/nova-cell2-conductor-0" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.452614 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8rgpf\" (UniqueName: \"kubernetes.io/projected/89c6925b-18c7-4964-a604-d66158e99e54-kube-api-access-8rgpf\") pod \"nova-cell2-conductor-0\" (UID: \"89c6925b-18c7-4964-a604-d66158e99e54\") " pod="openstack/nova-cell2-conductor-0" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.452811 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89c6925b-18c7-4964-a604-d66158e99e54-config-data\") pod \"nova-cell2-conductor-0\" (UID: \"89c6925b-18c7-4964-a604-d66158e99e54\") " pod="openstack/nova-cell2-conductor-0" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.462497 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell3-conductor-0"] Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.555472 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bzwfr\" (UniqueName: \"kubernetes.io/projected/211b9c1c-e5d3-4781-a098-fa66abea3a5a-kube-api-access-bzwfr\") pod \"nova-cell1-conductor-0\" (UID: \"211b9c1c-e5d3-4781-a098-fa66abea3a5a\") " pod="openstack/nova-cell1-conductor-0" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.555519 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17913e15-df86-4335-a95c-127ba12b91c9-combined-ca-bundle\") pod \"nova-cell3-conductor-0\" (UID: \"17913e15-df86-4335-a95c-127ba12b91c9\") " pod="openstack/nova-cell3-conductor-0" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.555550 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89c6925b-18c7-4964-a604-d66158e99e54-combined-ca-bundle\") pod \"nova-cell2-conductor-0\" (UID: \"89c6925b-18c7-4964-a604-d66158e99e54\") " pod="openstack/nova-cell2-conductor-0" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.555570 4998 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-8rgpf\" (UniqueName: \"kubernetes.io/projected/89c6925b-18c7-4964-a604-d66158e99e54-kube-api-access-8rgpf\") pod \"nova-cell2-conductor-0\" (UID: \"89c6925b-18c7-4964-a604-d66158e99e54\") " pod="openstack/nova-cell2-conductor-0" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.555614 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17913e15-df86-4335-a95c-127ba12b91c9-config-data\") pod \"nova-cell3-conductor-0\" (UID: \"17913e15-df86-4335-a95c-127ba12b91c9\") " pod="openstack/nova-cell3-conductor-0" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.555636 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89c6925b-18c7-4964-a604-d66158e99e54-config-data\") pod \"nova-cell2-conductor-0\" (UID: \"89c6925b-18c7-4964-a604-d66158e99e54\") " pod="openstack/nova-cell2-conductor-0" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.555674 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wchh\" (UniqueName: \"kubernetes.io/projected/17913e15-df86-4335-a95c-127ba12b91c9-kube-api-access-4wchh\") pod \"nova-cell3-conductor-0\" (UID: \"17913e15-df86-4335-a95c-127ba12b91c9\") " pod="openstack/nova-cell3-conductor-0" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.555700 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/211b9c1c-e5d3-4781-a098-fa66abea3a5a-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"211b9c1c-e5d3-4781-a098-fa66abea3a5a\") " pod="openstack/nova-cell1-conductor-0" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.555745 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/211b9c1c-e5d3-4781-a098-fa66abea3a5a-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"211b9c1c-e5d3-4781-a098-fa66abea3a5a\") " pod="openstack/nova-cell1-conductor-0" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.561106 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89c6925b-18c7-4964-a604-d66158e99e54-combined-ca-bundle\") pod \"nova-cell2-conductor-0\" (UID: \"89c6925b-18c7-4964-a604-d66158e99e54\") " pod="openstack/nova-cell2-conductor-0" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.561289 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89c6925b-18c7-4964-a604-d66158e99e54-config-data\") pod \"nova-cell2-conductor-0\" (UID: \"89c6925b-18c7-4964-a604-d66158e99e54\") " pod="openstack/nova-cell2-conductor-0" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.574807 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8rgpf\" (UniqueName: \"kubernetes.io/projected/89c6925b-18c7-4964-a604-d66158e99e54-kube-api-access-8rgpf\") pod \"nova-cell2-conductor-0\" (UID: \"89c6925b-18c7-4964-a604-d66158e99e54\") " pod="openstack/nova-cell2-conductor-0" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.639686 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell2-conductor-0" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.656993 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bzwfr\" (UniqueName: \"kubernetes.io/projected/211b9c1c-e5d3-4781-a098-fa66abea3a5a-kube-api-access-bzwfr\") pod \"nova-cell1-conductor-0\" (UID: \"211b9c1c-e5d3-4781-a098-fa66abea3a5a\") " pod="openstack/nova-cell1-conductor-0" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.657066 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17913e15-df86-4335-a95c-127ba12b91c9-combined-ca-bundle\") pod \"nova-cell3-conductor-0\" (UID: \"17913e15-df86-4335-a95c-127ba12b91c9\") " pod="openstack/nova-cell3-conductor-0" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.657145 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17913e15-df86-4335-a95c-127ba12b91c9-config-data\") pod \"nova-cell3-conductor-0\" (UID: \"17913e15-df86-4335-a95c-127ba12b91c9\") " pod="openstack/nova-cell3-conductor-0" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.657202 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wchh\" (UniqueName: \"kubernetes.io/projected/17913e15-df86-4335-a95c-127ba12b91c9-kube-api-access-4wchh\") pod \"nova-cell3-conductor-0\" (UID: \"17913e15-df86-4335-a95c-127ba12b91c9\") " pod="openstack/nova-cell3-conductor-0" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.657240 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/211b9c1c-e5d3-4781-a098-fa66abea3a5a-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"211b9c1c-e5d3-4781-a098-fa66abea3a5a\") " pod="openstack/nova-cell1-conductor-0" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.657310 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/211b9c1c-e5d3-4781-a098-fa66abea3a5a-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"211b9c1c-e5d3-4781-a098-fa66abea3a5a\") " pod="openstack/nova-cell1-conductor-0" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.661104 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/211b9c1c-e5d3-4781-a098-fa66abea3a5a-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"211b9c1c-e5d3-4781-a098-fa66abea3a5a\") " pod="openstack/nova-cell1-conductor-0" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.668745 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17913e15-df86-4335-a95c-127ba12b91c9-combined-ca-bundle\") pod \"nova-cell3-conductor-0\" (UID: \"17913e15-df86-4335-a95c-127ba12b91c9\") " pod="openstack/nova-cell3-conductor-0" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.671560 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/211b9c1c-e5d3-4781-a098-fa66abea3a5a-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"211b9c1c-e5d3-4781-a098-fa66abea3a5a\") " pod="openstack/nova-cell1-conductor-0" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.674660 4998 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17913e15-df86-4335-a95c-127ba12b91c9-config-data\") pod \"nova-cell3-conductor-0\" (UID: \"17913e15-df86-4335-a95c-127ba12b91c9\") " pod="openstack/nova-cell3-conductor-0" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.684504 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wchh\" (UniqueName: \"kubernetes.io/projected/17913e15-df86-4335-a95c-127ba12b91c9-kube-api-access-4wchh\") pod \"nova-cell3-conductor-0\" (UID: \"17913e15-df86-4335-a95c-127ba12b91c9\") " pod="openstack/nova-cell3-conductor-0" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.685086 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bzwfr\" (UniqueName: \"kubernetes.io/projected/211b9c1c-e5d3-4781-a098-fa66abea3a5a-kube-api-access-bzwfr\") pod \"nova-cell1-conductor-0\" (UID: \"211b9c1c-e5d3-4781-a098-fa66abea3a5a\") " pod="openstack/nova-cell1-conductor-0" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.734355 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.754832 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-gq6wg" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.764570 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell3-conductor-0" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.862597 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gcndh\" (UniqueName: \"kubernetes.io/projected/ee0c9709-6a32-4f5b-8458-cdb322580330-kube-api-access-gcndh\") pod \"ee0c9709-6a32-4f5b-8458-cdb322580330\" (UID: \"ee0c9709-6a32-4f5b-8458-cdb322580330\") " Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.863317 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee0c9709-6a32-4f5b-8458-cdb322580330-config-data\") pod \"ee0c9709-6a32-4f5b-8458-cdb322580330\" (UID: \"ee0c9709-6a32-4f5b-8458-cdb322580330\") " Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.863339 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee0c9709-6a32-4f5b-8458-cdb322580330-combined-ca-bundle\") pod \"ee0c9709-6a32-4f5b-8458-cdb322580330\" (UID: \"ee0c9709-6a32-4f5b-8458-cdb322580330\") " Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.863462 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee0c9709-6a32-4f5b-8458-cdb322580330-scripts\") pod \"ee0c9709-6a32-4f5b-8458-cdb322580330\" (UID: \"ee0c9709-6a32-4f5b-8458-cdb322580330\") " Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.869549 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee0c9709-6a32-4f5b-8458-cdb322580330-kube-api-access-gcndh" (OuterVolumeSpecName: "kube-api-access-gcndh") pod "ee0c9709-6a32-4f5b-8458-cdb322580330" (UID: "ee0c9709-6a32-4f5b-8458-cdb322580330"). InnerVolumeSpecName "kube-api-access-gcndh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.870669 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee0c9709-6a32-4f5b-8458-cdb322580330-scripts" (OuterVolumeSpecName: "scripts") pod "ee0c9709-6a32-4f5b-8458-cdb322580330" (UID: "ee0c9709-6a32-4f5b-8458-cdb322580330"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.898982 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee0c9709-6a32-4f5b-8458-cdb322580330-config-data" (OuterVolumeSpecName: "config-data") pod "ee0c9709-6a32-4f5b-8458-cdb322580330" (UID: "ee0c9709-6a32-4f5b-8458-cdb322580330"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.909255 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee0c9709-6a32-4f5b-8458-cdb322580330-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ee0c9709-6a32-4f5b-8458-cdb322580330" (UID: "ee0c9709-6a32-4f5b-8458-cdb322580330"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.968550 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee0c9709-6a32-4f5b-8458-cdb322580330-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.968587 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gcndh\" (UniqueName: \"kubernetes.io/projected/ee0c9709-6a32-4f5b-8458-cdb322580330-kube-api-access-gcndh\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.968598 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee0c9709-6a32-4f5b-8458-cdb322580330-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:48 crc kubenswrapper[4998]: I0203 09:01:48.968607 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee0c9709-6a32-4f5b-8458-cdb322580330-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:49 crc kubenswrapper[4998]: I0203 09:01:49.128367 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell2-conductor-0"] Feb 03 09:01:49 crc kubenswrapper[4998]: W0203 09:01:49.134840 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod89c6925b_18c7_4964_a604_d66158e99e54.slice/crio-31d1852211d6f7b1daecdeb9dc8b54ebe76d01fd047ef6e03be0fc584683b24a WatchSource:0}: Error finding container 31d1852211d6f7b1daecdeb9dc8b54ebe76d01fd047ef6e03be0fc584683b24a: Status 404 returned error can't find the container with id 31d1852211d6f7b1daecdeb9dc8b54ebe76d01fd047ef6e03be0fc584683b24a Feb 03 09:01:49 crc kubenswrapper[4998]: W0203 09:01:49.231440 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod17913e15_df86_4335_a95c_127ba12b91c9.slice/crio-aca8ce09d2cae9cec657b44086761bae8fb707d5a46e6f513e052b08164f0d5b WatchSource:0}: Error finding container aca8ce09d2cae9cec657b44086761bae8fb707d5a46e6f513e052b08164f0d5b: Status 404 returned error 
can't find the container with id aca8ce09d2cae9cec657b44086761bae8fb707d5a46e6f513e052b08164f0d5b Feb 03 09:01:49 crc kubenswrapper[4998]: I0203 09:01:49.235025 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell3-conductor-0"] Feb 03 09:01:49 crc kubenswrapper[4998]: I0203 09:01:49.246926 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 03 09:01:49 crc kubenswrapper[4998]: I0203 09:01:49.255322 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell3-conductor-0" event={"ID":"17913e15-df86-4335-a95c-127ba12b91c9","Type":"ContainerStarted","Data":"aca8ce09d2cae9cec657b44086761bae8fb707d5a46e6f513e052b08164f0d5b"} Feb 03 09:01:49 crc kubenswrapper[4998]: I0203 09:01:49.263194 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-gq6wg" event={"ID":"ee0c9709-6a32-4f5b-8458-cdb322580330","Type":"ContainerDied","Data":"797603bf44dcadc4f0b493cab8ddcd3ce5943159785b1045086cba2c87fa9f0c"} Feb 03 09:01:49 crc kubenswrapper[4998]: I0203 09:01:49.263238 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="797603bf44dcadc4f0b493cab8ddcd3ce5943159785b1045086cba2c87fa9f0c" Feb 03 09:01:49 crc kubenswrapper[4998]: I0203 09:01:49.263308 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-gq6wg" Feb 03 09:01:49 crc kubenswrapper[4998]: I0203 09:01:49.268415 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell2-conductor-0" event={"ID":"89c6925b-18c7-4964-a604-d66158e99e54","Type":"ContainerStarted","Data":"31d1852211d6f7b1daecdeb9dc8b54ebe76d01fd047ef6e03be0fc584683b24a"} Feb 03 09:01:49 crc kubenswrapper[4998]: I0203 09:01:49.516455 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 03 09:01:49 crc kubenswrapper[4998]: I0203 09:01:49.516947 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="0f52b100-501d-42ad-8ecd-f674dab208e5" containerName="nova-api-log" containerID="cri-o://713846ac5b861dc001688f4a1db1e1c17cb811356f2bc8660005ab65ce23da0d" gracePeriod=30 Feb 03 09:01:49 crc kubenswrapper[4998]: I0203 09:01:49.517365 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="0f52b100-501d-42ad-8ecd-f674dab208e5" containerName="nova-api-api" containerID="cri-o://ee786f77630c616427dcd4e6be17001210418e94bd63d38e0eef14b4b9998d6a" gracePeriod=30 Feb 03 09:01:49 crc kubenswrapper[4998]: I0203 09:01:49.530918 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 09:01:49 crc kubenswrapper[4998]: I0203 09:01:49.531134 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="0d3ff0e2-3700-4bea-beef-1cedd521227a" containerName="nova-scheduler-scheduler" containerID="cri-o://79c52810ab443d057fb91e347ae7e4846255adfdefe5eb1770a94d406cabf591" gracePeriod=30 Feb 03 09:01:49 crc kubenswrapper[4998]: I0203 09:01:49.557027 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 03 09:01:49 crc kubenswrapper[4998]: I0203 09:01:49.557535 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="0604ddb2-60bd-4ce3-aa61-1d1203572bc4" containerName="nova-metadata-log" 
containerID="cri-o://32377f5c180c6e7fb9874bce889810007262a1ad994b3e8064a73bc4a819f2a2" gracePeriod=30 Feb 03 09:01:49 crc kubenswrapper[4998]: I0203 09:01:49.557598 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="0604ddb2-60bd-4ce3-aa61-1d1203572bc4" containerName="nova-metadata-metadata" containerID="cri-o://44fd951aa5b51cbc30ad1efa6eda401e0ddbe832e4632b85f84fbb28d55aa23a" gracePeriod=30 Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.171309 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.171351 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.188211 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.307051 4998 generic.go:334] "Generic (PLEG): container finished" podID="0f52b100-501d-42ad-8ecd-f674dab208e5" containerID="ee786f77630c616427dcd4e6be17001210418e94bd63d38e0eef14b4b9998d6a" exitCode=0 Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.307094 4998 generic.go:334] "Generic (PLEG): container finished" podID="0f52b100-501d-42ad-8ecd-f674dab208e5" containerID="713846ac5b861dc001688f4a1db1e1c17cb811356f2bc8660005ab65ce23da0d" exitCode=143 Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.307158 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0f52b100-501d-42ad-8ecd-f674dab208e5","Type":"ContainerDied","Data":"ee786f77630c616427dcd4e6be17001210418e94bd63d38e0eef14b4b9998d6a"} Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.307190 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0f52b100-501d-42ad-8ecd-f674dab208e5","Type":"ContainerDied","Data":"713846ac5b861dc001688f4a1db1e1c17cb811356f2bc8660005ab65ce23da0d"} Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.309146 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"211b9c1c-e5d3-4781-a098-fa66abea3a5a","Type":"ContainerStarted","Data":"0f7ebde40d1cbb375570055fa10b7371323cac457839000293ef497948d837c0"} Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.309200 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"211b9c1c-e5d3-4781-a098-fa66abea3a5a","Type":"ContainerStarted","Data":"12e0800bb6bb1dda2212bf6005ce47ab2d45c8813b4ba680ec3f7f9ac1356f1d"} Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.310331 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.312459 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell3-conductor-0" event={"ID":"17913e15-df86-4335-a95c-127ba12b91c9","Type":"ContainerStarted","Data":"d5229de71cd1ad241710daf1e9fec5a0edbafbc5c3cf84a48a9b9333849d3d40"} Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.313645 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell3-conductor-0" Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.328740 4998 generic.go:334] "Generic (PLEG): container finished" podID="0604ddb2-60bd-4ce3-aa61-1d1203572bc4" 
containerID="44fd951aa5b51cbc30ad1efa6eda401e0ddbe832e4632b85f84fbb28d55aa23a" exitCode=0 Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.328772 4998 generic.go:334] "Generic (PLEG): container finished" podID="0604ddb2-60bd-4ce3-aa61-1d1203572bc4" containerID="32377f5c180c6e7fb9874bce889810007262a1ad994b3e8064a73bc4a819f2a2" exitCode=143 Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.328825 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"0604ddb2-60bd-4ce3-aa61-1d1203572bc4","Type":"ContainerDied","Data":"44fd951aa5b51cbc30ad1efa6eda401e0ddbe832e4632b85f84fbb28d55aa23a"} Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.328850 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"0604ddb2-60bd-4ce3-aa61-1d1203572bc4","Type":"ContainerDied","Data":"32377f5c180c6e7fb9874bce889810007262a1ad994b3e8064a73bc4a819f2a2"} Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.331852 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell2-conductor-0" event={"ID":"89c6925b-18c7-4964-a604-d66158e99e54","Type":"ContainerStarted","Data":"beb0a3db99fd6d29d6b32d0df743bb244a40b048e588fc79f6bcf7c45fd00459"} Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.331877 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell2-conductor-0" Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.347851 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.347827853 podStartE2EDuration="2.347827853s" podCreationTimestamp="2026-02-03 09:01:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 09:01:50.328385131 +0000 UTC m=+8148.615078937" watchObservedRunningTime="2026-02-03 09:01:50.347827853 +0000 UTC m=+8148.634521659" Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.353195 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell3-conductor-0" podStartSLOduration=2.353176635 podStartE2EDuration="2.353176635s" podCreationTimestamp="2026-02-03 09:01:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 09:01:50.347048781 +0000 UTC m=+8148.633742587" watchObservedRunningTime="2026-02-03 09:01:50.353176635 +0000 UTC m=+8148.639870441" Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.357918 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.376638 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell2-conductor-0" podStartSLOduration=2.37662133 podStartE2EDuration="2.37662133s" podCreationTimestamp="2026-02-03 09:01:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 09:01:50.365156105 +0000 UTC m=+8148.651849911" watchObservedRunningTime="2026-02-03 09:01:50.37662133 +0000 UTC m=+8148.663315126" Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.492201 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.493001 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.601551 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f52b100-501d-42ad-8ecd-f674dab208e5-logs\") pod \"0f52b100-501d-42ad-8ecd-f674dab208e5\" (UID: \"0f52b100-501d-42ad-8ecd-f674dab208e5\") " Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.601632 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0604ddb2-60bd-4ce3-aa61-1d1203572bc4-combined-ca-bundle\") pod \"0604ddb2-60bd-4ce3-aa61-1d1203572bc4\" (UID: \"0604ddb2-60bd-4ce3-aa61-1d1203572bc4\") " Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.601676 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sq498\" (UniqueName: \"kubernetes.io/projected/0f52b100-501d-42ad-8ecd-f674dab208e5-kube-api-access-sq498\") pod \"0f52b100-501d-42ad-8ecd-f674dab208e5\" (UID: \"0f52b100-501d-42ad-8ecd-f674dab208e5\") " Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.601811 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bvbnc\" (UniqueName: \"kubernetes.io/projected/0604ddb2-60bd-4ce3-aa61-1d1203572bc4-kube-api-access-bvbnc\") pod \"0604ddb2-60bd-4ce3-aa61-1d1203572bc4\" (UID: \"0604ddb2-60bd-4ce3-aa61-1d1203572bc4\") " Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.601857 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f52b100-501d-42ad-8ecd-f674dab208e5-config-data\") pod \"0f52b100-501d-42ad-8ecd-f674dab208e5\" (UID: \"0f52b100-501d-42ad-8ecd-f674dab208e5\") " Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.601893 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f52b100-501d-42ad-8ecd-f674dab208e5-combined-ca-bundle\") pod \"0f52b100-501d-42ad-8ecd-f674dab208e5\" (UID: \"0f52b100-501d-42ad-8ecd-f674dab208e5\") " Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.601956 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0604ddb2-60bd-4ce3-aa61-1d1203572bc4-logs\") pod \"0604ddb2-60bd-4ce3-aa61-1d1203572bc4\" (UID: \"0604ddb2-60bd-4ce3-aa61-1d1203572bc4\") " Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.601982 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0604ddb2-60bd-4ce3-aa61-1d1203572bc4-config-data\") pod \"0604ddb2-60bd-4ce3-aa61-1d1203572bc4\" (UID: \"0604ddb2-60bd-4ce3-aa61-1d1203572bc4\") " Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.604438 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0604ddb2-60bd-4ce3-aa61-1d1203572bc4-logs" (OuterVolumeSpecName: "logs") pod "0604ddb2-60bd-4ce3-aa61-1d1203572bc4" (UID: "0604ddb2-60bd-4ce3-aa61-1d1203572bc4"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.606003 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0f52b100-501d-42ad-8ecd-f674dab208e5-logs" (OuterVolumeSpecName: "logs") pod "0f52b100-501d-42ad-8ecd-f674dab208e5" (UID: "0f52b100-501d-42ad-8ecd-f674dab208e5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.611105 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0604ddb2-60bd-4ce3-aa61-1d1203572bc4-kube-api-access-bvbnc" (OuterVolumeSpecName: "kube-api-access-bvbnc") pod "0604ddb2-60bd-4ce3-aa61-1d1203572bc4" (UID: "0604ddb2-60bd-4ce3-aa61-1d1203572bc4"). InnerVolumeSpecName "kube-api-access-bvbnc". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.632406 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f52b100-501d-42ad-8ecd-f674dab208e5-kube-api-access-sq498" (OuterVolumeSpecName: "kube-api-access-sq498") pod "0f52b100-501d-42ad-8ecd-f674dab208e5" (UID: "0f52b100-501d-42ad-8ecd-f674dab208e5"). InnerVolumeSpecName "kube-api-access-sq498". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.635461 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f52b100-501d-42ad-8ecd-f674dab208e5-config-data" (OuterVolumeSpecName: "config-data") pod "0f52b100-501d-42ad-8ecd-f674dab208e5" (UID: "0f52b100-501d-42ad-8ecd-f674dab208e5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.637681 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0604ddb2-60bd-4ce3-aa61-1d1203572bc4-config-data" (OuterVolumeSpecName: "config-data") pod "0604ddb2-60bd-4ce3-aa61-1d1203572bc4" (UID: "0604ddb2-60bd-4ce3-aa61-1d1203572bc4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.642048 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-57d9d87dc5-tsm89" Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.649470 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f52b100-501d-42ad-8ecd-f674dab208e5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0f52b100-501d-42ad-8ecd-f674dab208e5" (UID: "0f52b100-501d-42ad-8ecd-f674dab208e5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.652375 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell2-novncproxy-0" Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.655502 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0604ddb2-60bd-4ce3-aa61-1d1203572bc4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0604ddb2-60bd-4ce3-aa61-1d1203572bc4" (UID: "0604ddb2-60bd-4ce3-aa61-1d1203572bc4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.676702 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell2-novncproxy-0" Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.695141 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell3-novncproxy-0" Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.721324 4998 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0604ddb2-60bd-4ce3-aa61-1d1203572bc4-logs\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.721356 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0604ddb2-60bd-4ce3-aa61-1d1203572bc4-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.721366 4998 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f52b100-501d-42ad-8ecd-f674dab208e5-logs\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.721374 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0604ddb2-60bd-4ce3-aa61-1d1203572bc4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.721383 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sq498\" (UniqueName: \"kubernetes.io/projected/0f52b100-501d-42ad-8ecd-f674dab208e5-kube-api-access-sq498\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.721391 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bvbnc\" (UniqueName: \"kubernetes.io/projected/0604ddb2-60bd-4ce3-aa61-1d1203572bc4-kube-api-access-bvbnc\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.721399 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f52b100-501d-42ad-8ecd-f674dab208e5-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.721408 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f52b100-501d-42ad-8ecd-f674dab208e5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.729698 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-676b6c965c-hdjhr"] Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.729933 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-676b6c965c-hdjhr" podUID="b1901378-6afc-483b-9734-835bb9475ea1" containerName="dnsmasq-dns" containerID="cri-o://faf68e58708175c15e4b5bc3e86e1cf4fbda2cad8f0115d6bf7fb2ff8883b10f" gracePeriod=10 Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.734009 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell3-novncproxy-0" Feb 03 09:01:50 crc kubenswrapper[4998]: I0203 09:01:50.979179 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.028864 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d3ff0e2-3700-4bea-beef-1cedd521227a-config-data\") pod \"0d3ff0e2-3700-4bea-beef-1cedd521227a\" (UID: \"0d3ff0e2-3700-4bea-beef-1cedd521227a\") " Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.028932 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2x8z7\" (UniqueName: \"kubernetes.io/projected/0d3ff0e2-3700-4bea-beef-1cedd521227a-kube-api-access-2x8z7\") pod \"0d3ff0e2-3700-4bea-beef-1cedd521227a\" (UID: \"0d3ff0e2-3700-4bea-beef-1cedd521227a\") " Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.028968 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d3ff0e2-3700-4bea-beef-1cedd521227a-combined-ca-bundle\") pod \"0d3ff0e2-3700-4bea-beef-1cedd521227a\" (UID: \"0d3ff0e2-3700-4bea-beef-1cedd521227a\") " Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.059026 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d3ff0e2-3700-4bea-beef-1cedd521227a-kube-api-access-2x8z7" (OuterVolumeSpecName: "kube-api-access-2x8z7") pod "0d3ff0e2-3700-4bea-beef-1cedd521227a" (UID: "0d3ff0e2-3700-4bea-beef-1cedd521227a"). InnerVolumeSpecName "kube-api-access-2x8z7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.064229 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d3ff0e2-3700-4bea-beef-1cedd521227a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0d3ff0e2-3700-4bea-beef-1cedd521227a" (UID: "0d3ff0e2-3700-4bea-beef-1cedd521227a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.072261 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d3ff0e2-3700-4bea-beef-1cedd521227a-config-data" (OuterVolumeSpecName: "config-data") pod "0d3ff0e2-3700-4bea-beef-1cedd521227a" (UID: "0d3ff0e2-3700-4bea-beef-1cedd521227a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.131128 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d3ff0e2-3700-4bea-beef-1cedd521227a-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.131161 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2x8z7\" (UniqueName: \"kubernetes.io/projected/0d3ff0e2-3700-4bea-beef-1cedd521227a-kube-api-access-2x8z7\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.131171 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d3ff0e2-3700-4bea-beef-1cedd521227a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.216849 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-676b6c965c-hdjhr" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.335175 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b1901378-6afc-483b-9734-835bb9475ea1-ovsdbserver-sb\") pod \"b1901378-6afc-483b-9734-835bb9475ea1\" (UID: \"b1901378-6afc-483b-9734-835bb9475ea1\") " Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.335341 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mv2tg\" (UniqueName: \"kubernetes.io/projected/b1901378-6afc-483b-9734-835bb9475ea1-kube-api-access-mv2tg\") pod \"b1901378-6afc-483b-9734-835bb9475ea1\" (UID: \"b1901378-6afc-483b-9734-835bb9475ea1\") " Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.335365 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1901378-6afc-483b-9734-835bb9475ea1-config\") pod \"b1901378-6afc-483b-9734-835bb9475ea1\" (UID: \"b1901378-6afc-483b-9734-835bb9475ea1\") " Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.335394 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b1901378-6afc-483b-9734-835bb9475ea1-ovsdbserver-nb\") pod \"b1901378-6afc-483b-9734-835bb9475ea1\" (UID: \"b1901378-6afc-483b-9734-835bb9475ea1\") " Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.335430 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b1901378-6afc-483b-9734-835bb9475ea1-dns-svc\") pod \"b1901378-6afc-483b-9734-835bb9475ea1\" (UID: \"b1901378-6afc-483b-9734-835bb9475ea1\") " Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.339189 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1901378-6afc-483b-9734-835bb9475ea1-kube-api-access-mv2tg" (OuterVolumeSpecName: "kube-api-access-mv2tg") pod "b1901378-6afc-483b-9734-835bb9475ea1" (UID: "b1901378-6afc-483b-9734-835bb9475ea1"). InnerVolumeSpecName "kube-api-access-mv2tg". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.345449 4998 generic.go:334] "Generic (PLEG): container finished" podID="0d3ff0e2-3700-4bea-beef-1cedd521227a" containerID="79c52810ab443d057fb91e347ae7e4846255adfdefe5eb1770a94d406cabf591" exitCode=0 Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.345518 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.345536 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0d3ff0e2-3700-4bea-beef-1cedd521227a","Type":"ContainerDied","Data":"79c52810ab443d057fb91e347ae7e4846255adfdefe5eb1770a94d406cabf591"} Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.345564 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0d3ff0e2-3700-4bea-beef-1cedd521227a","Type":"ContainerDied","Data":"db3c73dffecf780d58267d808387878bec3f08f35eb5ed47a40cecc573b2a196"} Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.345580 4998 scope.go:117] "RemoveContainer" containerID="79c52810ab443d057fb91e347ae7e4846255adfdefe5eb1770a94d406cabf591" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.351496 4998 generic.go:334] "Generic (PLEG): container finished" podID="b1901378-6afc-483b-9734-835bb9475ea1" containerID="faf68e58708175c15e4b5bc3e86e1cf4fbda2cad8f0115d6bf7fb2ff8883b10f" exitCode=0 Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.351558 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-676b6c965c-hdjhr" event={"ID":"b1901378-6afc-483b-9734-835bb9475ea1","Type":"ContainerDied","Data":"faf68e58708175c15e4b5bc3e86e1cf4fbda2cad8f0115d6bf7fb2ff8883b10f"} Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.351582 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-676b6c965c-hdjhr" event={"ID":"b1901378-6afc-483b-9734-835bb9475ea1","Type":"ContainerDied","Data":"c69bb846979ce9003e48939227f91e7b1a12aa6cec4a646df4a5b2eede0c463f"} Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.351633 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-676b6c965c-hdjhr" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.355839 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0f52b100-501d-42ad-8ecd-f674dab208e5","Type":"ContainerDied","Data":"e0c22d6624bb7a4bfae06218e5bce8a6d1ef1793d70286e57088d27a4df77971"} Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.356568 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.365474 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"0604ddb2-60bd-4ce3-aa61-1d1203572bc4","Type":"ContainerDied","Data":"9a3f6f7e4b26d0176e20e7e19f00e2cd8f3ca346e74853b71309c74662b4b4be"} Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.366026 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.382165 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell2-novncproxy-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.401576 4998 scope.go:117] "RemoveContainer" containerID="79c52810ab443d057fb91e347ae7e4846255adfdefe5eb1770a94d406cabf591" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.402117 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell3-novncproxy-0" Feb 03 09:01:51 crc kubenswrapper[4998]: E0203 09:01:51.402220 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79c52810ab443d057fb91e347ae7e4846255adfdefe5eb1770a94d406cabf591\": container with ID starting with 79c52810ab443d057fb91e347ae7e4846255adfdefe5eb1770a94d406cabf591 not found: ID does not exist" containerID="79c52810ab443d057fb91e347ae7e4846255adfdefe5eb1770a94d406cabf591" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.402257 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79c52810ab443d057fb91e347ae7e4846255adfdefe5eb1770a94d406cabf591"} err="failed to get container status \"79c52810ab443d057fb91e347ae7e4846255adfdefe5eb1770a94d406cabf591\": rpc error: code = NotFound desc = could not find container \"79c52810ab443d057fb91e347ae7e4846255adfdefe5eb1770a94d406cabf591\": container with ID starting with 79c52810ab443d057fb91e347ae7e4846255adfdefe5eb1770a94d406cabf591 not found: ID does not exist" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.402283 4998 scope.go:117] "RemoveContainer" containerID="faf68e58708175c15e4b5bc3e86e1cf4fbda2cad8f0115d6bf7fb2ff8883b10f" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.440258 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mv2tg\" (UniqueName: \"kubernetes.io/projected/b1901378-6afc-483b-9734-835bb9475ea1-kube-api-access-mv2tg\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.447818 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b1901378-6afc-483b-9734-835bb9475ea1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b1901378-6afc-483b-9734-835bb9475ea1" (UID: "b1901378-6afc-483b-9734-835bb9475ea1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.463929 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b1901378-6afc-483b-9734-835bb9475ea1-config" (OuterVolumeSpecName: "config") pod "b1901378-6afc-483b-9734-835bb9475ea1" (UID: "b1901378-6afc-483b-9734-835bb9475ea1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.471773 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b1901378-6afc-483b-9734-835bb9475ea1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b1901378-6afc-483b-9734-835bb9475ea1" (UID: "b1901378-6afc-483b-9734-835bb9475ea1"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.502588 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.509477 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b1901378-6afc-483b-9734-835bb9475ea1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b1901378-6afc-483b-9734-835bb9475ea1" (UID: "b1901378-6afc-483b-9734-835bb9475ea1"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.517162 4998 scope.go:117] "RemoveContainer" containerID="f5ffd74af5ad64e21f8d3a414a3be02d8fe8f1074f19bfac15398695aa2adb29" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.534685 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.541217 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b1901378-6afc-483b-9734-835bb9475ea1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.541247 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1901378-6afc-483b-9734-835bb9475ea1-config\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.541256 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b1901378-6afc-483b-9734-835bb9475ea1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.541264 4998 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b1901378-6afc-483b-9734-835bb9475ea1-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.546566 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.557771 4998 scope.go:117] "RemoveContainer" containerID="faf68e58708175c15e4b5bc3e86e1cf4fbda2cad8f0115d6bf7fb2ff8883b10f" Feb 03 09:01:51 crc kubenswrapper[4998]: E0203 09:01:51.558274 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"faf68e58708175c15e4b5bc3e86e1cf4fbda2cad8f0115d6bf7fb2ff8883b10f\": container with ID starting with faf68e58708175c15e4b5bc3e86e1cf4fbda2cad8f0115d6bf7fb2ff8883b10f not found: ID does not exist" containerID="faf68e58708175c15e4b5bc3e86e1cf4fbda2cad8f0115d6bf7fb2ff8883b10f" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.558317 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"faf68e58708175c15e4b5bc3e86e1cf4fbda2cad8f0115d6bf7fb2ff8883b10f"} err="failed to get container status \"faf68e58708175c15e4b5bc3e86e1cf4fbda2cad8f0115d6bf7fb2ff8883b10f\": rpc error: code = NotFound desc = could not find container \"faf68e58708175c15e4b5bc3e86e1cf4fbda2cad8f0115d6bf7fb2ff8883b10f\": container with ID starting with faf68e58708175c15e4b5bc3e86e1cf4fbda2cad8f0115d6bf7fb2ff8883b10f not found: ID does not exist" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.558345 4998 scope.go:117] "RemoveContainer" 
containerID="f5ffd74af5ad64e21f8d3a414a3be02d8fe8f1074f19bfac15398695aa2adb29" Feb 03 09:01:51 crc kubenswrapper[4998]: E0203 09:01:51.559149 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f5ffd74af5ad64e21f8d3a414a3be02d8fe8f1074f19bfac15398695aa2adb29\": container with ID starting with f5ffd74af5ad64e21f8d3a414a3be02d8fe8f1074f19bfac15398695aa2adb29 not found: ID does not exist" containerID="f5ffd74af5ad64e21f8d3a414a3be02d8fe8f1074f19bfac15398695aa2adb29" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.559180 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5ffd74af5ad64e21f8d3a414a3be02d8fe8f1074f19bfac15398695aa2adb29"} err="failed to get container status \"f5ffd74af5ad64e21f8d3a414a3be02d8fe8f1074f19bfac15398695aa2adb29\": rpc error: code = NotFound desc = could not find container \"f5ffd74af5ad64e21f8d3a414a3be02d8fe8f1074f19bfac15398695aa2adb29\": container with ID starting with f5ffd74af5ad64e21f8d3a414a3be02d8fe8f1074f19bfac15398695aa2adb29 not found: ID does not exist" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.559201 4998 scope.go:117] "RemoveContainer" containerID="ee786f77630c616427dcd4e6be17001210418e94bd63d38e0eef14b4b9998d6a" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.560335 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Feb 03 09:01:51 crc kubenswrapper[4998]: E0203 09:01:51.560702 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1901378-6afc-483b-9734-835bb9475ea1" containerName="dnsmasq-dns" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.560720 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1901378-6afc-483b-9734-835bb9475ea1" containerName="dnsmasq-dns" Feb 03 09:01:51 crc kubenswrapper[4998]: E0203 09:01:51.560732 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f52b100-501d-42ad-8ecd-f674dab208e5" containerName="nova-api-api" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.560738 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f52b100-501d-42ad-8ecd-f674dab208e5" containerName="nova-api-api" Feb 03 09:01:51 crc kubenswrapper[4998]: E0203 09:01:51.560748 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0604ddb2-60bd-4ce3-aa61-1d1203572bc4" containerName="nova-metadata-log" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.560754 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="0604ddb2-60bd-4ce3-aa61-1d1203572bc4" containerName="nova-metadata-log" Feb 03 09:01:51 crc kubenswrapper[4998]: E0203 09:01:51.560776 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f52b100-501d-42ad-8ecd-f674dab208e5" containerName="nova-api-log" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.560795 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f52b100-501d-42ad-8ecd-f674dab208e5" containerName="nova-api-log" Feb 03 09:01:51 crc kubenswrapper[4998]: E0203 09:01:51.560807 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0604ddb2-60bd-4ce3-aa61-1d1203572bc4" containerName="nova-metadata-metadata" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.560813 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="0604ddb2-60bd-4ce3-aa61-1d1203572bc4" containerName="nova-metadata-metadata" Feb 03 09:01:51 crc kubenswrapper[4998]: E0203 09:01:51.560905 4998 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="ee0c9709-6a32-4f5b-8458-cdb322580330" containerName="nova-manage" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.560913 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee0c9709-6a32-4f5b-8458-cdb322580330" containerName="nova-manage" Feb 03 09:01:51 crc kubenswrapper[4998]: E0203 09:01:51.560923 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1901378-6afc-483b-9734-835bb9475ea1" containerName="init" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.560929 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1901378-6afc-483b-9734-835bb9475ea1" containerName="init" Feb 03 09:01:51 crc kubenswrapper[4998]: E0203 09:01:51.560940 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d3ff0e2-3700-4bea-beef-1cedd521227a" containerName="nova-scheduler-scheduler" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.560946 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d3ff0e2-3700-4bea-beef-1cedd521227a" containerName="nova-scheduler-scheduler" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.561127 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee0c9709-6a32-4f5b-8458-cdb322580330" containerName="nova-manage" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.561140 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="0604ddb2-60bd-4ce3-aa61-1d1203572bc4" containerName="nova-metadata-log" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.561149 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d3ff0e2-3700-4bea-beef-1cedd521227a" containerName="nova-scheduler-scheduler" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.561163 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f52b100-501d-42ad-8ecd-f674dab208e5" containerName="nova-api-api" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.561171 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f52b100-501d-42ad-8ecd-f674dab208e5" containerName="nova-api-log" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.561182 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1901378-6afc-483b-9734-835bb9475ea1" containerName="dnsmasq-dns" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.561192 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="0604ddb2-60bd-4ce3-aa61-1d1203572bc4" containerName="nova-metadata-metadata" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.564675 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.568565 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.573502 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.585160 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.599887 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.609896 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.612355 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.616425 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.642212 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b008eded-28cd-4f74-9b31-f1e23f1f4127-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b008eded-28cd-4f74-9b31-f1e23f1f4127\") " pod="openstack/nova-metadata-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.642258 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkwhl\" (UniqueName: \"kubernetes.io/projected/b008eded-28cd-4f74-9b31-f1e23f1f4127-kube-api-access-qkwhl\") pod \"nova-metadata-0\" (UID: \"b008eded-28cd-4f74-9b31-f1e23f1f4127\") " pod="openstack/nova-metadata-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.642324 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b008eded-28cd-4f74-9b31-f1e23f1f4127-config-data\") pod \"nova-metadata-0\" (UID: \"b008eded-28cd-4f74-9b31-f1e23f1f4127\") " pod="openstack/nova-metadata-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.642352 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07ea7e95-f31f-4699-9e9b-c837c029c9df-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"07ea7e95-f31f-4699-9e9b-c837c029c9df\") " pod="openstack/nova-api-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.642371 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lgmj8\" (UniqueName: \"kubernetes.io/projected/07ea7e95-f31f-4699-9e9b-c837c029c9df-kube-api-access-lgmj8\") pod \"nova-api-0\" (UID: \"07ea7e95-f31f-4699-9e9b-c837c029c9df\") " pod="openstack/nova-api-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.642395 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/07ea7e95-f31f-4699-9e9b-c837c029c9df-config-data\") pod \"nova-api-0\" (UID: \"07ea7e95-f31f-4699-9e9b-c837c029c9df\") " pod="openstack/nova-api-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.642426 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/07ea7e95-f31f-4699-9e9b-c837c029c9df-logs\") pod \"nova-api-0\" (UID: \"07ea7e95-f31f-4699-9e9b-c837c029c9df\") " pod="openstack/nova-api-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.642443 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b008eded-28cd-4f74-9b31-f1e23f1f4127-logs\") pod \"nova-metadata-0\" (UID: \"b008eded-28cd-4f74-9b31-f1e23f1f4127\") " pod="openstack/nova-metadata-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.648387 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.656130 4998 scope.go:117] "RemoveContainer" containerID="713846ac5b861dc001688f4a1db1e1c17cb811356f2bc8660005ab65ce23da0d" 
Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.660870 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.691671 4998 scope.go:117] "RemoveContainer" containerID="44fd951aa5b51cbc30ad1efa6eda401e0ddbe832e4632b85f84fbb28d55aa23a" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.707204 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.708546 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.711017 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.712542 4998 scope.go:117] "RemoveContainer" containerID="32377f5c180c6e7fb9874bce889810007262a1ad994b3e8064a73bc4a819f2a2" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.718905 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.744478 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/07ea7e95-f31f-4699-9e9b-c837c029c9df-config-data\") pod \"nova-api-0\" (UID: \"07ea7e95-f31f-4699-9e9b-c837c029c9df\") " pod="openstack/nova-api-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.744551 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/07ea7e95-f31f-4699-9e9b-c837c029c9df-logs\") pod \"nova-api-0\" (UID: \"07ea7e95-f31f-4699-9e9b-c837c029c9df\") " pod="openstack/nova-api-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.744579 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2nc2g\" (UniqueName: \"kubernetes.io/projected/75305be1-077f-4a27-841a-38dc0e87610a-kube-api-access-2nc2g\") pod \"nova-scheduler-0\" (UID: \"75305be1-077f-4a27-841a-38dc0e87610a\") " pod="openstack/nova-scheduler-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.744600 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b008eded-28cd-4f74-9b31-f1e23f1f4127-logs\") pod \"nova-metadata-0\" (UID: \"b008eded-28cd-4f74-9b31-f1e23f1f4127\") " pod="openstack/nova-metadata-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.744654 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b008eded-28cd-4f74-9b31-f1e23f1f4127-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b008eded-28cd-4f74-9b31-f1e23f1f4127\") " pod="openstack/nova-metadata-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.744681 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkwhl\" (UniqueName: \"kubernetes.io/projected/b008eded-28cd-4f74-9b31-f1e23f1f4127-kube-api-access-qkwhl\") pod \"nova-metadata-0\" (UID: \"b008eded-28cd-4f74-9b31-f1e23f1f4127\") " pod="openstack/nova-metadata-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.745175 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/07ea7e95-f31f-4699-9e9b-c837c029c9df-logs\") pod 
\"nova-api-0\" (UID: \"07ea7e95-f31f-4699-9e9b-c837c029c9df\") " pod="openstack/nova-api-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.745279 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75305be1-077f-4a27-841a-38dc0e87610a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"75305be1-077f-4a27-841a-38dc0e87610a\") " pod="openstack/nova-scheduler-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.745331 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b008eded-28cd-4f74-9b31-f1e23f1f4127-logs\") pod \"nova-metadata-0\" (UID: \"b008eded-28cd-4f74-9b31-f1e23f1f4127\") " pod="openstack/nova-metadata-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.745357 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75305be1-077f-4a27-841a-38dc0e87610a-config-data\") pod \"nova-scheduler-0\" (UID: \"75305be1-077f-4a27-841a-38dc0e87610a\") " pod="openstack/nova-scheduler-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.745488 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b008eded-28cd-4f74-9b31-f1e23f1f4127-config-data\") pod \"nova-metadata-0\" (UID: \"b008eded-28cd-4f74-9b31-f1e23f1f4127\") " pod="openstack/nova-metadata-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.745587 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07ea7e95-f31f-4699-9e9b-c837c029c9df-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"07ea7e95-f31f-4699-9e9b-c837c029c9df\") " pod="openstack/nova-api-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.745640 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lgmj8\" (UniqueName: \"kubernetes.io/projected/07ea7e95-f31f-4699-9e9b-c837c029c9df-kube-api-access-lgmj8\") pod \"nova-api-0\" (UID: \"07ea7e95-f31f-4699-9e9b-c837c029c9df\") " pod="openstack/nova-api-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.750740 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b008eded-28cd-4f74-9b31-f1e23f1f4127-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b008eded-28cd-4f74-9b31-f1e23f1f4127\") " pod="openstack/nova-metadata-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.750744 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07ea7e95-f31f-4699-9e9b-c837c029c9df-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"07ea7e95-f31f-4699-9e9b-c837c029c9df\") " pod="openstack/nova-api-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.754508 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/07ea7e95-f31f-4699-9e9b-c837c029c9df-config-data\") pod \"nova-api-0\" (UID: \"07ea7e95-f31f-4699-9e9b-c837c029c9df\") " pod="openstack/nova-api-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.756958 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-676b6c965c-hdjhr"] Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.758435 4998 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b008eded-28cd-4f74-9b31-f1e23f1f4127-config-data\") pod \"nova-metadata-0\" (UID: \"b008eded-28cd-4f74-9b31-f1e23f1f4127\") " pod="openstack/nova-metadata-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.761266 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkwhl\" (UniqueName: \"kubernetes.io/projected/b008eded-28cd-4f74-9b31-f1e23f1f4127-kube-api-access-qkwhl\") pod \"nova-metadata-0\" (UID: \"b008eded-28cd-4f74-9b31-f1e23f1f4127\") " pod="openstack/nova-metadata-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.761864 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lgmj8\" (UniqueName: \"kubernetes.io/projected/07ea7e95-f31f-4699-9e9b-c837c029c9df-kube-api-access-lgmj8\") pod \"nova-api-0\" (UID: \"07ea7e95-f31f-4699-9e9b-c837c029c9df\") " pod="openstack/nova-api-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.766070 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-676b6c965c-hdjhr"] Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.853145 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2nc2g\" (UniqueName: \"kubernetes.io/projected/75305be1-077f-4a27-841a-38dc0e87610a-kube-api-access-2nc2g\") pod \"nova-scheduler-0\" (UID: \"75305be1-077f-4a27-841a-38dc0e87610a\") " pod="openstack/nova-scheduler-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.853258 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75305be1-077f-4a27-841a-38dc0e87610a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"75305be1-077f-4a27-841a-38dc0e87610a\") " pod="openstack/nova-scheduler-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.853302 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75305be1-077f-4a27-841a-38dc0e87610a-config-data\") pod \"nova-scheduler-0\" (UID: \"75305be1-077f-4a27-841a-38dc0e87610a\") " pod="openstack/nova-scheduler-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.857570 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75305be1-077f-4a27-841a-38dc0e87610a-config-data\") pod \"nova-scheduler-0\" (UID: \"75305be1-077f-4a27-841a-38dc0e87610a\") " pod="openstack/nova-scheduler-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.860161 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75305be1-077f-4a27-841a-38dc0e87610a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"75305be1-077f-4a27-841a-38dc0e87610a\") " pod="openstack/nova-scheduler-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.873481 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2nc2g\" (UniqueName: \"kubernetes.io/projected/75305be1-077f-4a27-841a-38dc0e87610a-kube-api-access-2nc2g\") pod \"nova-scheduler-0\" (UID: \"75305be1-077f-4a27-841a-38dc0e87610a\") " pod="openstack/nova-scheduler-0" Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.967686 4998 util.go:30] "No sandbox for pod can be found. 
Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.967686 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Feb 03 09:01:51 crc kubenswrapper[4998]: I0203 09:01:51.984673 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Feb 03 09:01:52 crc kubenswrapper[4998]: I0203 09:01:52.033123 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Feb 03 09:01:52 crc kubenswrapper[4998]: I0203 09:01:52.438157 4998 scope.go:117] "RemoveContainer" containerID="f0960bf93408fbd80b1af606d0b0b825518c12f926ca6f6b8748be736ddfda77"
Feb 03 09:01:52 crc kubenswrapper[4998]: E0203 09:01:52.438765 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 09:01:52 crc kubenswrapper[4998]: I0203 09:01:52.442882 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0604ddb2-60bd-4ce3-aa61-1d1203572bc4" path="/var/lib/kubelet/pods/0604ddb2-60bd-4ce3-aa61-1d1203572bc4/volumes"
Feb 03 09:01:52 crc kubenswrapper[4998]: I0203 09:01:52.443788 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d3ff0e2-3700-4bea-beef-1cedd521227a" path="/var/lib/kubelet/pods/0d3ff0e2-3700-4bea-beef-1cedd521227a/volumes"
Feb 03 09:01:52 crc kubenswrapper[4998]: I0203 09:01:52.444292 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f52b100-501d-42ad-8ecd-f674dab208e5" path="/var/lib/kubelet/pods/0f52b100-501d-42ad-8ecd-f674dab208e5/volumes"
Feb 03 09:01:52 crc kubenswrapper[4998]: I0203 09:01:52.445532 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1901378-6afc-483b-9734-835bb9475ea1" path="/var/lib/kubelet/pods/b1901378-6afc-483b-9734-835bb9475ea1/volumes"
Feb 03 09:01:52 crc kubenswrapper[4998]: I0203 09:01:52.485996 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Feb 03 09:01:52 crc kubenswrapper[4998]: W0203 09:01:52.490470 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod07ea7e95_f31f_4699_9e9b_c837c029c9df.slice/crio-518f24f9497580c0603ea90f32eebbb33f1539b89c674fd79ddb0fe4efd3d742 WatchSource:0}: Error finding container 518f24f9497580c0603ea90f32eebbb33f1539b89c674fd79ddb0fe4efd3d742: Status 404 returned error can't find the container with id 518f24f9497580c0603ea90f32eebbb33f1539b89c674fd79ddb0fe4efd3d742
Feb 03 09:01:52 crc kubenswrapper[4998]: W0203 09:01:52.556377 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb008eded_28cd_4f74_9b31_f1e23f1f4127.slice/crio-ffed95f57f01abb8b5e2a2bc0dd6feaf8f0c4cebe91e60b7c79d05209db469db WatchSource:0}: Error finding container ffed95f57f01abb8b5e2a2bc0dd6feaf8f0c4cebe91e60b7c79d05209db469db: Status 404 returned error can't find the container with id ffed95f57f01abb8b5e2a2bc0dd6feaf8f0c4cebe91e60b7c79d05209db469db
Feb 03 09:01:52 crc kubenswrapper[4998]: I0203 09:01:52.556489 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Feb 03 09:01:52 crc kubenswrapper[4998]: W0203 09:01:52.557374 4998 manager.go:1169] Failed to process watch
event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod75305be1_077f_4a27_841a_38dc0e87610a.slice/crio-c3c2c239c58806dfc5cda55a6f84f5e32386ff01a97eea2b91eea12a3c0dd9b9 WatchSource:0}: Error finding container c3c2c239c58806dfc5cda55a6f84f5e32386ff01a97eea2b91eea12a3c0dd9b9: Status 404 returned error can't find the container with id c3c2c239c58806dfc5cda55a6f84f5e32386ff01a97eea2b91eea12a3c0dd9b9 Feb 03 09:01:52 crc kubenswrapper[4998]: I0203 09:01:52.566020 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 09:01:53 crc kubenswrapper[4998]: I0203 09:01:53.398652 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"07ea7e95-f31f-4699-9e9b-c837c029c9df","Type":"ContainerStarted","Data":"3a435f369f6e622e1be4dc0cd683a83447167df8333afd554a299d335113c05f"} Feb 03 09:01:53 crc kubenswrapper[4998]: I0203 09:01:53.398968 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"07ea7e95-f31f-4699-9e9b-c837c029c9df","Type":"ContainerStarted","Data":"383898d1e774e0d6fee496f1821254e29da6d084034341559f4f41fe59a5ae61"} Feb 03 09:01:53 crc kubenswrapper[4998]: I0203 09:01:53.398980 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"07ea7e95-f31f-4699-9e9b-c837c029c9df","Type":"ContainerStarted","Data":"518f24f9497580c0603ea90f32eebbb33f1539b89c674fd79ddb0fe4efd3d742"} Feb 03 09:01:53 crc kubenswrapper[4998]: I0203 09:01:53.401217 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b008eded-28cd-4f74-9b31-f1e23f1f4127","Type":"ContainerStarted","Data":"692ec0d4ca8739d5e75428914b294bed8e9011477a405f91211280ce8c4d2858"} Feb 03 09:01:53 crc kubenswrapper[4998]: I0203 09:01:53.401245 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b008eded-28cd-4f74-9b31-f1e23f1f4127","Type":"ContainerStarted","Data":"7f007c1f49dc9c377577ef182003c363ee7759342a2cffaf2877fb39b3ac1592"} Feb 03 09:01:53 crc kubenswrapper[4998]: I0203 09:01:53.401255 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b008eded-28cd-4f74-9b31-f1e23f1f4127","Type":"ContainerStarted","Data":"ffed95f57f01abb8b5e2a2bc0dd6feaf8f0c4cebe91e60b7c79d05209db469db"} Feb 03 09:01:53 crc kubenswrapper[4998]: I0203 09:01:53.402891 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"75305be1-077f-4a27-841a-38dc0e87610a","Type":"ContainerStarted","Data":"7a9f6c55cafa81615197dfa9a12540ceafaf70ce2f5ce8885f8ad569762a585d"} Feb 03 09:01:53 crc kubenswrapper[4998]: I0203 09:01:53.402941 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"75305be1-077f-4a27-841a-38dc0e87610a","Type":"ContainerStarted","Data":"c3c2c239c58806dfc5cda55a6f84f5e32386ff01a97eea2b91eea12a3c0dd9b9"} Feb 03 09:01:53 crc kubenswrapper[4998]: I0203 09:01:53.421166 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.421144372 podStartE2EDuration="2.421144372s" podCreationTimestamp="2026-02-03 09:01:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 09:01:53.416707057 +0000 UTC m=+8151.703400873" watchObservedRunningTime="2026-02-03 09:01:53.421144372 +0000 UTC m=+8151.707838178" Feb 03 09:01:53 
crc kubenswrapper[4998]: I0203 09:01:53.447206 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.447183442 podStartE2EDuration="2.447183442s" podCreationTimestamp="2026-02-03 09:01:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 09:01:53.437029673 +0000 UTC m=+8151.723723489" watchObservedRunningTime="2026-02-03 09:01:53.447183442 +0000 UTC m=+8151.733877248"
Feb 03 09:01:53 crc kubenswrapper[4998]: I0203 09:01:53.456576 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.456557978 podStartE2EDuration="2.456557978s" podCreationTimestamp="2026-02-03 09:01:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 09:01:53.451235877 +0000 UTC m=+8151.737929693" watchObservedRunningTime="2026-02-03 09:01:53.456557978 +0000 UTC m=+8151.743251804"
Feb 03 09:01:56 crc kubenswrapper[4998]: I0203 09:01:56.993510 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Feb 03 09:01:56 crc kubenswrapper[4998]: I0203 09:01:56.994063 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Feb 03 09:01:57 crc kubenswrapper[4998]: I0203 09:01:57.034624 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Feb 03 09:01:58 crc kubenswrapper[4998]: I0203 09:01:58.668226 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell2-conductor-0"
Feb 03 09:01:58 crc kubenswrapper[4998]: I0203 09:01:58.768308 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0"
Feb 03 09:01:58 crc kubenswrapper[4998]: I0203 09:01:58.800682 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell3-conductor-0"
Feb 03 09:01:59 crc kubenswrapper[4998]: I0203 09:01:59.132122 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell2-cell-mapping-r6m22"]
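[Editor's note] The pod_startup_latency_tracker.go:104 entries above record how long each pod took from creation to observed-running; podStartSLOduration excludes image-pull time, and the zeroed firstStartedPulling/lastFinishedPulling timestamps here indicate no pull was needed. An editor-added sketch (not part of the capture) for tabulating those figures from entries in this format:

    import re

    # Pod name and SLO startup duration (seconds) from
    # "Observed pod startup duration" entries.
    SLO = re.compile(
        r'"Observed pod startup duration" pod="(?P<pod>[^"]+)"'
        r' podStartSLOduration=(?P<slo>[\d.]+)'
    )

    def startup_slos(lines):
        """Yield (pod, seconds), e.g. ('openstack/nova-api-0', 2.421144372)."""
        for line in lines:
            m = SLO.search(line)
            if m:
                yield m.group("pod"), float(m.group("slo"))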
Feb 03 09:01:59 crc kubenswrapper[4998]: I0203 09:01:59.133293 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell2-cell-mapping-r6m22"
Feb 03 09:01:59 crc kubenswrapper[4998]: I0203 09:01:59.135595 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell2-manage-config-data"
Feb 03 09:01:59 crc kubenswrapper[4998]: I0203 09:01:59.136326 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell2-manage-scripts"
Feb 03 09:01:59 crc kubenswrapper[4998]: I0203 09:01:59.164507 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell2-cell-mapping-r6m22"]
Feb 03 09:01:59 crc kubenswrapper[4998]: I0203 09:01:59.297445 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef-scripts\") pod \"nova-cell2-cell-mapping-r6m22\" (UID: \"2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef\") " pod="openstack/nova-cell2-cell-mapping-r6m22"
Feb 03 09:01:59 crc kubenswrapper[4998]: I0203 09:01:59.297504 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jspdw\" (UniqueName: \"kubernetes.io/projected/2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef-kube-api-access-jspdw\") pod \"nova-cell2-cell-mapping-r6m22\" (UID: \"2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef\") " pod="openstack/nova-cell2-cell-mapping-r6m22"
Feb 03 09:01:59 crc kubenswrapper[4998]: I0203 09:01:59.297543 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef-combined-ca-bundle\") pod \"nova-cell2-cell-mapping-r6m22\" (UID: \"2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef\") " pod="openstack/nova-cell2-cell-mapping-r6m22"
Feb 03 09:01:59 crc kubenswrapper[4998]: I0203 09:01:59.297903 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef-config-data\") pod \"nova-cell2-cell-mapping-r6m22\" (UID: \"2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef\") " pod="openstack/nova-cell2-cell-mapping-r6m22"
Feb 03 09:01:59 crc kubenswrapper[4998]: I0203 09:01:59.399714 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef-config-data\") pod \"nova-cell2-cell-mapping-r6m22\" (UID: \"2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef\") " pod="openstack/nova-cell2-cell-mapping-r6m22"
Feb 03 09:01:59 crc kubenswrapper[4998]: I0203 09:01:59.399961 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef-scripts\") pod \"nova-cell2-cell-mapping-r6m22\" (UID: \"2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef\") " pod="openstack/nova-cell2-cell-mapping-r6m22"
Feb 03 09:01:59 crc kubenswrapper[4998]: I0203 09:01:59.400025 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jspdw\" (UniqueName: \"kubernetes.io/projected/2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef-kube-api-access-jspdw\") pod \"nova-cell2-cell-mapping-r6m22\" (UID: \"2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef\") " pod="openstack/nova-cell2-cell-mapping-r6m22"
Feb 03 09:01:59 crc kubenswrapper[4998]: I0203 09:01:59.400106 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName:
\"kubernetes.io/secret/2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef-combined-ca-bundle\") pod \"nova-cell2-cell-mapping-r6m22\" (UID: \"2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef\") " pod="openstack/nova-cell2-cell-mapping-r6m22" Feb 03 09:01:59 crc kubenswrapper[4998]: I0203 09:01:59.407239 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef-combined-ca-bundle\") pod \"nova-cell2-cell-mapping-r6m22\" (UID: \"2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef\") " pod="openstack/nova-cell2-cell-mapping-r6m22" Feb 03 09:01:59 crc kubenswrapper[4998]: I0203 09:01:59.407394 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef-scripts\") pod \"nova-cell2-cell-mapping-r6m22\" (UID: \"2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef\") " pod="openstack/nova-cell2-cell-mapping-r6m22" Feb 03 09:01:59 crc kubenswrapper[4998]: I0203 09:01:59.408090 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef-config-data\") pod \"nova-cell2-cell-mapping-r6m22\" (UID: \"2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef\") " pod="openstack/nova-cell2-cell-mapping-r6m22" Feb 03 09:01:59 crc kubenswrapper[4998]: I0203 09:01:59.419433 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jspdw\" (UniqueName: \"kubernetes.io/projected/2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef-kube-api-access-jspdw\") pod \"nova-cell2-cell-mapping-r6m22\" (UID: \"2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef\") " pod="openstack/nova-cell2-cell-mapping-r6m22" Feb 03 09:01:59 crc kubenswrapper[4998]: I0203 09:01:59.466296 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell2-cell-mapping-r6m22" Feb 03 09:01:59 crc kubenswrapper[4998]: I0203 09:01:59.559840 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-rqj97"] Feb 03 09:01:59 crc kubenswrapper[4998]: I0203 09:01:59.561503 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-rqj97" Feb 03 09:01:59 crc kubenswrapper[4998]: I0203 09:01:59.564324 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Feb 03 09:01:59 crc kubenswrapper[4998]: I0203 09:01:59.564722 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Feb 03 09:01:59 crc kubenswrapper[4998]: I0203 09:01:59.576142 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-rqj97"] Feb 03 09:01:59 crc kubenswrapper[4998]: I0203 09:01:59.722912 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/728f12ea-0558-4049-9c2b-b060c3095656-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-rqj97\" (UID: \"728f12ea-0558-4049-9c2b-b060c3095656\") " pod="openstack/nova-cell1-cell-mapping-rqj97" Feb 03 09:01:59 crc kubenswrapper[4998]: I0203 09:01:59.722995 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqjgj\" (UniqueName: \"kubernetes.io/projected/728f12ea-0558-4049-9c2b-b060c3095656-kube-api-access-vqjgj\") pod \"nova-cell1-cell-mapping-rqj97\" (UID: \"728f12ea-0558-4049-9c2b-b060c3095656\") " pod="openstack/nova-cell1-cell-mapping-rqj97" Feb 03 09:01:59 crc kubenswrapper[4998]: I0203 09:01:59.723039 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/728f12ea-0558-4049-9c2b-b060c3095656-config-data\") pod \"nova-cell1-cell-mapping-rqj97\" (UID: \"728f12ea-0558-4049-9c2b-b060c3095656\") " pod="openstack/nova-cell1-cell-mapping-rqj97" Feb 03 09:01:59 crc kubenswrapper[4998]: I0203 09:01:59.723066 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/728f12ea-0558-4049-9c2b-b060c3095656-scripts\") pod \"nova-cell1-cell-mapping-rqj97\" (UID: \"728f12ea-0558-4049-9c2b-b060c3095656\") " pod="openstack/nova-cell1-cell-mapping-rqj97" Feb 03 09:01:59 crc kubenswrapper[4998]: I0203 09:01:59.824344 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/728f12ea-0558-4049-9c2b-b060c3095656-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-rqj97\" (UID: \"728f12ea-0558-4049-9c2b-b060c3095656\") " pod="openstack/nova-cell1-cell-mapping-rqj97" Feb 03 09:01:59 crc kubenswrapper[4998]: I0203 09:01:59.824679 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqjgj\" (UniqueName: \"kubernetes.io/projected/728f12ea-0558-4049-9c2b-b060c3095656-kube-api-access-vqjgj\") pod \"nova-cell1-cell-mapping-rqj97\" (UID: \"728f12ea-0558-4049-9c2b-b060c3095656\") " pod="openstack/nova-cell1-cell-mapping-rqj97" Feb 03 09:01:59 crc kubenswrapper[4998]: I0203 09:01:59.824728 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/728f12ea-0558-4049-9c2b-b060c3095656-config-data\") pod \"nova-cell1-cell-mapping-rqj97\" (UID: \"728f12ea-0558-4049-9c2b-b060c3095656\") " pod="openstack/nova-cell1-cell-mapping-rqj97" Feb 03 09:01:59 crc kubenswrapper[4998]: I0203 09:01:59.824756 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/728f12ea-0558-4049-9c2b-b060c3095656-scripts\") pod \"nova-cell1-cell-mapping-rqj97\" (UID: \"728f12ea-0558-4049-9c2b-b060c3095656\") " pod="openstack/nova-cell1-cell-mapping-rqj97" Feb 03 09:01:59 crc kubenswrapper[4998]: I0203 09:01:59.828936 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/728f12ea-0558-4049-9c2b-b060c3095656-scripts\") pod \"nova-cell1-cell-mapping-rqj97\" (UID: \"728f12ea-0558-4049-9c2b-b060c3095656\") " pod="openstack/nova-cell1-cell-mapping-rqj97" Feb 03 09:01:59 crc kubenswrapper[4998]: I0203 09:01:59.828964 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/728f12ea-0558-4049-9c2b-b060c3095656-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-rqj97\" (UID: \"728f12ea-0558-4049-9c2b-b060c3095656\") " pod="openstack/nova-cell1-cell-mapping-rqj97" Feb 03 09:01:59 crc kubenswrapper[4998]: I0203 09:01:59.830281 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/728f12ea-0558-4049-9c2b-b060c3095656-config-data\") pod \"nova-cell1-cell-mapping-rqj97\" (UID: \"728f12ea-0558-4049-9c2b-b060c3095656\") " pod="openstack/nova-cell1-cell-mapping-rqj97" Feb 03 09:01:59 crc kubenswrapper[4998]: I0203 09:01:59.840725 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqjgj\" (UniqueName: \"kubernetes.io/projected/728f12ea-0558-4049-9c2b-b060c3095656-kube-api-access-vqjgj\") pod \"nova-cell1-cell-mapping-rqj97\" (UID: \"728f12ea-0558-4049-9c2b-b060c3095656\") " pod="openstack/nova-cell1-cell-mapping-rqj97" Feb 03 09:01:59 crc kubenswrapper[4998]: I0203 09:01:59.889167 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-rqj97" Feb 03 09:02:00 crc kubenswrapper[4998]: I0203 09:02:00.129161 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell2-cell-mapping-r6m22"] Feb 03 09:02:00 crc kubenswrapper[4998]: I0203 09:02:00.218072 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell3-cell-mapping-j95hx"] Feb 03 09:02:00 crc kubenswrapper[4998]: I0203 09:02:00.219689 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell3-cell-mapping-j95hx" Feb 03 09:02:00 crc kubenswrapper[4998]: I0203 09:02:00.222591 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell3-manage-config-data" Feb 03 09:02:00 crc kubenswrapper[4998]: I0203 09:02:00.223086 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell3-manage-scripts" Feb 03 09:02:00 crc kubenswrapper[4998]: I0203 09:02:00.244606 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell3-cell-mapping-j95hx"] Feb 03 09:02:00 crc kubenswrapper[4998]: W0203 09:02:00.322969 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod728f12ea_0558_4049_9c2b_b060c3095656.slice/crio-0dc5cd55fd080de98e889ea675bca378aeecd3a02f287a34f7042545193eeada WatchSource:0}: Error finding container 0dc5cd55fd080de98e889ea675bca378aeecd3a02f287a34f7042545193eeada: Status 404 returned error can't find the container with id 0dc5cd55fd080de98e889ea675bca378aeecd3a02f287a34f7042545193eeada Feb 03 09:02:00 crc kubenswrapper[4998]: I0203 09:02:00.323663 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-rqj97"] Feb 03 09:02:00 crc kubenswrapper[4998]: I0203 09:02:00.334315 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/655b6245-34f9-4a4b-9853-a17be8282e97-combined-ca-bundle\") pod \"nova-cell3-cell-mapping-j95hx\" (UID: \"655b6245-34f9-4a4b-9853-a17be8282e97\") " pod="openstack/nova-cell3-cell-mapping-j95hx" Feb 03 09:02:00 crc kubenswrapper[4998]: I0203 09:02:00.334410 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jpdlx\" (UniqueName: \"kubernetes.io/projected/655b6245-34f9-4a4b-9853-a17be8282e97-kube-api-access-jpdlx\") pod \"nova-cell3-cell-mapping-j95hx\" (UID: \"655b6245-34f9-4a4b-9853-a17be8282e97\") " pod="openstack/nova-cell3-cell-mapping-j95hx" Feb 03 09:02:00 crc kubenswrapper[4998]: I0203 09:02:00.334464 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/655b6245-34f9-4a4b-9853-a17be8282e97-scripts\") pod \"nova-cell3-cell-mapping-j95hx\" (UID: \"655b6245-34f9-4a4b-9853-a17be8282e97\") " pod="openstack/nova-cell3-cell-mapping-j95hx" Feb 03 09:02:00 crc kubenswrapper[4998]: I0203 09:02:00.334484 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/655b6245-34f9-4a4b-9853-a17be8282e97-config-data\") pod \"nova-cell3-cell-mapping-j95hx\" (UID: \"655b6245-34f9-4a4b-9853-a17be8282e97\") " pod="openstack/nova-cell3-cell-mapping-j95hx" Feb 03 09:02:00 crc kubenswrapper[4998]: I0203 09:02:00.435837 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/655b6245-34f9-4a4b-9853-a17be8282e97-combined-ca-bundle\") pod \"nova-cell3-cell-mapping-j95hx\" (UID: \"655b6245-34f9-4a4b-9853-a17be8282e97\") " pod="openstack/nova-cell3-cell-mapping-j95hx" Feb 03 09:02:00 crc kubenswrapper[4998]: I0203 09:02:00.435903 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jpdlx\" (UniqueName: 
\"kubernetes.io/projected/655b6245-34f9-4a4b-9853-a17be8282e97-kube-api-access-jpdlx\") pod \"nova-cell3-cell-mapping-j95hx\" (UID: \"655b6245-34f9-4a4b-9853-a17be8282e97\") " pod="openstack/nova-cell3-cell-mapping-j95hx" Feb 03 09:02:00 crc kubenswrapper[4998]: I0203 09:02:00.435932 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/655b6245-34f9-4a4b-9853-a17be8282e97-scripts\") pod \"nova-cell3-cell-mapping-j95hx\" (UID: \"655b6245-34f9-4a4b-9853-a17be8282e97\") " pod="openstack/nova-cell3-cell-mapping-j95hx" Feb 03 09:02:00 crc kubenswrapper[4998]: I0203 09:02:00.435949 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/655b6245-34f9-4a4b-9853-a17be8282e97-config-data\") pod \"nova-cell3-cell-mapping-j95hx\" (UID: \"655b6245-34f9-4a4b-9853-a17be8282e97\") " pod="openstack/nova-cell3-cell-mapping-j95hx" Feb 03 09:02:00 crc kubenswrapper[4998]: I0203 09:02:00.439830 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/655b6245-34f9-4a4b-9853-a17be8282e97-scripts\") pod \"nova-cell3-cell-mapping-j95hx\" (UID: \"655b6245-34f9-4a4b-9853-a17be8282e97\") " pod="openstack/nova-cell3-cell-mapping-j95hx" Feb 03 09:02:00 crc kubenswrapper[4998]: I0203 09:02:00.440137 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/655b6245-34f9-4a4b-9853-a17be8282e97-combined-ca-bundle\") pod \"nova-cell3-cell-mapping-j95hx\" (UID: \"655b6245-34f9-4a4b-9853-a17be8282e97\") " pod="openstack/nova-cell3-cell-mapping-j95hx" Feb 03 09:02:00 crc kubenswrapper[4998]: I0203 09:02:00.442631 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/655b6245-34f9-4a4b-9853-a17be8282e97-config-data\") pod \"nova-cell3-cell-mapping-j95hx\" (UID: \"655b6245-34f9-4a4b-9853-a17be8282e97\") " pod="openstack/nova-cell3-cell-mapping-j95hx" Feb 03 09:02:00 crc kubenswrapper[4998]: I0203 09:02:00.455622 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jpdlx\" (UniqueName: \"kubernetes.io/projected/655b6245-34f9-4a4b-9853-a17be8282e97-kube-api-access-jpdlx\") pod \"nova-cell3-cell-mapping-j95hx\" (UID: \"655b6245-34f9-4a4b-9853-a17be8282e97\") " pod="openstack/nova-cell3-cell-mapping-j95hx" Feb 03 09:02:00 crc kubenswrapper[4998]: I0203 09:02:00.482814 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell2-cell-mapping-r6m22" event={"ID":"2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef","Type":"ContainerStarted","Data":"6a59f9e69e6dc470e080c378701631f35339724dbe372ff4adc96ca0682cbf51"} Feb 03 09:02:00 crc kubenswrapper[4998]: I0203 09:02:00.482876 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell2-cell-mapping-r6m22" event={"ID":"2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef","Type":"ContainerStarted","Data":"f26ee5871f507a6ee8219c35ec129d31ad0b6d7263fbbbce87ef5d248870cfd3"} Feb 03 09:02:00 crc kubenswrapper[4998]: I0203 09:02:00.486771 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-rqj97" event={"ID":"728f12ea-0558-4049-9c2b-b060c3095656","Type":"ContainerStarted","Data":"0dc5cd55fd080de98e889ea675bca378aeecd3a02f287a34f7042545193eeada"} Feb 03 09:02:00 crc kubenswrapper[4998]: I0203 09:02:00.521087 4998 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openstack/nova-cell2-cell-mapping-r6m22" podStartSLOduration=1.521065465 podStartE2EDuration="1.521065465s" podCreationTimestamp="2026-02-03 09:01:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 09:02:00.515188388 +0000 UTC m=+8158.801882204" watchObservedRunningTime="2026-02-03 09:02:00.521065465 +0000 UTC m=+8158.807759291" Feb 03 09:02:00 crc kubenswrapper[4998]: I0203 09:02:00.538649 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell3-cell-mapping-j95hx" Feb 03 09:02:01 crc kubenswrapper[4998]: I0203 09:02:01.013257 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell3-cell-mapping-j95hx"] Feb 03 09:02:01 crc kubenswrapper[4998]: I0203 09:02:01.500414 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-rqj97" event={"ID":"728f12ea-0558-4049-9c2b-b060c3095656","Type":"ContainerStarted","Data":"9f51967606204f6366909309820527afea7387277a802c63ba1ccf1cfde11c7b"} Feb 03 09:02:01 crc kubenswrapper[4998]: I0203 09:02:01.504132 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell3-cell-mapping-j95hx" event={"ID":"655b6245-34f9-4a4b-9853-a17be8282e97","Type":"ContainerStarted","Data":"ef1837b51060c62eb0518ef55f2f0e6e4a9223dc2f4cf9cdb7931600b94b233e"} Feb 03 09:02:01 crc kubenswrapper[4998]: I0203 09:02:01.504202 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell3-cell-mapping-j95hx" event={"ID":"655b6245-34f9-4a4b-9853-a17be8282e97","Type":"ContainerStarted","Data":"da094711f76762d602a2c920b2faf6db6e343fb4b135dc84901f7180e96a8cb7"} Feb 03 09:02:01 crc kubenswrapper[4998]: I0203 09:02:01.527204 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-rqj97" podStartSLOduration=2.527183828 podStartE2EDuration="2.527183828s" podCreationTimestamp="2026-02-03 09:01:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 09:02:01.520041075 +0000 UTC m=+8159.806734881" watchObservedRunningTime="2026-02-03 09:02:01.527183828 +0000 UTC m=+8159.813877634" Feb 03 09:02:01 crc kubenswrapper[4998]: I0203 09:02:01.546471 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell3-cell-mapping-j95hx" podStartSLOduration=1.546447195 podStartE2EDuration="1.546447195s" podCreationTimestamp="2026-02-03 09:02:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 09:02:01.537218553 +0000 UTC m=+8159.823912379" watchObservedRunningTime="2026-02-03 09:02:01.546447195 +0000 UTC m=+8159.833141011" Feb 03 09:02:01 crc kubenswrapper[4998]: I0203 09:02:01.968914 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 03 09:02:01 crc kubenswrapper[4998]: I0203 09:02:01.973925 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 03 09:02:01 crc kubenswrapper[4998]: I0203 09:02:01.993800 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 03 09:02:01 crc kubenswrapper[4998]: I0203 09:02:01.994166 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openstack/nova-metadata-0" Feb 03 09:02:02 crc kubenswrapper[4998]: I0203 09:02:02.034495 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Feb 03 09:02:02 crc kubenswrapper[4998]: I0203 09:02:02.065717 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Feb 03 09:02:02 crc kubenswrapper[4998]: I0203 09:02:02.561893 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Feb 03 09:02:03 crc kubenswrapper[4998]: I0203 09:02:03.135018 4998 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="b008eded-28cd-4f74-9b31-f1e23f1f4127" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.114:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 03 09:02:03 crc kubenswrapper[4998]: I0203 09:02:03.135578 4998 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="07ea7e95-f31f-4699-9e9b-c837c029c9df" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.113:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 03 09:02:03 crc kubenswrapper[4998]: I0203 09:02:03.135725 4998 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="07ea7e95-f31f-4699-9e9b-c837c029c9df" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.113:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 03 09:02:03 crc kubenswrapper[4998]: I0203 09:02:03.135841 4998 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="b008eded-28cd-4f74-9b31-f1e23f1f4127" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.114:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 03 09:02:05 crc kubenswrapper[4998]: I0203 09:02:05.427718 4998 scope.go:117] "RemoveContainer" containerID="f0960bf93408fbd80b1af606d0b0b825518c12f926ca6f6b8748be736ddfda77" Feb 03 09:02:05 crc kubenswrapper[4998]: E0203 09:02:05.428619 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 09:02:05 crc kubenswrapper[4998]: I0203 09:02:05.541656 4998 generic.go:334] "Generic (PLEG): container finished" podID="2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef" containerID="6a59f9e69e6dc470e080c378701631f35339724dbe372ff4adc96ca0682cbf51" exitCode=0 Feb 03 09:02:05 crc kubenswrapper[4998]: I0203 09:02:05.541724 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell2-cell-mapping-r6m22" event={"ID":"2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef","Type":"ContainerDied","Data":"6a59f9e69e6dc470e080c378701631f35339724dbe372ff4adc96ca0682cbf51"} Feb 03 09:02:06 crc kubenswrapper[4998]: I0203 09:02:06.551297 4998 generic.go:334] "Generic (PLEG): container finished" podID="728f12ea-0558-4049-9c2b-b060c3095656" containerID="9f51967606204f6366909309820527afea7387277a802c63ba1ccf1cfde11c7b" exitCode=0 Feb 03 09:02:06 crc kubenswrapper[4998]: 
Feb 03 09:02:06 crc kubenswrapper[4998]: I0203 09:02:06.551378 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-rqj97" event={"ID":"728f12ea-0558-4049-9c2b-b060c3095656","Type":"ContainerDied","Data":"9f51967606204f6366909309820527afea7387277a802c63ba1ccf1cfde11c7b"}
Feb 03 09:02:06 crc kubenswrapper[4998]: I0203 09:02:06.552855 4998 generic.go:334] "Generic (PLEG): container finished" podID="655b6245-34f9-4a4b-9853-a17be8282e97" containerID="ef1837b51060c62eb0518ef55f2f0e6e4a9223dc2f4cf9cdb7931600b94b233e" exitCode=0
Feb 03 09:02:06 crc kubenswrapper[4998]: I0203 09:02:06.552924 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell3-cell-mapping-j95hx" event={"ID":"655b6245-34f9-4a4b-9853-a17be8282e97","Type":"ContainerDied","Data":"ef1837b51060c62eb0518ef55f2f0e6e4a9223dc2f4cf9cdb7931600b94b233e"}
Feb 03 09:02:06 crc kubenswrapper[4998]: I0203 09:02:06.923198 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell2-cell-mapping-r6m22"
Feb 03 09:02:07 crc kubenswrapper[4998]: I0203 09:02:07.066524 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef-config-data\") pod \"2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef\" (UID: \"2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef\") "
Feb 03 09:02:07 crc kubenswrapper[4998]: I0203 09:02:07.066683 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef-combined-ca-bundle\") pod \"2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef\" (UID: \"2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef\") "
Feb 03 09:02:07 crc kubenswrapper[4998]: I0203 09:02:07.066718 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef-scripts\") pod \"2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef\" (UID: \"2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef\") "
Feb 03 09:02:07 crc kubenswrapper[4998]: I0203 09:02:07.066743 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jspdw\" (UniqueName: \"kubernetes.io/projected/2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef-kube-api-access-jspdw\") pod \"2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef\" (UID: \"2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef\") "
Feb 03 09:02:07 crc kubenswrapper[4998]: I0203 09:02:07.072599 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef-kube-api-access-jspdw" (OuterVolumeSpecName: "kube-api-access-jspdw") pod "2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef" (UID: "2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef"). InnerVolumeSpecName "kube-api-access-jspdw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 09:02:07 crc kubenswrapper[4998]: I0203 09:02:07.073690 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef-scripts" (OuterVolumeSpecName: "scripts") pod "2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef" (UID: "2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef"). InnerVolumeSpecName "scripts".
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:02:07 crc kubenswrapper[4998]: I0203 09:02:07.099992 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef-config-data" (OuterVolumeSpecName: "config-data") pod "2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef" (UID: "2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:02:07 crc kubenswrapper[4998]: I0203 09:02:07.101344 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef" (UID: "2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:02:07 crc kubenswrapper[4998]: I0203 09:02:07.169898 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:07 crc kubenswrapper[4998]: I0203 09:02:07.169937 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:07 crc kubenswrapper[4998]: I0203 09:02:07.169951 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jspdw\" (UniqueName: \"kubernetes.io/projected/2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef-kube-api-access-jspdw\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:07 crc kubenswrapper[4998]: I0203 09:02:07.170015 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:07 crc kubenswrapper[4998]: I0203 09:02:07.562510 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell2-cell-mapping-r6m22" event={"ID":"2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef","Type":"ContainerDied","Data":"f26ee5871f507a6ee8219c35ec129d31ad0b6d7263fbbbce87ef5d248870cfd3"} Feb 03 09:02:07 crc kubenswrapper[4998]: I0203 09:02:07.562905 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f26ee5871f507a6ee8219c35ec129d31ad0b6d7263fbbbce87ef5d248870cfd3" Feb 03 09:02:07 crc kubenswrapper[4998]: I0203 09:02:07.562656 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell2-cell-mapping-r6m22"
Feb 03 09:02:07 crc kubenswrapper[4998]: I0203 09:02:07.786686 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Feb 03 09:02:07 crc kubenswrapper[4998]: I0203 09:02:07.786961 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="07ea7e95-f31f-4699-9e9b-c837c029c9df" containerName="nova-api-log" containerID="cri-o://383898d1e774e0d6fee496f1821254e29da6d084034341559f4f41fe59a5ae61" gracePeriod=30
Feb 03 09:02:07 crc kubenswrapper[4998]: I0203 09:02:07.787202 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="07ea7e95-f31f-4699-9e9b-c837c029c9df" containerName="nova-api-api" containerID="cri-o://3a435f369f6e622e1be4dc0cd683a83447167df8333afd554a299d335113c05f" gracePeriod=30
Feb 03 09:02:07 crc kubenswrapper[4998]: I0203 09:02:07.811676 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Feb 03 09:02:07 crc kubenswrapper[4998]: I0203 09:02:07.811985 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="75305be1-077f-4a27-841a-38dc0e87610a" containerName="nova-scheduler-scheduler" containerID="cri-o://7a9f6c55cafa81615197dfa9a12540ceafaf70ce2f5ce8885f8ad569762a585d" gracePeriod=30
Feb 03 09:02:07 crc kubenswrapper[4998]: I0203 09:02:07.823310 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Feb 03 09:02:07 crc kubenswrapper[4998]: I0203 09:02:07.823619 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="b008eded-28cd-4f74-9b31-f1e23f1f4127" containerName="nova-metadata-log" containerID="cri-o://7f007c1f49dc9c377577ef182003c363ee7759342a2cffaf2877fb39b3ac1592" gracePeriod=30
Feb 03 09:02:07 crc kubenswrapper[4998]: I0203 09:02:07.824185 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="b008eded-28cd-4f74-9b31-f1e23f1f4127" containerName="nova-metadata-metadata" containerID="cri-o://692ec0d4ca8739d5e75428914b294bed8e9011477a405f91211280ce8c4d2858" gracePeriod=30
Feb 03 09:02:08 crc kubenswrapper[4998]: I0203 09:02:08.095327 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-rqj97"
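[Editor's note] Here the API deletes all three nova pods and the kubelet kills their containers with the default 30s grace period; in the ContainerDied events that follow, the log containers report exitCode=143 (128 + SIGTERM, the graceful stop) while the scheduler exits 0. An editor-added sketch (not part of the capture) that pairs each kill request with the exit code later observed for the same container ID:

    import re

    KILL = re.compile(r'"Killing container with a grace period".*?'
                      r'containerID="cri-o://(?P<cid>[0-9a-f]+)"')
    DIED = re.compile(r'container finished".*?containerID="(?P<cid>[0-9a-f]+)"'
                      r' exitCode=(?P<code>-?\d+)')

    def termination_outcomes(lines):
        """Map killed container IDs to exit codes; 143 = 128+SIGTERM means
        the graceful stop was honoured, 137 = 128+SIGKILL would mean the
        grace period ran out."""
        killed, outcomes = set(), {}
        for line in lines:
            if m := KILL.search(line):
                killed.add(m.group("cid"))
            elif (m := DIED.search(line)) and m.group("cid") in killed:
                outcomes[m.group("cid")] = int(m.group("code"))
        return outcomes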
Feb 03 09:02:08 crc kubenswrapper[4998]: I0203 09:02:08.278848 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell3-cell-mapping-j95hx"
Feb 03 09:02:08 crc kubenswrapper[4998]: I0203 09:02:08.296593 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/728f12ea-0558-4049-9c2b-b060c3095656-combined-ca-bundle\") pod \"728f12ea-0558-4049-9c2b-b060c3095656\" (UID: \"728f12ea-0558-4049-9c2b-b060c3095656\") "
Feb 03 09:02:08 crc kubenswrapper[4998]: I0203 09:02:08.296655 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/728f12ea-0558-4049-9c2b-b060c3095656-config-data\") pod \"728f12ea-0558-4049-9c2b-b060c3095656\" (UID: \"728f12ea-0558-4049-9c2b-b060c3095656\") "
Feb 03 09:02:08 crc kubenswrapper[4998]: I0203 09:02:08.296691 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/728f12ea-0558-4049-9c2b-b060c3095656-scripts\") pod \"728f12ea-0558-4049-9c2b-b060c3095656\" (UID: \"728f12ea-0558-4049-9c2b-b060c3095656\") "
Feb 03 09:02:08 crc kubenswrapper[4998]: I0203 09:02:08.296863 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vqjgj\" (UniqueName: \"kubernetes.io/projected/728f12ea-0558-4049-9c2b-b060c3095656-kube-api-access-vqjgj\") pod \"728f12ea-0558-4049-9c2b-b060c3095656\" (UID: \"728f12ea-0558-4049-9c2b-b060c3095656\") "
Feb 03 09:02:08 crc kubenswrapper[4998]: I0203 09:02:08.301966 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/728f12ea-0558-4049-9c2b-b060c3095656-scripts" (OuterVolumeSpecName: "scripts") pod "728f12ea-0558-4049-9c2b-b060c3095656" (UID: "728f12ea-0558-4049-9c2b-b060c3095656"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 09:02:08 crc kubenswrapper[4998]: I0203 09:02:08.302236 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/728f12ea-0558-4049-9c2b-b060c3095656-kube-api-access-vqjgj" (OuterVolumeSpecName: "kube-api-access-vqjgj") pod "728f12ea-0558-4049-9c2b-b060c3095656" (UID: "728f12ea-0558-4049-9c2b-b060c3095656"). InnerVolumeSpecName "kube-api-access-vqjgj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 09:02:08 crc kubenswrapper[4998]: I0203 09:02:08.325955 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/728f12ea-0558-4049-9c2b-b060c3095656-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "728f12ea-0558-4049-9c2b-b060c3095656" (UID: "728f12ea-0558-4049-9c2b-b060c3095656"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 09:02:08 crc kubenswrapper[4998]: I0203 09:02:08.334184 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/728f12ea-0558-4049-9c2b-b060c3095656-config-data" (OuterVolumeSpecName: "config-data") pod "728f12ea-0558-4049-9c2b-b060c3095656" (UID: "728f12ea-0558-4049-9c2b-b060c3095656"). InnerVolumeSpecName "config-data".
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:02:08 crc kubenswrapper[4998]: I0203 09:02:08.398928 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/655b6245-34f9-4a4b-9853-a17be8282e97-scripts\") pod \"655b6245-34f9-4a4b-9853-a17be8282e97\" (UID: \"655b6245-34f9-4a4b-9853-a17be8282e97\") " Feb 03 09:02:08 crc kubenswrapper[4998]: I0203 09:02:08.399085 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/655b6245-34f9-4a4b-9853-a17be8282e97-config-data\") pod \"655b6245-34f9-4a4b-9853-a17be8282e97\" (UID: \"655b6245-34f9-4a4b-9853-a17be8282e97\") " Feb 03 09:02:08 crc kubenswrapper[4998]: I0203 09:02:08.399219 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jpdlx\" (UniqueName: \"kubernetes.io/projected/655b6245-34f9-4a4b-9853-a17be8282e97-kube-api-access-jpdlx\") pod \"655b6245-34f9-4a4b-9853-a17be8282e97\" (UID: \"655b6245-34f9-4a4b-9853-a17be8282e97\") " Feb 03 09:02:08 crc kubenswrapper[4998]: I0203 09:02:08.399249 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/655b6245-34f9-4a4b-9853-a17be8282e97-combined-ca-bundle\") pod \"655b6245-34f9-4a4b-9853-a17be8282e97\" (UID: \"655b6245-34f9-4a4b-9853-a17be8282e97\") " Feb 03 09:02:08 crc kubenswrapper[4998]: I0203 09:02:08.399767 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vqjgj\" (UniqueName: \"kubernetes.io/projected/728f12ea-0558-4049-9c2b-b060c3095656-kube-api-access-vqjgj\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:08 crc kubenswrapper[4998]: I0203 09:02:08.399829 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/728f12ea-0558-4049-9c2b-b060c3095656-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:08 crc kubenswrapper[4998]: I0203 09:02:08.399842 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/728f12ea-0558-4049-9c2b-b060c3095656-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:08 crc kubenswrapper[4998]: I0203 09:02:08.399855 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/728f12ea-0558-4049-9c2b-b060c3095656-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:08 crc kubenswrapper[4998]: I0203 09:02:08.402406 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/655b6245-34f9-4a4b-9853-a17be8282e97-scripts" (OuterVolumeSpecName: "scripts") pod "655b6245-34f9-4a4b-9853-a17be8282e97" (UID: "655b6245-34f9-4a4b-9853-a17be8282e97"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:02:08 crc kubenswrapper[4998]: I0203 09:02:08.402892 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/655b6245-34f9-4a4b-9853-a17be8282e97-kube-api-access-jpdlx" (OuterVolumeSpecName: "kube-api-access-jpdlx") pod "655b6245-34f9-4a4b-9853-a17be8282e97" (UID: "655b6245-34f9-4a4b-9853-a17be8282e97"). InnerVolumeSpecName "kube-api-access-jpdlx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:02:08 crc kubenswrapper[4998]: E0203 09:02:08.425244 4998 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/655b6245-34f9-4a4b-9853-a17be8282e97-config-data podName:655b6245-34f9-4a4b-9853-a17be8282e97 nodeName:}" failed. No retries permitted until 2026-02-03 09:02:08.925211419 +0000 UTC m=+8167.211905225 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "config-data" (UniqueName: "kubernetes.io/secret/655b6245-34f9-4a4b-9853-a17be8282e97-config-data") pod "655b6245-34f9-4a4b-9853-a17be8282e97" (UID: "655b6245-34f9-4a4b-9853-a17be8282e97") : error deleting /var/lib/kubelet/pods/655b6245-34f9-4a4b-9853-a17be8282e97/volume-subpaths: remove /var/lib/kubelet/pods/655b6245-34f9-4a4b-9853-a17be8282e97/volume-subpaths: no such file or directory Feb 03 09:02:08 crc kubenswrapper[4998]: I0203 09:02:08.430909 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/655b6245-34f9-4a4b-9853-a17be8282e97-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "655b6245-34f9-4a4b-9853-a17be8282e97" (UID: "655b6245-34f9-4a4b-9853-a17be8282e97"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:02:08 crc kubenswrapper[4998]: I0203 09:02:08.504845 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/655b6245-34f9-4a4b-9853-a17be8282e97-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:08 crc kubenswrapper[4998]: I0203 09:02:08.504914 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jpdlx\" (UniqueName: \"kubernetes.io/projected/655b6245-34f9-4a4b-9853-a17be8282e97-kube-api-access-jpdlx\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:08 crc kubenswrapper[4998]: I0203 09:02:08.504935 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/655b6245-34f9-4a4b-9853-a17be8282e97-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:08 crc kubenswrapper[4998]: I0203 09:02:08.571558 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell3-cell-mapping-j95hx" event={"ID":"655b6245-34f9-4a4b-9853-a17be8282e97","Type":"ContainerDied","Data":"da094711f76762d602a2c920b2faf6db6e343fb4b135dc84901f7180e96a8cb7"} Feb 03 09:02:08 crc kubenswrapper[4998]: I0203 09:02:08.571597 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="da094711f76762d602a2c920b2faf6db6e343fb4b135dc84901f7180e96a8cb7" Feb 03 09:02:08 crc kubenswrapper[4998]: I0203 09:02:08.571645 4998 util.go:48] "No ready sandbox for pod can be found. 
Feb 03 09:02:08 crc kubenswrapper[4998]: I0203 09:02:08.571645 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell3-cell-mapping-j95hx"
Feb 03 09:02:08 crc kubenswrapper[4998]: I0203 09:02:08.574420 4998 generic.go:334] "Generic (PLEG): container finished" podID="b008eded-28cd-4f74-9b31-f1e23f1f4127" containerID="7f007c1f49dc9c377577ef182003c363ee7759342a2cffaf2877fb39b3ac1592" exitCode=143
Feb 03 09:02:08 crc kubenswrapper[4998]: I0203 09:02:08.574464 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b008eded-28cd-4f74-9b31-f1e23f1f4127","Type":"ContainerDied","Data":"7f007c1f49dc9c377577ef182003c363ee7759342a2cffaf2877fb39b3ac1592"}
Feb 03 09:02:08 crc kubenswrapper[4998]: I0203 09:02:08.575671 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-rqj97" event={"ID":"728f12ea-0558-4049-9c2b-b060c3095656","Type":"ContainerDied","Data":"0dc5cd55fd080de98e889ea675bca378aeecd3a02f287a34f7042545193eeada"}
Feb 03 09:02:08 crc kubenswrapper[4998]: I0203 09:02:08.575693 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0dc5cd55fd080de98e889ea675bca378aeecd3a02f287a34f7042545193eeada"
Feb 03 09:02:08 crc kubenswrapper[4998]: I0203 09:02:08.575726 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-rqj97"
Feb 03 09:02:08 crc kubenswrapper[4998]: I0203 09:02:08.577951 4998 generic.go:334] "Generic (PLEG): container finished" podID="07ea7e95-f31f-4699-9e9b-c837c029c9df" containerID="383898d1e774e0d6fee496f1821254e29da6d084034341559f4f41fe59a5ae61" exitCode=143
Feb 03 09:02:08 crc kubenswrapper[4998]: I0203 09:02:08.577971 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"07ea7e95-f31f-4699-9e9b-c837c029c9df","Type":"ContainerDied","Data":"383898d1e774e0d6fee496f1821254e29da6d084034341559f4f41fe59a5ae61"}
Feb 03 09:02:09 crc kubenswrapper[4998]: I0203 09:02:09.013077 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/655b6245-34f9-4a4b-9853-a17be8282e97-config-data\") pod \"655b6245-34f9-4a4b-9853-a17be8282e97\" (UID: \"655b6245-34f9-4a4b-9853-a17be8282e97\") "
Feb 03 09:02:09 crc kubenswrapper[4998]: I0203 09:02:09.017669 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/655b6245-34f9-4a4b-9853-a17be8282e97-config-data" (OuterVolumeSpecName: "config-data") pod "655b6245-34f9-4a4b-9853-a17be8282e97" (UID: "655b6245-34f9-4a4b-9853-a17be8282e97"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 09:02:09 crc kubenswrapper[4998]: I0203 09:02:09.115593 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/655b6245-34f9-4a4b-9853-a17be8282e97-config-data\") on node \"crc\" DevicePath \"\""
Feb 03 09:02:09 crc kubenswrapper[4998]: I0203 09:02:09.500416 4998 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/nova-scheduler-0" Feb 03 09:02:09 crc kubenswrapper[4998]: I0203 09:02:09.588400 4998 generic.go:334] "Generic (PLEG): container finished" podID="75305be1-077f-4a27-841a-38dc0e87610a" containerID="7a9f6c55cafa81615197dfa9a12540ceafaf70ce2f5ce8885f8ad569762a585d" exitCode=0 Feb 03 09:02:09 crc kubenswrapper[4998]: I0203 09:02:09.588459 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"75305be1-077f-4a27-841a-38dc0e87610a","Type":"ContainerDied","Data":"7a9f6c55cafa81615197dfa9a12540ceafaf70ce2f5ce8885f8ad569762a585d"} Feb 03 09:02:09 crc kubenswrapper[4998]: I0203 09:02:09.588483 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"75305be1-077f-4a27-841a-38dc0e87610a","Type":"ContainerDied","Data":"c3c2c239c58806dfc5cda55a6f84f5e32386ff01a97eea2b91eea12a3c0dd9b9"} Feb 03 09:02:09 crc kubenswrapper[4998]: I0203 09:02:09.588501 4998 scope.go:117] "RemoveContainer" containerID="7a9f6c55cafa81615197dfa9a12540ceafaf70ce2f5ce8885f8ad569762a585d" Feb 03 09:02:09 crc kubenswrapper[4998]: I0203 09:02:09.588531 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 03 09:02:09 crc kubenswrapper[4998]: I0203 09:02:09.614940 4998 scope.go:117] "RemoveContainer" containerID="7a9f6c55cafa81615197dfa9a12540ceafaf70ce2f5ce8885f8ad569762a585d" Feb 03 09:02:09 crc kubenswrapper[4998]: E0203 09:02:09.615464 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a9f6c55cafa81615197dfa9a12540ceafaf70ce2f5ce8885f8ad569762a585d\": container with ID starting with 7a9f6c55cafa81615197dfa9a12540ceafaf70ce2f5ce8885f8ad569762a585d not found: ID does not exist" containerID="7a9f6c55cafa81615197dfa9a12540ceafaf70ce2f5ce8885f8ad569762a585d" Feb 03 09:02:09 crc kubenswrapper[4998]: I0203 09:02:09.615512 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a9f6c55cafa81615197dfa9a12540ceafaf70ce2f5ce8885f8ad569762a585d"} err="failed to get container status \"7a9f6c55cafa81615197dfa9a12540ceafaf70ce2f5ce8885f8ad569762a585d\": rpc error: code = NotFound desc = could not find container \"7a9f6c55cafa81615197dfa9a12540ceafaf70ce2f5ce8885f8ad569762a585d\": container with ID starting with 7a9f6c55cafa81615197dfa9a12540ceafaf70ce2f5ce8885f8ad569762a585d not found: ID does not exist" Feb 03 09:02:09 crc kubenswrapper[4998]: I0203 09:02:09.622356 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75305be1-077f-4a27-841a-38dc0e87610a-combined-ca-bundle\") pod \"75305be1-077f-4a27-841a-38dc0e87610a\" (UID: \"75305be1-077f-4a27-841a-38dc0e87610a\") " Feb 03 09:02:09 crc kubenswrapper[4998]: I0203 09:02:09.622475 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75305be1-077f-4a27-841a-38dc0e87610a-config-data\") pod \"75305be1-077f-4a27-841a-38dc0e87610a\" (UID: \"75305be1-077f-4a27-841a-38dc0e87610a\") " Feb 03 09:02:09 crc kubenswrapper[4998]: I0203 09:02:09.622548 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2nc2g\" (UniqueName: \"kubernetes.io/projected/75305be1-077f-4a27-841a-38dc0e87610a-kube-api-access-2nc2g\") pod \"75305be1-077f-4a27-841a-38dc0e87610a\" (UID: 
\"75305be1-077f-4a27-841a-38dc0e87610a\") " Feb 03 09:02:09 crc kubenswrapper[4998]: I0203 09:02:09.626535 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/75305be1-077f-4a27-841a-38dc0e87610a-kube-api-access-2nc2g" (OuterVolumeSpecName: "kube-api-access-2nc2g") pod "75305be1-077f-4a27-841a-38dc0e87610a" (UID: "75305be1-077f-4a27-841a-38dc0e87610a"). InnerVolumeSpecName "kube-api-access-2nc2g". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:02:09 crc kubenswrapper[4998]: I0203 09:02:09.652792 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75305be1-077f-4a27-841a-38dc0e87610a-config-data" (OuterVolumeSpecName: "config-data") pod "75305be1-077f-4a27-841a-38dc0e87610a" (UID: "75305be1-077f-4a27-841a-38dc0e87610a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:02:09 crc kubenswrapper[4998]: I0203 09:02:09.654509 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75305be1-077f-4a27-841a-38dc0e87610a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "75305be1-077f-4a27-841a-38dc0e87610a" (UID: "75305be1-077f-4a27-841a-38dc0e87610a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:02:09 crc kubenswrapper[4998]: I0203 09:02:09.725018 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75305be1-077f-4a27-841a-38dc0e87610a-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:09 crc kubenswrapper[4998]: I0203 09:02:09.725058 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2nc2g\" (UniqueName: \"kubernetes.io/projected/75305be1-077f-4a27-841a-38dc0e87610a-kube-api-access-2nc2g\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:09 crc kubenswrapper[4998]: I0203 09:02:09.725071 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75305be1-077f-4a27-841a-38dc0e87610a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:09 crc kubenswrapper[4998]: I0203 09:02:09.931808 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 09:02:09 crc kubenswrapper[4998]: I0203 09:02:09.950963 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 09:02:09 crc kubenswrapper[4998]: I0203 09:02:09.968854 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 09:02:09 crc kubenswrapper[4998]: E0203 09:02:09.969263 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="655b6245-34f9-4a4b-9853-a17be8282e97" containerName="nova-manage" Feb 03 09:02:09 crc kubenswrapper[4998]: I0203 09:02:09.969289 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="655b6245-34f9-4a4b-9853-a17be8282e97" containerName="nova-manage" Feb 03 09:02:09 crc kubenswrapper[4998]: E0203 09:02:09.969300 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="728f12ea-0558-4049-9c2b-b060c3095656" containerName="nova-manage" Feb 03 09:02:09 crc kubenswrapper[4998]: I0203 09:02:09.969307 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="728f12ea-0558-4049-9c2b-b060c3095656" containerName="nova-manage" Feb 03 09:02:09 crc kubenswrapper[4998]: E0203 09:02:09.969328 4998 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef" containerName="nova-manage" Feb 03 09:02:09 crc kubenswrapper[4998]: I0203 09:02:09.969335 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef" containerName="nova-manage" Feb 03 09:02:09 crc kubenswrapper[4998]: E0203 09:02:09.969360 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75305be1-077f-4a27-841a-38dc0e87610a" containerName="nova-scheduler-scheduler" Feb 03 09:02:09 crc kubenswrapper[4998]: I0203 09:02:09.969368 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="75305be1-077f-4a27-841a-38dc0e87610a" containerName="nova-scheduler-scheduler" Feb 03 09:02:09 crc kubenswrapper[4998]: I0203 09:02:09.969546 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef" containerName="nova-manage" Feb 03 09:02:09 crc kubenswrapper[4998]: I0203 09:02:09.969558 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="728f12ea-0558-4049-9c2b-b060c3095656" containerName="nova-manage" Feb 03 09:02:09 crc kubenswrapper[4998]: I0203 09:02:09.969577 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="75305be1-077f-4a27-841a-38dc0e87610a" containerName="nova-scheduler-scheduler" Feb 03 09:02:09 crc kubenswrapper[4998]: I0203 09:02:09.969588 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="655b6245-34f9-4a4b-9853-a17be8282e97" containerName="nova-manage" Feb 03 09:02:09 crc kubenswrapper[4998]: I0203 09:02:09.970297 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 03 09:02:09 crc kubenswrapper[4998]: I0203 09:02:09.973060 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Feb 03 09:02:09 crc kubenswrapper[4998]: I0203 09:02:09.978406 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 09:02:10 crc kubenswrapper[4998]: I0203 09:02:10.138084 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b01e5169-e1ea-41dc-a09c-fd4922b93f97-config-data\") pod \"nova-scheduler-0\" (UID: \"b01e5169-e1ea-41dc-a09c-fd4922b93f97\") " pod="openstack/nova-scheduler-0" Feb 03 09:02:10 crc kubenswrapper[4998]: I0203 09:02:10.138163 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2l7t\" (UniqueName: \"kubernetes.io/projected/b01e5169-e1ea-41dc-a09c-fd4922b93f97-kube-api-access-d2l7t\") pod \"nova-scheduler-0\" (UID: \"b01e5169-e1ea-41dc-a09c-fd4922b93f97\") " pod="openstack/nova-scheduler-0" Feb 03 09:02:10 crc kubenswrapper[4998]: I0203 09:02:10.138196 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b01e5169-e1ea-41dc-a09c-fd4922b93f97-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b01e5169-e1ea-41dc-a09c-fd4922b93f97\") " pod="openstack/nova-scheduler-0" Feb 03 09:02:10 crc kubenswrapper[4998]: I0203 09:02:10.240993 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b01e5169-e1ea-41dc-a09c-fd4922b93f97-config-data\") pod \"nova-scheduler-0\" (UID: \"b01e5169-e1ea-41dc-a09c-fd4922b93f97\") " pod="openstack/nova-scheduler-0" Feb 03 09:02:10 crc kubenswrapper[4998]: I0203 
Feb 03 09:02:10 crc kubenswrapper[4998]: I0203 09:02:10.241119 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b01e5169-e1ea-41dc-a09c-fd4922b93f97-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b01e5169-e1ea-41dc-a09c-fd4922b93f97\") " pod="openstack/nova-scheduler-0"
Feb 03 09:02:10 crc kubenswrapper[4998]: I0203 09:02:10.245462 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b01e5169-e1ea-41dc-a09c-fd4922b93f97-config-data\") pod \"nova-scheduler-0\" (UID: \"b01e5169-e1ea-41dc-a09c-fd4922b93f97\") " pod="openstack/nova-scheduler-0"
Feb 03 09:02:10 crc kubenswrapper[4998]: I0203 09:02:10.247540 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b01e5169-e1ea-41dc-a09c-fd4922b93f97-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b01e5169-e1ea-41dc-a09c-fd4922b93f97\") " pod="openstack/nova-scheduler-0"
Feb 03 09:02:10 crc kubenswrapper[4998]: I0203 09:02:10.259514 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2l7t\" (UniqueName: \"kubernetes.io/projected/b01e5169-e1ea-41dc-a09c-fd4922b93f97-kube-api-access-d2l7t\") pod \"nova-scheduler-0\" (UID: \"b01e5169-e1ea-41dc-a09c-fd4922b93f97\") " pod="openstack/nova-scheduler-0"
Feb 03 09:02:10 crc kubenswrapper[4998]: I0203 09:02:10.321520 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Feb 03 09:02:10 crc kubenswrapper[4998]: I0203 09:02:10.443582 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="75305be1-077f-4a27-841a-38dc0e87610a" path="/var/lib/kubelet/pods/75305be1-077f-4a27-841a-38dc0e87610a/volumes"
Feb 03 09:02:10 crc kubenswrapper[4998]: I0203 09:02:10.786970 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.562511 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.568884 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
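The VerifyControllerAttachedVolume / "MountVolume started" / "MountVolume.SetUp succeeded" progression above is the volume manager reconciling desired state (the volumes the new pod spec requires) against actual state (what is currently mounted on the node). A set-difference sketch of that reconcile step, with volume names taken from the entries above (illustrative shape, not the kubelet's data structures):

    package main

    import "fmt"

    // reconcile diffs desired volumes against actually-mounted volumes and
    // returns what to mount and what to tear down, the same decision the
    // reconciler_common.go lines record.
    func reconcile(desired, actual map[string]bool) (mount, unmount []string) {
        for v := range desired {
            if !actual[v] {
                mount = append(mount, v) // -> "MountVolume started"
            }
        }
        for v := range actual {
            if !desired[v] {
                unmount = append(unmount, v) // -> "UnmountVolume started"
            }
        }
        return
    }

    func main() {
        desired := map[string]bool{"config-data": true, "combined-ca-bundle": true, "kube-api-access-d2l7t": true}
        actual := map[string]bool{"kube-api-access-2nc2g": true} // left over from the old pod
        m, u := reconcile(desired, actual)
        fmt.Println("mount:", m, "unmount:", u)
    }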
Need to start a new one" pod="openstack/nova-metadata-0" Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.618578 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b01e5169-e1ea-41dc-a09c-fd4922b93f97","Type":"ContainerStarted","Data":"a050f0bddd2638c469e218aec7b84bf87e7e66d06f8d3087e3389b64f591d08d"} Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.618626 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b01e5169-e1ea-41dc-a09c-fd4922b93f97","Type":"ContainerStarted","Data":"8d8a89438d73f0bbff2f7e5d95425bfdbd82ef8c04ab058533973dd6a3b7fde7"} Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.623229 4998 generic.go:334] "Generic (PLEG): container finished" podID="07ea7e95-f31f-4699-9e9b-c837c029c9df" containerID="3a435f369f6e622e1be4dc0cd683a83447167df8333afd554a299d335113c05f" exitCode=0 Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.623318 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"07ea7e95-f31f-4699-9e9b-c837c029c9df","Type":"ContainerDied","Data":"3a435f369f6e622e1be4dc0cd683a83447167df8333afd554a299d335113c05f"} Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.623348 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"07ea7e95-f31f-4699-9e9b-c837c029c9df","Type":"ContainerDied","Data":"518f24f9497580c0603ea90f32eebbb33f1539b89c674fd79ddb0fe4efd3d742"} Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.623361 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.623369 4998 scope.go:117] "RemoveContainer" containerID="3a435f369f6e622e1be4dc0cd683a83447167df8333afd554a299d335113c05f" Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.629912 4998 generic.go:334] "Generic (PLEG): container finished" podID="b008eded-28cd-4f74-9b31-f1e23f1f4127" containerID="692ec0d4ca8739d5e75428914b294bed8e9011477a405f91211280ce8c4d2858" exitCode=0 Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.629948 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b008eded-28cd-4f74-9b31-f1e23f1f4127","Type":"ContainerDied","Data":"692ec0d4ca8739d5e75428914b294bed8e9011477a405f91211280ce8c4d2858"} Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.629970 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b008eded-28cd-4f74-9b31-f1e23f1f4127","Type":"ContainerDied","Data":"ffed95f57f01abb8b5e2a2bc0dd6feaf8f0c4cebe91e60b7c79d05209db469db"} Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.630027 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.643086 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.6430693119999997 podStartE2EDuration="2.643069312s" podCreationTimestamp="2026-02-03 09:02:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 09:02:11.636518256 +0000 UTC m=+8169.923212072" watchObservedRunningTime="2026-02-03 09:02:11.643069312 +0000 UTC m=+8169.929763118" Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.652718 4998 scope.go:117] "RemoveContainer" containerID="383898d1e774e0d6fee496f1821254e29da6d084034341559f4f41fe59a5ae61" Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.668108 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b008eded-28cd-4f74-9b31-f1e23f1f4127-config-data\") pod \"b008eded-28cd-4f74-9b31-f1e23f1f4127\" (UID: \"b008eded-28cd-4f74-9b31-f1e23f1f4127\") " Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.668158 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/07ea7e95-f31f-4699-9e9b-c837c029c9df-config-data\") pod \"07ea7e95-f31f-4699-9e9b-c837c029c9df\" (UID: \"07ea7e95-f31f-4699-9e9b-c837c029c9df\") " Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.668266 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b008eded-28cd-4f74-9b31-f1e23f1f4127-combined-ca-bundle\") pod \"b008eded-28cd-4f74-9b31-f1e23f1f4127\" (UID: \"b008eded-28cd-4f74-9b31-f1e23f1f4127\") " Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.668309 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/07ea7e95-f31f-4699-9e9b-c837c029c9df-logs\") pod \"07ea7e95-f31f-4699-9e9b-c837c029c9df\" (UID: \"07ea7e95-f31f-4699-9e9b-c837c029c9df\") " Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.668340 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lgmj8\" (UniqueName: \"kubernetes.io/projected/07ea7e95-f31f-4699-9e9b-c837c029c9df-kube-api-access-lgmj8\") pod \"07ea7e95-f31f-4699-9e9b-c837c029c9df\" (UID: \"07ea7e95-f31f-4699-9e9b-c837c029c9df\") " Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.668427 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b008eded-28cd-4f74-9b31-f1e23f1f4127-logs\") pod \"b008eded-28cd-4f74-9b31-f1e23f1f4127\" (UID: \"b008eded-28cd-4f74-9b31-f1e23f1f4127\") " Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.668465 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07ea7e95-f31f-4699-9e9b-c837c029c9df-combined-ca-bundle\") pod \"07ea7e95-f31f-4699-9e9b-c837c029c9df\" (UID: \"07ea7e95-f31f-4699-9e9b-c837c029c9df\") " Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.668495 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qkwhl\" (UniqueName: \"kubernetes.io/projected/b008eded-28cd-4f74-9b31-f1e23f1f4127-kube-api-access-qkwhl\") pod 
\"b008eded-28cd-4f74-9b31-f1e23f1f4127\" (UID: \"b008eded-28cd-4f74-9b31-f1e23f1f4127\") " Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.668841 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/07ea7e95-f31f-4699-9e9b-c837c029c9df-logs" (OuterVolumeSpecName: "logs") pod "07ea7e95-f31f-4699-9e9b-c837c029c9df" (UID: "07ea7e95-f31f-4699-9e9b-c837c029c9df"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.668989 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b008eded-28cd-4f74-9b31-f1e23f1f4127-logs" (OuterVolumeSpecName: "logs") pod "b008eded-28cd-4f74-9b31-f1e23f1f4127" (UID: "b008eded-28cd-4f74-9b31-f1e23f1f4127"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.669132 4998 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/07ea7e95-f31f-4699-9e9b-c837c029c9df-logs\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.669147 4998 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b008eded-28cd-4f74-9b31-f1e23f1f4127-logs\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.673407 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07ea7e95-f31f-4699-9e9b-c837c029c9df-kube-api-access-lgmj8" (OuterVolumeSpecName: "kube-api-access-lgmj8") pod "07ea7e95-f31f-4699-9e9b-c837c029c9df" (UID: "07ea7e95-f31f-4699-9e9b-c837c029c9df"). InnerVolumeSpecName "kube-api-access-lgmj8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.674359 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b008eded-28cd-4f74-9b31-f1e23f1f4127-kube-api-access-qkwhl" (OuterVolumeSpecName: "kube-api-access-qkwhl") pod "b008eded-28cd-4f74-9b31-f1e23f1f4127" (UID: "b008eded-28cd-4f74-9b31-f1e23f1f4127"). InnerVolumeSpecName "kube-api-access-qkwhl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.675405 4998 scope.go:117] "RemoveContainer" containerID="3a435f369f6e622e1be4dc0cd683a83447167df8333afd554a299d335113c05f" Feb 03 09:02:11 crc kubenswrapper[4998]: E0203 09:02:11.675821 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a435f369f6e622e1be4dc0cd683a83447167df8333afd554a299d335113c05f\": container with ID starting with 3a435f369f6e622e1be4dc0cd683a83447167df8333afd554a299d335113c05f not found: ID does not exist" containerID="3a435f369f6e622e1be4dc0cd683a83447167df8333afd554a299d335113c05f" Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.675874 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a435f369f6e622e1be4dc0cd683a83447167df8333afd554a299d335113c05f"} err="failed to get container status \"3a435f369f6e622e1be4dc0cd683a83447167df8333afd554a299d335113c05f\": rpc error: code = NotFound desc = could not find container \"3a435f369f6e622e1be4dc0cd683a83447167df8333afd554a299d335113c05f\": container with ID starting with 3a435f369f6e622e1be4dc0cd683a83447167df8333afd554a299d335113c05f not found: ID does not exist" Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.675903 4998 scope.go:117] "RemoveContainer" containerID="383898d1e774e0d6fee496f1821254e29da6d084034341559f4f41fe59a5ae61" Feb 03 09:02:11 crc kubenswrapper[4998]: E0203 09:02:11.676719 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"383898d1e774e0d6fee496f1821254e29da6d084034341559f4f41fe59a5ae61\": container with ID starting with 383898d1e774e0d6fee496f1821254e29da6d084034341559f4f41fe59a5ae61 not found: ID does not exist" containerID="383898d1e774e0d6fee496f1821254e29da6d084034341559f4f41fe59a5ae61" Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.676738 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"383898d1e774e0d6fee496f1821254e29da6d084034341559f4f41fe59a5ae61"} err="failed to get container status \"383898d1e774e0d6fee496f1821254e29da6d084034341559f4f41fe59a5ae61\": rpc error: code = NotFound desc = could not find container \"383898d1e774e0d6fee496f1821254e29da6d084034341559f4f41fe59a5ae61\": container with ID starting with 383898d1e774e0d6fee496f1821254e29da6d084034341559f4f41fe59a5ae61 not found: ID does not exist" Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.676768 4998 scope.go:117] "RemoveContainer" containerID="692ec0d4ca8739d5e75428914b294bed8e9011477a405f91211280ce8c4d2858" Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.695856 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b008eded-28cd-4f74-9b31-f1e23f1f4127-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b008eded-28cd-4f74-9b31-f1e23f1f4127" (UID: "b008eded-28cd-4f74-9b31-f1e23f1f4127"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.698140 4998 scope.go:117] "RemoveContainer" containerID="7f007c1f49dc9c377577ef182003c363ee7759342a2cffaf2877fb39b3ac1592" Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.703368 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07ea7e95-f31f-4699-9e9b-c837c029c9df-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "07ea7e95-f31f-4699-9e9b-c837c029c9df" (UID: "07ea7e95-f31f-4699-9e9b-c837c029c9df"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.705124 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b008eded-28cd-4f74-9b31-f1e23f1f4127-config-data" (OuterVolumeSpecName: "config-data") pod "b008eded-28cd-4f74-9b31-f1e23f1f4127" (UID: "b008eded-28cd-4f74-9b31-f1e23f1f4127"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.705663 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07ea7e95-f31f-4699-9e9b-c837c029c9df-config-data" (OuterVolumeSpecName: "config-data") pod "07ea7e95-f31f-4699-9e9b-c837c029c9df" (UID: "07ea7e95-f31f-4699-9e9b-c837c029c9df"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.716527 4998 scope.go:117] "RemoveContainer" containerID="692ec0d4ca8739d5e75428914b294bed8e9011477a405f91211280ce8c4d2858" Feb 03 09:02:11 crc kubenswrapper[4998]: E0203 09:02:11.717035 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"692ec0d4ca8739d5e75428914b294bed8e9011477a405f91211280ce8c4d2858\": container with ID starting with 692ec0d4ca8739d5e75428914b294bed8e9011477a405f91211280ce8c4d2858 not found: ID does not exist" containerID="692ec0d4ca8739d5e75428914b294bed8e9011477a405f91211280ce8c4d2858" Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.717096 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"692ec0d4ca8739d5e75428914b294bed8e9011477a405f91211280ce8c4d2858"} err="failed to get container status \"692ec0d4ca8739d5e75428914b294bed8e9011477a405f91211280ce8c4d2858\": rpc error: code = NotFound desc = could not find container \"692ec0d4ca8739d5e75428914b294bed8e9011477a405f91211280ce8c4d2858\": container with ID starting with 692ec0d4ca8739d5e75428914b294bed8e9011477a405f91211280ce8c4d2858 not found: ID does not exist" Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.717123 4998 scope.go:117] "RemoveContainer" containerID="7f007c1f49dc9c377577ef182003c363ee7759342a2cffaf2877fb39b3ac1592" Feb 03 09:02:11 crc kubenswrapper[4998]: E0203 09:02:11.717433 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f007c1f49dc9c377577ef182003c363ee7759342a2cffaf2877fb39b3ac1592\": container with ID starting with 7f007c1f49dc9c377577ef182003c363ee7759342a2cffaf2877fb39b3ac1592 not found: ID does not exist" containerID="7f007c1f49dc9c377577ef182003c363ee7759342a2cffaf2877fb39b3ac1592" Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.717471 4998 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"7f007c1f49dc9c377577ef182003c363ee7759342a2cffaf2877fb39b3ac1592"} err="failed to get container status \"7f007c1f49dc9c377577ef182003c363ee7759342a2cffaf2877fb39b3ac1592\": rpc error: code = NotFound desc = could not find container \"7f007c1f49dc9c377577ef182003c363ee7759342a2cffaf2877fb39b3ac1592\": container with ID starting with 7f007c1f49dc9c377577ef182003c363ee7759342a2cffaf2877fb39b3ac1592 not found: ID does not exist" Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.770772 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07ea7e95-f31f-4699-9e9b-c837c029c9df-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.770815 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qkwhl\" (UniqueName: \"kubernetes.io/projected/b008eded-28cd-4f74-9b31-f1e23f1f4127-kube-api-access-qkwhl\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.770825 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b008eded-28cd-4f74-9b31-f1e23f1f4127-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.770834 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/07ea7e95-f31f-4699-9e9b-c837c029c9df-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.770846 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b008eded-28cd-4f74-9b31-f1e23f1f4127-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:11 crc kubenswrapper[4998]: I0203 09:02:11.770857 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lgmj8\" (UniqueName: \"kubernetes.io/projected/07ea7e95-f31f-4699-9e9b-c837c029c9df-kube-api-access-lgmj8\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.022693 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.034293 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.052022 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.075098 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.112102 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Feb 03 09:02:12 crc kubenswrapper[4998]: E0203 09:02:12.112721 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07ea7e95-f31f-4699-9e9b-c837c029c9df" containerName="nova-api-api" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.112746 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="07ea7e95-f31f-4699-9e9b-c837c029c9df" containerName="nova-api-api" Feb 03 09:02:12 crc kubenswrapper[4998]: E0203 09:02:12.112791 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07ea7e95-f31f-4699-9e9b-c837c029c9df" containerName="nova-api-log" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.112801 4998 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="07ea7e95-f31f-4699-9e9b-c837c029c9df" containerName="nova-api-log" Feb 03 09:02:12 crc kubenswrapper[4998]: E0203 09:02:12.112831 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b008eded-28cd-4f74-9b31-f1e23f1f4127" containerName="nova-metadata-log" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.112840 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="b008eded-28cd-4f74-9b31-f1e23f1f4127" containerName="nova-metadata-log" Feb 03 09:02:12 crc kubenswrapper[4998]: E0203 09:02:12.112858 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b008eded-28cd-4f74-9b31-f1e23f1f4127" containerName="nova-metadata-metadata" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.112866 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="b008eded-28cd-4f74-9b31-f1e23f1f4127" containerName="nova-metadata-metadata" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.113174 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="b008eded-28cd-4f74-9b31-f1e23f1f4127" containerName="nova-metadata-metadata" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.113246 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="07ea7e95-f31f-4699-9e9b-c837c029c9df" containerName="nova-api-api" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.113269 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="b008eded-28cd-4f74-9b31-f1e23f1f4127" containerName="nova-metadata-log" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.113283 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="07ea7e95-f31f-4699-9e9b-c837c029c9df" containerName="nova-api-log" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.114646 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.116740 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.123464 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.143757 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.146200 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.149314 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.164381 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.285700 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74c8cac3-60c1-475c-896f-f19ba0e995eb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"74c8cac3-60c1-475c-896f-f19ba0e995eb\") " pod="openstack/nova-metadata-0" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.285813 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5de42bf-ed76-4b09-9e10-5dffcd9476b6-config-data\") pod \"nova-api-0\" (UID: \"e5de42bf-ed76-4b09-9e10-5dffcd9476b6\") " pod="openstack/nova-api-0" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.285939 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74c8cac3-60c1-475c-896f-f19ba0e995eb-config-data\") pod \"nova-metadata-0\" (UID: \"74c8cac3-60c1-475c-896f-f19ba0e995eb\") " pod="openstack/nova-metadata-0" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.285982 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5de42bf-ed76-4b09-9e10-5dffcd9476b6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e5de42bf-ed76-4b09-9e10-5dffcd9476b6\") " pod="openstack/nova-api-0" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.286020 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e5de42bf-ed76-4b09-9e10-5dffcd9476b6-logs\") pod \"nova-api-0\" (UID: \"e5de42bf-ed76-4b09-9e10-5dffcd9476b6\") " pod="openstack/nova-api-0" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.286120 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ftg6\" (UniqueName: \"kubernetes.io/projected/74c8cac3-60c1-475c-896f-f19ba0e995eb-kube-api-access-8ftg6\") pod \"nova-metadata-0\" (UID: \"74c8cac3-60c1-475c-896f-f19ba0e995eb\") " pod="openstack/nova-metadata-0" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.286148 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/74c8cac3-60c1-475c-896f-f19ba0e995eb-logs\") pod \"nova-metadata-0\" (UID: \"74c8cac3-60c1-475c-896f-f19ba0e995eb\") " pod="openstack/nova-metadata-0" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.286174 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zj9gh\" (UniqueName: \"kubernetes.io/projected/e5de42bf-ed76-4b09-9e10-5dffcd9476b6-kube-api-access-zj9gh\") pod \"nova-api-0\" (UID: \"e5de42bf-ed76-4b09-9e10-5dffcd9476b6\") " pod="openstack/nova-api-0" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.388108 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ftg6\" (UniqueName: 
\"kubernetes.io/projected/74c8cac3-60c1-475c-896f-f19ba0e995eb-kube-api-access-8ftg6\") pod \"nova-metadata-0\" (UID: \"74c8cac3-60c1-475c-896f-f19ba0e995eb\") " pod="openstack/nova-metadata-0" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.388166 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/74c8cac3-60c1-475c-896f-f19ba0e995eb-logs\") pod \"nova-metadata-0\" (UID: \"74c8cac3-60c1-475c-896f-f19ba0e995eb\") " pod="openstack/nova-metadata-0" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.388198 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zj9gh\" (UniqueName: \"kubernetes.io/projected/e5de42bf-ed76-4b09-9e10-5dffcd9476b6-kube-api-access-zj9gh\") pod \"nova-api-0\" (UID: \"e5de42bf-ed76-4b09-9e10-5dffcd9476b6\") " pod="openstack/nova-api-0" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.388231 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74c8cac3-60c1-475c-896f-f19ba0e995eb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"74c8cac3-60c1-475c-896f-f19ba0e995eb\") " pod="openstack/nova-metadata-0" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.388291 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5de42bf-ed76-4b09-9e10-5dffcd9476b6-config-data\") pod \"nova-api-0\" (UID: \"e5de42bf-ed76-4b09-9e10-5dffcd9476b6\") " pod="openstack/nova-api-0" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.388340 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74c8cac3-60c1-475c-896f-f19ba0e995eb-config-data\") pod \"nova-metadata-0\" (UID: \"74c8cac3-60c1-475c-896f-f19ba0e995eb\") " pod="openstack/nova-metadata-0" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.388386 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5de42bf-ed76-4b09-9e10-5dffcd9476b6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e5de42bf-ed76-4b09-9e10-5dffcd9476b6\") " pod="openstack/nova-api-0" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.388424 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e5de42bf-ed76-4b09-9e10-5dffcd9476b6-logs\") pod \"nova-api-0\" (UID: \"e5de42bf-ed76-4b09-9e10-5dffcd9476b6\") " pod="openstack/nova-api-0" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.389000 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e5de42bf-ed76-4b09-9e10-5dffcd9476b6-logs\") pod \"nova-api-0\" (UID: \"e5de42bf-ed76-4b09-9e10-5dffcd9476b6\") " pod="openstack/nova-api-0" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.389605 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/74c8cac3-60c1-475c-896f-f19ba0e995eb-logs\") pod \"nova-metadata-0\" (UID: \"74c8cac3-60c1-475c-896f-f19ba0e995eb\") " pod="openstack/nova-metadata-0" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.394372 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/74c8cac3-60c1-475c-896f-f19ba0e995eb-config-data\") pod \"nova-metadata-0\" (UID: \"74c8cac3-60c1-475c-896f-f19ba0e995eb\") " pod="openstack/nova-metadata-0" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.394634 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5de42bf-ed76-4b09-9e10-5dffcd9476b6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e5de42bf-ed76-4b09-9e10-5dffcd9476b6\") " pod="openstack/nova-api-0" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.409079 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5de42bf-ed76-4b09-9e10-5dffcd9476b6-config-data\") pod \"nova-api-0\" (UID: \"e5de42bf-ed76-4b09-9e10-5dffcd9476b6\") " pod="openstack/nova-api-0" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.416153 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74c8cac3-60c1-475c-896f-f19ba0e995eb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"74c8cac3-60c1-475c-896f-f19ba0e995eb\") " pod="openstack/nova-metadata-0" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.421219 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8ftg6\" (UniqueName: \"kubernetes.io/projected/74c8cac3-60c1-475c-896f-f19ba0e995eb-kube-api-access-8ftg6\") pod \"nova-metadata-0\" (UID: \"74c8cac3-60c1-475c-896f-f19ba0e995eb\") " pod="openstack/nova-metadata-0" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.422676 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zj9gh\" (UniqueName: \"kubernetes.io/projected/e5de42bf-ed76-4b09-9e10-5dffcd9476b6-kube-api-access-zj9gh\") pod \"nova-api-0\" (UID: \"e5de42bf-ed76-4b09-9e10-5dffcd9476b6\") " pod="openstack/nova-api-0" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.437067 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.463734 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07ea7e95-f31f-4699-9e9b-c837c029c9df" path="/var/lib/kubelet/pods/07ea7e95-f31f-4699-9e9b-c837c029c9df/volumes" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.464424 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b008eded-28cd-4f74-9b31-f1e23f1f4127" path="/var/lib/kubelet/pods/b008eded-28cd-4f74-9b31-f1e23f1f4127/volumes" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.469642 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 03 09:02:12 crc kubenswrapper[4998]: I0203 09:02:12.938739 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 03 09:02:12 crc kubenswrapper[4998]: W0203 09:02:12.941096 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod74c8cac3_60c1_475c_896f_f19ba0e995eb.slice/crio-49f7155e301d412644d798047a61e07910ff75695be2453b5e57c7859aa11a0c WatchSource:0}: Error finding container 49f7155e301d412644d798047a61e07910ff75695be2453b5e57c7859aa11a0c: Status 404 returned error can't find the container with id 49f7155e301d412644d798047a61e07910ff75695be2453b5e57c7859aa11a0c Feb 03 09:02:13 crc kubenswrapper[4998]: I0203 09:02:13.032086 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 03 09:02:13 crc kubenswrapper[4998]: W0203 09:02:13.033807 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode5de42bf_ed76_4b09_9e10_5dffcd9476b6.slice/crio-f8f59720fad9b2dc69ed1bd5a662a9d429594315db479b826b4a5247b6348f0c WatchSource:0}: Error finding container f8f59720fad9b2dc69ed1bd5a662a9d429594315db479b826b4a5247b6348f0c: Status 404 returned error can't find the container with id f8f59720fad9b2dc69ed1bd5a662a9d429594315db479b826b4a5247b6348f0c Feb 03 09:02:13 crc kubenswrapper[4998]: I0203 09:02:13.667280 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"74c8cac3-60c1-475c-896f-f19ba0e995eb","Type":"ContainerStarted","Data":"ee6c981252b4795aa6a5951782b13432f250ea0f033cc50218787897ba916d54"} Feb 03 09:02:13 crc kubenswrapper[4998]: I0203 09:02:13.667643 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"74c8cac3-60c1-475c-896f-f19ba0e995eb","Type":"ContainerStarted","Data":"1139b2d020448f8912bb65d68d1219fcaf183f2adec048a17689ae1622146b1e"} Feb 03 09:02:13 crc kubenswrapper[4998]: I0203 09:02:13.667743 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"74c8cac3-60c1-475c-896f-f19ba0e995eb","Type":"ContainerStarted","Data":"49f7155e301d412644d798047a61e07910ff75695be2453b5e57c7859aa11a0c"} Feb 03 09:02:13 crc kubenswrapper[4998]: I0203 09:02:13.669624 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e5de42bf-ed76-4b09-9e10-5dffcd9476b6","Type":"ContainerStarted","Data":"66aeaff39af619b6e2d5777389b1a005889e00846a1c99f0f0f94f72e44377d0"} Feb 03 09:02:13 crc kubenswrapper[4998]: I0203 09:02:13.669671 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e5de42bf-ed76-4b09-9e10-5dffcd9476b6","Type":"ContainerStarted","Data":"bfd0570c65e7ef0aecbbf3ccd674c27d77577ca83919171ed4e843ea1fe04040"} Feb 03 09:02:13 crc kubenswrapper[4998]: I0203 09:02:13.669687 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e5de42bf-ed76-4b09-9e10-5dffcd9476b6","Type":"ContainerStarted","Data":"f8f59720fad9b2dc69ed1bd5a662a9d429594315db479b826b4a5247b6348f0c"} Feb 03 09:02:13 crc kubenswrapper[4998]: I0203 09:02:13.691132 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=1.6911162640000001 podStartE2EDuration="1.691116264s" podCreationTimestamp="2026-02-03 09:02:12 +0000 UTC" firstStartedPulling="0001-01-01 
00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 09:02:13.686322928 +0000 UTC m=+8171.973016744" watchObservedRunningTime="2026-02-03 09:02:13.691116264 +0000 UTC m=+8171.977810070" Feb 03 09:02:13 crc kubenswrapper[4998]: I0203 09:02:13.709794 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=1.709769764 podStartE2EDuration="1.709769764s" podCreationTimestamp="2026-02-03 09:02:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 09:02:13.705927715 +0000 UTC m=+8171.992621521" watchObservedRunningTime="2026-02-03 09:02:13.709769764 +0000 UTC m=+8171.996463570" Feb 03 09:02:15 crc kubenswrapper[4998]: I0203 09:02:15.321995 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Feb 03 09:02:17 crc kubenswrapper[4998]: I0203 09:02:17.437494 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 03 09:02:17 crc kubenswrapper[4998]: I0203 09:02:17.438018 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 03 09:02:18 crc kubenswrapper[4998]: I0203 09:02:18.428953 4998 scope.go:117] "RemoveContainer" containerID="f0960bf93408fbd80b1af606d0b0b825518c12f926ca6f6b8748be736ddfda77" Feb 03 09:02:18 crc kubenswrapper[4998]: E0203 09:02:18.429954 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 09:02:20 crc kubenswrapper[4998]: I0203 09:02:20.322723 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Feb 03 09:02:20 crc kubenswrapper[4998]: I0203 09:02:20.363578 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Feb 03 09:02:20 crc kubenswrapper[4998]: I0203 09:02:20.770658 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Feb 03 09:02:22 crc kubenswrapper[4998]: I0203 09:02:22.437947 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 03 09:02:22 crc kubenswrapper[4998]: I0203 09:02:22.437989 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 03 09:02:22 crc kubenswrapper[4998]: I0203 09:02:22.470497 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 03 09:02:22 crc kubenswrapper[4998]: I0203 09:02:22.470835 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 03 09:02:23 crc kubenswrapper[4998]: I0203 09:02:23.602048 4998 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="74c8cac3-60c1-475c-896f-f19ba0e995eb" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.120:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 03 09:02:23 crc kubenswrapper[4998]: 
Feb 03 09:02:23 crc kubenswrapper[4998]: I0203 09:02:23.602093 4998 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="74c8cac3-60c1-475c-896f-f19ba0e995eb" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.120:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Feb 03 09:02:23 crc kubenswrapper[4998]: I0203 09:02:23.602157 4998 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="e5de42bf-ed76-4b09-9e10-5dffcd9476b6" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.121:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Feb 03 09:02:31 crc kubenswrapper[4998]: I0203 09:02:31.428041 4998 scope.go:117] "RemoveContainer" containerID="f0960bf93408fbd80b1af606d0b0b825518c12f926ca6f6b8748be736ddfda77"
Feb 03 09:02:31 crc kubenswrapper[4998]: E0203 09:02:31.428987 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 09:02:32 crc kubenswrapper[4998]: I0203 09:02:32.441952 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Feb 03 09:02:32 crc kubenswrapper[4998]: I0203 09:02:32.443426 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Feb 03 09:02:32 crc kubenswrapper[4998]: I0203 09:02:32.444454 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Feb 03 09:02:32 crc kubenswrapper[4998]: I0203 09:02:32.475505 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Feb 03 09:02:32 crc kubenswrapper[4998]: I0203 09:02:32.476619 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Feb 03 09:02:32 crc kubenswrapper[4998]: I0203 09:02:32.481372 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Feb 03 09:02:32 crc kubenswrapper[4998]: I0203 09:02:32.483584 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Feb 03 09:02:32 crc kubenswrapper[4998]: I0203 09:02:32.862863 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Feb 03 09:02:32 crc kubenswrapper[4998]: I0203 09:02:32.865389 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Feb 03 09:02:32 crc kubenswrapper[4998]: I0203 09:02:32.866163 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Feb 03 09:02:33 crc kubenswrapper[4998]: I0203 09:02:33.114735 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5ccf7dbd4c-vvwtt"]
Feb 03 09:02:33 crc kubenswrapper[4998]: I0203 09:02:33.116706 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccf7dbd4c-vvwtt"
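The repeated "back-off 5m0s restarting failed container" messages for machine-config-daemon (at 09:02:18 and again at 09:02:31) are CrashLoopBackOff at its cap: the kubelet roughly doubles the restart delay per consecutive crash and caps it at five minutes, so once a container has crashed enough times every sync in the back-off window logs the same 5m0s. A sketch of that schedule, assuming the commonly cited 10s base and 5m cap:

    package main

    import (
        "fmt"
        "time"
    )

    // crashLoopDelay doubles the restart back-off per consecutive crash and
    // caps it, matching the "back-off 5m0s" wording above. Base and cap are
    // assumptions for illustration.
    func crashLoopDelay(restarts int) time.Duration {
        d := 10 * time.Second
        for i := 0; i < restarts && d < 5*time.Minute; i++ {
            d *= 2
        }
        if d > 5*time.Minute {
            d = 5 * time.Minute
        }
        return d
    }

    func main() {
        for r := 0; r <= 6; r++ {
            fmt.Println(r, crashLoopDelay(r)) // 10s 20s 40s 1m20s 2m40s 5m0s 5m0s
        }
    }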
Feb 03 09:02:33 crc kubenswrapper[4998]: I0203 09:02:33.134965 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccf7dbd4c-vvwtt"]
Feb 03 09:02:33 crc kubenswrapper[4998]: I0203 09:02:33.205068 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bfa12fa9-60f0-42b4-88e6-06431f8218dc-dns-svc\") pod \"dnsmasq-dns-5ccf7dbd4c-vvwtt\" (UID: \"bfa12fa9-60f0-42b4-88e6-06431f8218dc\") " pod="openstack/dnsmasq-dns-5ccf7dbd4c-vvwtt"
Feb 03 09:02:33 crc kubenswrapper[4998]: I0203 09:02:33.205121 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfa12fa9-60f0-42b4-88e6-06431f8218dc-config\") pod \"dnsmasq-dns-5ccf7dbd4c-vvwtt\" (UID: \"bfa12fa9-60f0-42b4-88e6-06431f8218dc\") " pod="openstack/dnsmasq-dns-5ccf7dbd4c-vvwtt"
Feb 03 09:02:33 crc kubenswrapper[4998]: I0203 09:02:33.205159 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bfa12fa9-60f0-42b4-88e6-06431f8218dc-ovsdbserver-nb\") pod \"dnsmasq-dns-5ccf7dbd4c-vvwtt\" (UID: \"bfa12fa9-60f0-42b4-88e6-06431f8218dc\") " pod="openstack/dnsmasq-dns-5ccf7dbd4c-vvwtt"
Feb 03 09:02:33 crc kubenswrapper[4998]: I0203 09:02:33.205198 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzsnt\" (UniqueName: \"kubernetes.io/projected/bfa12fa9-60f0-42b4-88e6-06431f8218dc-kube-api-access-nzsnt\") pod \"dnsmasq-dns-5ccf7dbd4c-vvwtt\" (UID: \"bfa12fa9-60f0-42b4-88e6-06431f8218dc\") " pod="openstack/dnsmasq-dns-5ccf7dbd4c-vvwtt"
Feb 03 09:02:33 crc kubenswrapper[4998]: I0203 09:02:33.205237 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bfa12fa9-60f0-42b4-88e6-06431f8218dc-ovsdbserver-sb\") pod \"dnsmasq-dns-5ccf7dbd4c-vvwtt\" (UID: \"bfa12fa9-60f0-42b4-88e6-06431f8218dc\") " pod="openstack/dnsmasq-dns-5ccf7dbd4c-vvwtt"
Feb 03 09:02:33 crc kubenswrapper[4998]: I0203 09:02:33.306725 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bfa12fa9-60f0-42b4-88e6-06431f8218dc-ovsdbserver-sb\") pod \"dnsmasq-dns-5ccf7dbd4c-vvwtt\" (UID: \"bfa12fa9-60f0-42b4-88e6-06431f8218dc\") " pod="openstack/dnsmasq-dns-5ccf7dbd4c-vvwtt"
Feb 03 09:02:33 crc kubenswrapper[4998]: I0203 09:02:33.306848 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bfa12fa9-60f0-42b4-88e6-06431f8218dc-dns-svc\") pod \"dnsmasq-dns-5ccf7dbd4c-vvwtt\" (UID: \"bfa12fa9-60f0-42b4-88e6-06431f8218dc\") " pod="openstack/dnsmasq-dns-5ccf7dbd4c-vvwtt"
Feb 03 09:02:33 crc kubenswrapper[4998]: I0203 09:02:33.306876 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfa12fa9-60f0-42b4-88e6-06431f8218dc-config\") pod \"dnsmasq-dns-5ccf7dbd4c-vvwtt\" (UID: \"bfa12fa9-60f0-42b4-88e6-06431f8218dc\") " pod="openstack/dnsmasq-dns-5ccf7dbd4c-vvwtt"
Feb 03 09:02:33 crc kubenswrapper[4998]: I0203 09:02:33.306913 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bfa12fa9-60f0-42b4-88e6-06431f8218dc-ovsdbserver-nb\") pod \"dnsmasq-dns-5ccf7dbd4c-vvwtt\" (UID: \"bfa12fa9-60f0-42b4-88e6-06431f8218dc\") " pod="openstack/dnsmasq-dns-5ccf7dbd4c-vvwtt"
Feb 03 09:02:33 crc kubenswrapper[4998]: I0203 09:02:33.306950 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzsnt\" (UniqueName: \"kubernetes.io/projected/bfa12fa9-60f0-42b4-88e6-06431f8218dc-kube-api-access-nzsnt\") pod \"dnsmasq-dns-5ccf7dbd4c-vvwtt\" (UID: \"bfa12fa9-60f0-42b4-88e6-06431f8218dc\") " pod="openstack/dnsmasq-dns-5ccf7dbd4c-vvwtt"
Feb 03 09:02:33 crc kubenswrapper[4998]: I0203 09:02:33.307945 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bfa12fa9-60f0-42b4-88e6-06431f8218dc-ovsdbserver-nb\") pod \"dnsmasq-dns-5ccf7dbd4c-vvwtt\" (UID: \"bfa12fa9-60f0-42b4-88e6-06431f8218dc\") " pod="openstack/dnsmasq-dns-5ccf7dbd4c-vvwtt"
Feb 03 09:02:33 crc kubenswrapper[4998]: I0203 09:02:33.308148 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bfa12fa9-60f0-42b4-88e6-06431f8218dc-dns-svc\") pod \"dnsmasq-dns-5ccf7dbd4c-vvwtt\" (UID: \"bfa12fa9-60f0-42b4-88e6-06431f8218dc\") " pod="openstack/dnsmasq-dns-5ccf7dbd4c-vvwtt"
Feb 03 09:02:33 crc kubenswrapper[4998]: I0203 09:02:33.308191 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfa12fa9-60f0-42b4-88e6-06431f8218dc-config\") pod \"dnsmasq-dns-5ccf7dbd4c-vvwtt\" (UID: \"bfa12fa9-60f0-42b4-88e6-06431f8218dc\") " pod="openstack/dnsmasq-dns-5ccf7dbd4c-vvwtt"
Feb 03 09:02:33 crc kubenswrapper[4998]: I0203 09:02:33.308194 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bfa12fa9-60f0-42b4-88e6-06431f8218dc-ovsdbserver-sb\") pod \"dnsmasq-dns-5ccf7dbd4c-vvwtt\" (UID: \"bfa12fa9-60f0-42b4-88e6-06431f8218dc\") " pod="openstack/dnsmasq-dns-5ccf7dbd4c-vvwtt"
Feb 03 09:02:33 crc kubenswrapper[4998]: I0203 09:02:33.325481 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzsnt\" (UniqueName: \"kubernetes.io/projected/bfa12fa9-60f0-42b4-88e6-06431f8218dc-kube-api-access-nzsnt\") pod \"dnsmasq-dns-5ccf7dbd4c-vvwtt\" (UID: \"bfa12fa9-60f0-42b4-88e6-06431f8218dc\") " pod="openstack/dnsmasq-dns-5ccf7dbd4c-vvwtt"
Feb 03 09:02:33 crc kubenswrapper[4998]: I0203 09:02:33.436674 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccf7dbd4c-vvwtt"
Need to start a new one" pod="openstack/dnsmasq-dns-5ccf7dbd4c-vvwtt" Feb 03 09:02:33 crc kubenswrapper[4998]: I0203 09:02:33.876637 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccf7dbd4c-vvwtt"] Feb 03 09:02:33 crc kubenswrapper[4998]: W0203 09:02:33.893207 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbfa12fa9_60f0_42b4_88e6_06431f8218dc.slice/crio-99db7059250dd823225759ff3f07733c72736e1abc5d7e24f04e2991f1e0ba6b WatchSource:0}: Error finding container 99db7059250dd823225759ff3f07733c72736e1abc5d7e24f04e2991f1e0ba6b: Status 404 returned error can't find the container with id 99db7059250dd823225759ff3f07733c72736e1abc5d7e24f04e2991f1e0ba6b Feb 03 09:02:34 crc kubenswrapper[4998]: I0203 09:02:34.892920 4998 generic.go:334] "Generic (PLEG): container finished" podID="bfa12fa9-60f0-42b4-88e6-06431f8218dc" containerID="47c35d3f4362e25d73f0bfb629fd523e5175664be94159e9765bcf63135d2722" exitCode=0 Feb 03 09:02:34 crc kubenswrapper[4998]: I0203 09:02:34.893119 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccf7dbd4c-vvwtt" event={"ID":"bfa12fa9-60f0-42b4-88e6-06431f8218dc","Type":"ContainerDied","Data":"47c35d3f4362e25d73f0bfb629fd523e5175664be94159e9765bcf63135d2722"} Feb 03 09:02:34 crc kubenswrapper[4998]: I0203 09:02:34.893549 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccf7dbd4c-vvwtt" event={"ID":"bfa12fa9-60f0-42b4-88e6-06431f8218dc","Type":"ContainerStarted","Data":"99db7059250dd823225759ff3f07733c72736e1abc5d7e24f04e2991f1e0ba6b"} Feb 03 09:02:35 crc kubenswrapper[4998]: I0203 09:02:35.921809 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccf7dbd4c-vvwtt" event={"ID":"bfa12fa9-60f0-42b4-88e6-06431f8218dc","Type":"ContainerStarted","Data":"017b4523fa561cf8a4a100071e696f0b8bc89b343e0d70f6ea263c02f00ad89c"} Feb 03 09:02:35 crc kubenswrapper[4998]: I0203 09:02:35.923696 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5ccf7dbd4c-vvwtt" Feb 03 09:02:37 crc kubenswrapper[4998]: I0203 09:02:37.082487 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5ccf7dbd4c-vvwtt" podStartSLOduration=4.082470353 podStartE2EDuration="4.082470353s" podCreationTimestamp="2026-02-03 09:02:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 09:02:35.954282144 +0000 UTC m=+8194.240975980" watchObservedRunningTime="2026-02-03 09:02:37.082470353 +0000 UTC m=+8195.369164159" Feb 03 09:02:37 crc kubenswrapper[4998]: I0203 09:02:37.097084 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-wzdjd"] Feb 03 09:02:37 crc kubenswrapper[4998]: I0203 09:02:37.099172 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wzdjd" Feb 03 09:02:37 crc kubenswrapper[4998]: I0203 09:02:37.109018 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wzdjd"] Feb 03 09:02:37 crc kubenswrapper[4998]: I0203 09:02:37.181825 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91c5f6dc-bc9a-4c16-b390-12567ef93f25-catalog-content\") pod \"redhat-marketplace-wzdjd\" (UID: \"91c5f6dc-bc9a-4c16-b390-12567ef93f25\") " pod="openshift-marketplace/redhat-marketplace-wzdjd" Feb 03 09:02:37 crc kubenswrapper[4998]: I0203 09:02:37.181879 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdrfg\" (UniqueName: \"kubernetes.io/projected/91c5f6dc-bc9a-4c16-b390-12567ef93f25-kube-api-access-bdrfg\") pod \"redhat-marketplace-wzdjd\" (UID: \"91c5f6dc-bc9a-4c16-b390-12567ef93f25\") " pod="openshift-marketplace/redhat-marketplace-wzdjd" Feb 03 09:02:37 crc kubenswrapper[4998]: I0203 09:02:37.181952 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91c5f6dc-bc9a-4c16-b390-12567ef93f25-utilities\") pod \"redhat-marketplace-wzdjd\" (UID: \"91c5f6dc-bc9a-4c16-b390-12567ef93f25\") " pod="openshift-marketplace/redhat-marketplace-wzdjd" Feb 03 09:02:37 crc kubenswrapper[4998]: I0203 09:02:37.284446 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91c5f6dc-bc9a-4c16-b390-12567ef93f25-catalog-content\") pod \"redhat-marketplace-wzdjd\" (UID: \"91c5f6dc-bc9a-4c16-b390-12567ef93f25\") " pod="openshift-marketplace/redhat-marketplace-wzdjd" Feb 03 09:02:37 crc kubenswrapper[4998]: I0203 09:02:37.284490 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdrfg\" (UniqueName: \"kubernetes.io/projected/91c5f6dc-bc9a-4c16-b390-12567ef93f25-kube-api-access-bdrfg\") pod \"redhat-marketplace-wzdjd\" (UID: \"91c5f6dc-bc9a-4c16-b390-12567ef93f25\") " pod="openshift-marketplace/redhat-marketplace-wzdjd" Feb 03 09:02:37 crc kubenswrapper[4998]: I0203 09:02:37.284571 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91c5f6dc-bc9a-4c16-b390-12567ef93f25-utilities\") pod \"redhat-marketplace-wzdjd\" (UID: \"91c5f6dc-bc9a-4c16-b390-12567ef93f25\") " pod="openshift-marketplace/redhat-marketplace-wzdjd" Feb 03 09:02:37 crc kubenswrapper[4998]: I0203 09:02:37.284998 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91c5f6dc-bc9a-4c16-b390-12567ef93f25-catalog-content\") pod \"redhat-marketplace-wzdjd\" (UID: \"91c5f6dc-bc9a-4c16-b390-12567ef93f25\") " pod="openshift-marketplace/redhat-marketplace-wzdjd" Feb 03 09:02:37 crc kubenswrapper[4998]: I0203 09:02:37.285053 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91c5f6dc-bc9a-4c16-b390-12567ef93f25-utilities\") pod \"redhat-marketplace-wzdjd\" (UID: \"91c5f6dc-bc9a-4c16-b390-12567ef93f25\") " pod="openshift-marketplace/redhat-marketplace-wzdjd" Feb 03 09:02:37 crc kubenswrapper[4998]: I0203 09:02:37.307156 4998 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-bdrfg\" (UniqueName: \"kubernetes.io/projected/91c5f6dc-bc9a-4c16-b390-12567ef93f25-kube-api-access-bdrfg\") pod \"redhat-marketplace-wzdjd\" (UID: \"91c5f6dc-bc9a-4c16-b390-12567ef93f25\") " pod="openshift-marketplace/redhat-marketplace-wzdjd" Feb 03 09:02:37 crc kubenswrapper[4998]: I0203 09:02:37.424285 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wzdjd" Feb 03 09:02:37 crc kubenswrapper[4998]: I0203 09:02:37.910478 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wzdjd"] Feb 03 09:02:37 crc kubenswrapper[4998]: I0203 09:02:37.939304 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wzdjd" event={"ID":"91c5f6dc-bc9a-4c16-b390-12567ef93f25","Type":"ContainerStarted","Data":"2a5f10c688eb230b133f719ba87fe9a129e79fc2960d2118d99b48a4fbf22f2c"} Feb 03 09:02:38 crc kubenswrapper[4998]: I0203 09:02:38.954811 4998 generic.go:334] "Generic (PLEG): container finished" podID="91c5f6dc-bc9a-4c16-b390-12567ef93f25" containerID="195a75ea91bdb07fe71aa8c5b33cf6b0567a3f72a0a3d3829c3553e9c334b215" exitCode=0 Feb 03 09:02:38 crc kubenswrapper[4998]: I0203 09:02:38.954888 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wzdjd" event={"ID":"91c5f6dc-bc9a-4c16-b390-12567ef93f25","Type":"ContainerDied","Data":"195a75ea91bdb07fe71aa8c5b33cf6b0567a3f72a0a3d3829c3553e9c334b215"} Feb 03 09:02:39 crc kubenswrapper[4998]: I0203 09:02:39.970494 4998 generic.go:334] "Generic (PLEG): container finished" podID="91c5f6dc-bc9a-4c16-b390-12567ef93f25" containerID="d788bc8e212059e99aa41e5e4dfc91cfa54396de434b2d51b43b109e5f6ddbd1" exitCode=0 Feb 03 09:02:39 crc kubenswrapper[4998]: I0203 09:02:39.970620 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wzdjd" event={"ID":"91c5f6dc-bc9a-4c16-b390-12567ef93f25","Type":"ContainerDied","Data":"d788bc8e212059e99aa41e5e4dfc91cfa54396de434b2d51b43b109e5f6ddbd1"} Feb 03 09:02:40 crc kubenswrapper[4998]: I0203 09:02:40.981989 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wzdjd" event={"ID":"91c5f6dc-bc9a-4c16-b390-12567ef93f25","Type":"ContainerStarted","Data":"a1ffd6b39c99226e31002babb64fd1a64e88ff6d9aa6642e1cddef961642d229"} Feb 03 09:02:41 crc kubenswrapper[4998]: I0203 09:02:41.025557 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wzdjd" podStartSLOduration=2.602151705 podStartE2EDuration="4.025537145s" podCreationTimestamp="2026-02-03 09:02:37 +0000 UTC" firstStartedPulling="2026-02-03 09:02:38.960755566 +0000 UTC m=+8197.247449372" lastFinishedPulling="2026-02-03 09:02:40.384140996 +0000 UTC m=+8198.670834812" observedRunningTime="2026-02-03 09:02:41.015046527 +0000 UTC m=+8199.301740343" watchObservedRunningTime="2026-02-03 09:02:41.025537145 +0000 UTC m=+8199.312230971" Feb 03 09:02:43 crc kubenswrapper[4998]: I0203 09:02:43.438866 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5ccf7dbd4c-vvwtt" Feb 03 09:02:43 crc kubenswrapper[4998]: I0203 09:02:43.509322 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d9d87dc5-tsm89"] Feb 03 09:02:43 crc kubenswrapper[4998]: I0203 09:02:43.509525 4998 kuberuntime_container.go:808] "Killing container with a 
grace period" pod="openstack/dnsmasq-dns-57d9d87dc5-tsm89" podUID="2dae9d3a-7f7b-42de-bbf0-020df791db8d" containerName="dnsmasq-dns" containerID="cri-o://6e64bc42bd236ea05aadeb079eda3fec383c2033a241078022e0193cceda7ad0" gracePeriod=10 Feb 03 09:02:44 crc kubenswrapper[4998]: I0203 09:02:44.017478 4998 generic.go:334] "Generic (PLEG): container finished" podID="2dae9d3a-7f7b-42de-bbf0-020df791db8d" containerID="6e64bc42bd236ea05aadeb079eda3fec383c2033a241078022e0193cceda7ad0" exitCode=0 Feb 03 09:02:44 crc kubenswrapper[4998]: I0203 09:02:44.017794 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d9d87dc5-tsm89" event={"ID":"2dae9d3a-7f7b-42de-bbf0-020df791db8d","Type":"ContainerDied","Data":"6e64bc42bd236ea05aadeb079eda3fec383c2033a241078022e0193cceda7ad0"} Feb 03 09:02:44 crc kubenswrapper[4998]: I0203 09:02:44.172021 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d9d87dc5-tsm89" Feb 03 09:02:44 crc kubenswrapper[4998]: I0203 09:02:44.219386 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2dae9d3a-7f7b-42de-bbf0-020df791db8d-dns-svc\") pod \"2dae9d3a-7f7b-42de-bbf0-020df791db8d\" (UID: \"2dae9d3a-7f7b-42de-bbf0-020df791db8d\") " Feb 03 09:02:44 crc kubenswrapper[4998]: I0203 09:02:44.219494 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2dae9d3a-7f7b-42de-bbf0-020df791db8d-config\") pod \"2dae9d3a-7f7b-42de-bbf0-020df791db8d\" (UID: \"2dae9d3a-7f7b-42de-bbf0-020df791db8d\") " Feb 03 09:02:44 crc kubenswrapper[4998]: I0203 09:02:44.219566 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2dae9d3a-7f7b-42de-bbf0-020df791db8d-ovsdbserver-sb\") pod \"2dae9d3a-7f7b-42de-bbf0-020df791db8d\" (UID: \"2dae9d3a-7f7b-42de-bbf0-020df791db8d\") " Feb 03 09:02:44 crc kubenswrapper[4998]: I0203 09:02:44.219668 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2dae9d3a-7f7b-42de-bbf0-020df791db8d-ovsdbserver-nb\") pod \"2dae9d3a-7f7b-42de-bbf0-020df791db8d\" (UID: \"2dae9d3a-7f7b-42de-bbf0-020df791db8d\") " Feb 03 09:02:44 crc kubenswrapper[4998]: I0203 09:02:44.219733 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fmb7z\" (UniqueName: \"kubernetes.io/projected/2dae9d3a-7f7b-42de-bbf0-020df791db8d-kube-api-access-fmb7z\") pod \"2dae9d3a-7f7b-42de-bbf0-020df791db8d\" (UID: \"2dae9d3a-7f7b-42de-bbf0-020df791db8d\") " Feb 03 09:02:44 crc kubenswrapper[4998]: I0203 09:02:44.235529 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2dae9d3a-7f7b-42de-bbf0-020df791db8d-kube-api-access-fmb7z" (OuterVolumeSpecName: "kube-api-access-fmb7z") pod "2dae9d3a-7f7b-42de-bbf0-020df791db8d" (UID: "2dae9d3a-7f7b-42de-bbf0-020df791db8d"). InnerVolumeSpecName "kube-api-access-fmb7z". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:02:44 crc kubenswrapper[4998]: I0203 09:02:44.277851 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2dae9d3a-7f7b-42de-bbf0-020df791db8d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2dae9d3a-7f7b-42de-bbf0-020df791db8d" (UID: "2dae9d3a-7f7b-42de-bbf0-020df791db8d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 09:02:44 crc kubenswrapper[4998]: I0203 09:02:44.278903 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2dae9d3a-7f7b-42de-bbf0-020df791db8d-config" (OuterVolumeSpecName: "config") pod "2dae9d3a-7f7b-42de-bbf0-020df791db8d" (UID: "2dae9d3a-7f7b-42de-bbf0-020df791db8d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 09:02:44 crc kubenswrapper[4998]: I0203 09:02:44.283686 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2dae9d3a-7f7b-42de-bbf0-020df791db8d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2dae9d3a-7f7b-42de-bbf0-020df791db8d" (UID: "2dae9d3a-7f7b-42de-bbf0-020df791db8d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 09:02:44 crc kubenswrapper[4998]: I0203 09:02:44.291011 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2dae9d3a-7f7b-42de-bbf0-020df791db8d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2dae9d3a-7f7b-42de-bbf0-020df791db8d" (UID: "2dae9d3a-7f7b-42de-bbf0-020df791db8d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 09:02:44 crc kubenswrapper[4998]: I0203 09:02:44.326251 4998 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2dae9d3a-7f7b-42de-bbf0-020df791db8d-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:44 crc kubenswrapper[4998]: I0203 09:02:44.326297 4998 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2dae9d3a-7f7b-42de-bbf0-020df791db8d-config\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:44 crc kubenswrapper[4998]: I0203 09:02:44.326309 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2dae9d3a-7f7b-42de-bbf0-020df791db8d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:44 crc kubenswrapper[4998]: I0203 09:02:44.326323 4998 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2dae9d3a-7f7b-42de-bbf0-020df791db8d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:44 crc kubenswrapper[4998]: I0203 09:02:44.326338 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fmb7z\" (UniqueName: \"kubernetes.io/projected/2dae9d3a-7f7b-42de-bbf0-020df791db8d-kube-api-access-fmb7z\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:45 crc kubenswrapper[4998]: I0203 09:02:45.029942 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d9d87dc5-tsm89" event={"ID":"2dae9d3a-7f7b-42de-bbf0-020df791db8d","Type":"ContainerDied","Data":"de9c6dc3daa85f62d459f2a8df84687c1a60df3310441a887a47b994f62ec73f"} Feb 03 09:02:45 crc kubenswrapper[4998]: I0203 09:02:45.030030 4998 scope.go:117] "RemoveContainer" 
containerID="6e64bc42bd236ea05aadeb079eda3fec383c2033a241078022e0193cceda7ad0" Feb 03 09:02:45 crc kubenswrapper[4998]: I0203 09:02:45.029978 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d9d87dc5-tsm89" Feb 03 09:02:45 crc kubenswrapper[4998]: I0203 09:02:45.054396 4998 scope.go:117] "RemoveContainer" containerID="3c5d5c7355cb33b7aaf41f341d08618393148899646e19c5c7980b727d0bceaa" Feb 03 09:02:45 crc kubenswrapper[4998]: I0203 09:02:45.062564 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d9d87dc5-tsm89"] Feb 03 09:02:45 crc kubenswrapper[4998]: I0203 09:02:45.076599 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d9d87dc5-tsm89"] Feb 03 09:02:45 crc kubenswrapper[4998]: I0203 09:02:45.427776 4998 scope.go:117] "RemoveContainer" containerID="f0960bf93408fbd80b1af606d0b0b825518c12f926ca6f6b8748be736ddfda77" Feb 03 09:02:45 crc kubenswrapper[4998]: E0203 09:02:45.427993 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 09:02:46 crc kubenswrapper[4998]: I0203 09:02:46.438547 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2dae9d3a-7f7b-42de-bbf0-020df791db8d" path="/var/lib/kubelet/pods/2dae9d3a-7f7b-42de-bbf0-020df791db8d/volumes" Feb 03 09:02:47 crc kubenswrapper[4998]: I0203 09:02:47.425487 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-wzdjd" Feb 03 09:02:47 crc kubenswrapper[4998]: I0203 09:02:47.425727 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-wzdjd" Feb 03 09:02:47 crc kubenswrapper[4998]: I0203 09:02:47.512218 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-wzdjd" Feb 03 09:02:48 crc kubenswrapper[4998]: I0203 09:02:48.108020 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-wzdjd" Feb 03 09:02:48 crc kubenswrapper[4998]: I0203 09:02:48.166201 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wzdjd"] Feb 03 09:02:49 crc kubenswrapper[4998]: I0203 09:02:49.041666 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-c1fc-account-create-update-77knt"] Feb 03 09:02:49 crc kubenswrapper[4998]: I0203 09:02:49.052366 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-kxrfk"] Feb 03 09:02:49 crc kubenswrapper[4998]: I0203 09:02:49.061677 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-c1fc-account-create-update-77knt"] Feb 03 09:02:49 crc kubenswrapper[4998]: I0203 09:02:49.080907 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-kxrfk"] Feb 03 09:02:50 crc kubenswrapper[4998]: I0203 09:02:50.086222 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-wzdjd" podUID="91c5f6dc-bc9a-4c16-b390-12567ef93f25" 
containerName="registry-server" containerID="cri-o://a1ffd6b39c99226e31002babb64fd1a64e88ff6d9aa6642e1cddef961642d229" gracePeriod=2 Feb 03 09:02:50 crc kubenswrapper[4998]: I0203 09:02:50.441468 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="588b9b50-534f-4ba6-a43f-d5065b6d380b" path="/var/lib/kubelet/pods/588b9b50-534f-4ba6-a43f-d5065b6d380b/volumes" Feb 03 09:02:50 crc kubenswrapper[4998]: I0203 09:02:50.442330 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e701d871-ccce-4ab7-bb1a-4fb9c1766519" path="/var/lib/kubelet/pods/e701d871-ccce-4ab7-bb1a-4fb9c1766519/volumes" Feb 03 09:02:50 crc kubenswrapper[4998]: I0203 09:02:50.574692 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wzdjd" Feb 03 09:02:50 crc kubenswrapper[4998]: I0203 09:02:50.663886 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bdrfg\" (UniqueName: \"kubernetes.io/projected/91c5f6dc-bc9a-4c16-b390-12567ef93f25-kube-api-access-bdrfg\") pod \"91c5f6dc-bc9a-4c16-b390-12567ef93f25\" (UID: \"91c5f6dc-bc9a-4c16-b390-12567ef93f25\") " Feb 03 09:02:50 crc kubenswrapper[4998]: I0203 09:02:50.663992 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91c5f6dc-bc9a-4c16-b390-12567ef93f25-utilities\") pod \"91c5f6dc-bc9a-4c16-b390-12567ef93f25\" (UID: \"91c5f6dc-bc9a-4c16-b390-12567ef93f25\") " Feb 03 09:02:50 crc kubenswrapper[4998]: I0203 09:02:50.664064 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91c5f6dc-bc9a-4c16-b390-12567ef93f25-catalog-content\") pod \"91c5f6dc-bc9a-4c16-b390-12567ef93f25\" (UID: \"91c5f6dc-bc9a-4c16-b390-12567ef93f25\") " Feb 03 09:02:50 crc kubenswrapper[4998]: I0203 09:02:50.665340 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91c5f6dc-bc9a-4c16-b390-12567ef93f25-utilities" (OuterVolumeSpecName: "utilities") pod "91c5f6dc-bc9a-4c16-b390-12567ef93f25" (UID: "91c5f6dc-bc9a-4c16-b390-12567ef93f25"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 09:02:50 crc kubenswrapper[4998]: I0203 09:02:50.671310 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91c5f6dc-bc9a-4c16-b390-12567ef93f25-kube-api-access-bdrfg" (OuterVolumeSpecName: "kube-api-access-bdrfg") pod "91c5f6dc-bc9a-4c16-b390-12567ef93f25" (UID: "91c5f6dc-bc9a-4c16-b390-12567ef93f25"). InnerVolumeSpecName "kube-api-access-bdrfg". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:02:50 crc kubenswrapper[4998]: I0203 09:02:50.687978 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91c5f6dc-bc9a-4c16-b390-12567ef93f25-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "91c5f6dc-bc9a-4c16-b390-12567ef93f25" (UID: "91c5f6dc-bc9a-4c16-b390-12567ef93f25"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 09:02:50 crc kubenswrapper[4998]: I0203 09:02:50.766001 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bdrfg\" (UniqueName: \"kubernetes.io/projected/91c5f6dc-bc9a-4c16-b390-12567ef93f25-kube-api-access-bdrfg\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:50 crc kubenswrapper[4998]: I0203 09:02:50.766042 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91c5f6dc-bc9a-4c16-b390-12567ef93f25-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:50 crc kubenswrapper[4998]: I0203 09:02:50.766061 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91c5f6dc-bc9a-4c16-b390-12567ef93f25-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.097143 4998 generic.go:334] "Generic (PLEG): container finished" podID="91c5f6dc-bc9a-4c16-b390-12567ef93f25" containerID="a1ffd6b39c99226e31002babb64fd1a64e88ff6d9aa6642e1cddef961642d229" exitCode=0 Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.097189 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wzdjd" event={"ID":"91c5f6dc-bc9a-4c16-b390-12567ef93f25","Type":"ContainerDied","Data":"a1ffd6b39c99226e31002babb64fd1a64e88ff6d9aa6642e1cddef961642d229"} Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.097212 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wzdjd" Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.097226 4998 scope.go:117] "RemoveContainer" containerID="a1ffd6b39c99226e31002babb64fd1a64e88ff6d9aa6642e1cddef961642d229" Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.097214 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wzdjd" event={"ID":"91c5f6dc-bc9a-4c16-b390-12567ef93f25","Type":"ContainerDied","Data":"2a5f10c688eb230b133f719ba87fe9a129e79fc2960d2118d99b48a4fbf22f2c"} Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.128287 4998 scope.go:117] "RemoveContainer" containerID="d788bc8e212059e99aa41e5e4dfc91cfa54396de434b2d51b43b109e5f6ddbd1" Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.136355 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wzdjd"] Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.150493 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-wzdjd"] Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.155243 4998 scope.go:117] "RemoveContainer" containerID="195a75ea91bdb07fe71aa8c5b33cf6b0567a3f72a0a3d3829c3553e9c334b215" Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.205986 4998 scope.go:117] "RemoveContainer" containerID="a1ffd6b39c99226e31002babb64fd1a64e88ff6d9aa6642e1cddef961642d229" Feb 03 09:02:51 crc kubenswrapper[4998]: E0203 09:02:51.206529 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a1ffd6b39c99226e31002babb64fd1a64e88ff6d9aa6642e1cddef961642d229\": container with ID starting with a1ffd6b39c99226e31002babb64fd1a64e88ff6d9aa6642e1cddef961642d229 not found: ID does not exist" containerID="a1ffd6b39c99226e31002babb64fd1a64e88ff6d9aa6642e1cddef961642d229" Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.206581 4998 
Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.206613 4998 scope.go:117] "RemoveContainer" containerID="d788bc8e212059e99aa41e5e4dfc91cfa54396de434b2d51b43b109e5f6ddbd1"
Feb 03 09:02:51 crc kubenswrapper[4998]: E0203 09:02:51.206925 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d788bc8e212059e99aa41e5e4dfc91cfa54396de434b2d51b43b109e5f6ddbd1\": container with ID starting with d788bc8e212059e99aa41e5e4dfc91cfa54396de434b2d51b43b109e5f6ddbd1 not found: ID does not exist" containerID="d788bc8e212059e99aa41e5e4dfc91cfa54396de434b2d51b43b109e5f6ddbd1"
Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.206953 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d788bc8e212059e99aa41e5e4dfc91cfa54396de434b2d51b43b109e5f6ddbd1"} err="failed to get container status \"d788bc8e212059e99aa41e5e4dfc91cfa54396de434b2d51b43b109e5f6ddbd1\": rpc error: code = NotFound desc = could not find container \"d788bc8e212059e99aa41e5e4dfc91cfa54396de434b2d51b43b109e5f6ddbd1\": container with ID starting with d788bc8e212059e99aa41e5e4dfc91cfa54396de434b2d51b43b109e5f6ddbd1 not found: ID does not exist"
Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.206972 4998 scope.go:117] "RemoveContainer" containerID="195a75ea91bdb07fe71aa8c5b33cf6b0567a3f72a0a3d3829c3553e9c334b215"
Feb 03 09:02:51 crc kubenswrapper[4998]: E0203 09:02:51.207282 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"195a75ea91bdb07fe71aa8c5b33cf6b0567a3f72a0a3d3829c3553e9c334b215\": container with ID starting with 195a75ea91bdb07fe71aa8c5b33cf6b0567a3f72a0a3d3829c3553e9c334b215 not found: ID does not exist" containerID="195a75ea91bdb07fe71aa8c5b33cf6b0567a3f72a0a3d3829c3553e9c334b215"
Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.207314 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"195a75ea91bdb07fe71aa8c5b33cf6b0567a3f72a0a3d3829c3553e9c334b215"} err="failed to get container status \"195a75ea91bdb07fe71aa8c5b33cf6b0567a3f72a0a3d3829c3553e9c334b215\": rpc error: code = NotFound desc = could not find container \"195a75ea91bdb07fe71aa8c5b33cf6b0567a3f72a0a3d3829c3553e9c334b215\": container with ID starting with 195a75ea91bdb07fe71aa8c5b33cf6b0567a3f72a0a3d3829c3553e9c334b215 not found: ID does not exist"
Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.299583 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6b9db79d55-flsdk"]
Feb 03 09:02:51 crc kubenswrapper[4998]: E0203 09:02:51.300116 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91c5f6dc-bc9a-4c16-b390-12567ef93f25" containerName="registry-server"
Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.300138 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="91c5f6dc-bc9a-4c16-b390-12567ef93f25" containerName="registry-server"
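
The three ContainerStatus failures above are harmless: by the time the deletor re-checked each container it had already been removed, so the runtime answered with gRPC NotFound and the kubelet logged it and moved on. Client-side, that benign case is distinguished with the standard gRPC status API; removeContainer below is a hypothetical stand-in for the CRI round trip:

    package main

    import (
        "fmt"

        "google.golang.org/grpc/codes"
        "google.golang.org/grpc/status"
    )

    // removeContainer stands in for a CRI RemoveContainer call.
    func removeContainer(id string) error {
        return status.Errorf(codes.NotFound, "could not find container %q", id)
    }

    func main() {
        err := removeContainer("a1ffd6b39c99...")
        if status.Code(err) == codes.NotFound {
            fmt.Println("container already gone; treat as removed") // the benign case in the log
            return
        }
        if err != nil {
            fmt.Println("real failure:", err)
        }
    }
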
Feb 03 09:02:51 crc kubenswrapper[4998]: E0203 09:02:51.300152 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91c5f6dc-bc9a-4c16-b390-12567ef93f25" containerName="extract-utilities"
Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.300160 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="91c5f6dc-bc9a-4c16-b390-12567ef93f25" containerName="extract-utilities"
Feb 03 09:02:51 crc kubenswrapper[4998]: E0203 09:02:51.300180 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2dae9d3a-7f7b-42de-bbf0-020df791db8d" containerName="init"
Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.300188 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="2dae9d3a-7f7b-42de-bbf0-020df791db8d" containerName="init"
Feb 03 09:02:51 crc kubenswrapper[4998]: E0203 09:02:51.300210 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91c5f6dc-bc9a-4c16-b390-12567ef93f25" containerName="extract-content"
Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.300216 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="91c5f6dc-bc9a-4c16-b390-12567ef93f25" containerName="extract-content"
Feb 03 09:02:51 crc kubenswrapper[4998]: E0203 09:02:51.300238 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2dae9d3a-7f7b-42de-bbf0-020df791db8d" containerName="dnsmasq-dns"
Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.300245 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="2dae9d3a-7f7b-42de-bbf0-020df791db8d" containerName="dnsmasq-dns"
Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.300450 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="2dae9d3a-7f7b-42de-bbf0-020df791db8d" containerName="dnsmasq-dns"
Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.300465 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="91c5f6dc-bc9a-4c16-b390-12567ef93f25" containerName="registry-server"
Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.301757 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6b9db79d55-flsdk"
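
These cpu_manager/memory_manager RemoveStaleState bursts fire when a new pod is admitted: the resource managers sweep their per-container state and drop entries for pods that no longer exist (the deleted dnsmasq and marketplace pods above), each paired with a "Deleted CPUSet assignment"; the E-level lines are expected noise. A sketch of that sweep over an assignment map (types and values illustrative, not the kubelet's checkpoint format):

    package main

    import "fmt"

    func main() {
        // podUID -> containerName -> assigned CPUs (illustrative state).
        assignments := map[string]map[string]string{
            "2dae9d3a-7f7b": {"init": "2", "dnsmasq-dns": "2-3"},
            "91c5f6dc-bc9a": {"extract-utilities": "4", "registry-server": "4-5"},
        }
        active := map[string]bool{} // neither pod is active any longer

        for podUID, containers := range assignments {
            if active[podUID] {
                continue
            }
            for name := range containers {
                fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n", podUID, name)
            }
            delete(assignments, podUID) // "Deleted CPUSet assignment"
        }
    }
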
Need to start a new one" pod="openstack/horizon-6b9db79d55-flsdk" Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.303868 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.303907 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-rrdbn" Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.304339 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.309573 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.320021 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6b9db79d55-flsdk"] Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.328897 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.329239 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="9bbff215-ce70-481e-b365-c88d60876046" containerName="glance-log" containerID="cri-o://e0eb951f59cc81cd84f7352acf66f491cedc1d18f5b15f1f270fcf806e59eca3" gracePeriod=30 Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.329306 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="9bbff215-ce70-481e-b365-c88d60876046" containerName="glance-httpd" containerID="cri-o://c91c19cfc1b81f291034be303b73960734521d310e296607382f07a72d87478a" gracePeriod=30 Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.376099 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a4b8261b-447d-4da5-b807-14385e683a3e-scripts\") pod \"horizon-6b9db79d55-flsdk\" (UID: \"a4b8261b-447d-4da5-b807-14385e683a3e\") " pod="openstack/horizon-6b9db79d55-flsdk" Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.376495 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5tnwz\" (UniqueName: \"kubernetes.io/projected/a4b8261b-447d-4da5-b807-14385e683a3e-kube-api-access-5tnwz\") pod \"horizon-6b9db79d55-flsdk\" (UID: \"a4b8261b-447d-4da5-b807-14385e683a3e\") " pod="openstack/horizon-6b9db79d55-flsdk" Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.376546 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a4b8261b-447d-4da5-b807-14385e683a3e-config-data\") pod \"horizon-6b9db79d55-flsdk\" (UID: \"a4b8261b-447d-4da5-b807-14385e683a3e\") " pod="openstack/horizon-6b9db79d55-flsdk" Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.376606 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a4b8261b-447d-4da5-b807-14385e683a3e-horizon-secret-key\") pod \"horizon-6b9db79d55-flsdk\" (UID: \"a4b8261b-447d-4da5-b807-14385e683a3e\") " pod="openstack/horizon-6b9db79d55-flsdk" Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.376964 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/a4b8261b-447d-4da5-b807-14385e683a3e-logs\") pod \"horizon-6b9db79d55-flsdk\" (UID: \"a4b8261b-447d-4da5-b807-14385e683a3e\") " pod="openstack/horizon-6b9db79d55-flsdk" Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.416191 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-76df769777-ctkvq"] Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.420045 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-76df769777-ctkvq" Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.427705 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.427990 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="6185f8b1-ac87-4f2c-9046-698033fac18f" containerName="glance-log" containerID="cri-o://8f625047d9c339deca82aaa35a157accc63e915b898cda507604c573e8945883" gracePeriod=30 Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.428208 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="6185f8b1-ac87-4f2c-9046-698033fac18f" containerName="glance-httpd" containerID="cri-o://a491e52e6f6b8a080073ac81d9d83bc305117b38152c9a6748c9e053ab82fb2c" gracePeriod=30 Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.451321 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-76df769777-ctkvq"] Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.479231 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5tnwz\" (UniqueName: \"kubernetes.io/projected/a4b8261b-447d-4da5-b807-14385e683a3e-kube-api-access-5tnwz\") pod \"horizon-6b9db79d55-flsdk\" (UID: \"a4b8261b-447d-4da5-b807-14385e683a3e\") " pod="openstack/horizon-6b9db79d55-flsdk" Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.479280 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/685d4b78-a56c-4c73-9c21-fa7ca0218d52-horizon-secret-key\") pod \"horizon-76df769777-ctkvq\" (UID: \"685d4b78-a56c-4c73-9c21-fa7ca0218d52\") " pod="openstack/horizon-76df769777-ctkvq" Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.479326 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a4b8261b-447d-4da5-b807-14385e683a3e-config-data\") pod \"horizon-6b9db79d55-flsdk\" (UID: \"a4b8261b-447d-4da5-b807-14385e683a3e\") " pod="openstack/horizon-6b9db79d55-flsdk" Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.479382 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/685d4b78-a56c-4c73-9c21-fa7ca0218d52-scripts\") pod \"horizon-76df769777-ctkvq\" (UID: \"685d4b78-a56c-4c73-9c21-fa7ca0218d52\") " pod="openstack/horizon-76df769777-ctkvq" Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.479418 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a4b8261b-447d-4da5-b807-14385e683a3e-horizon-secret-key\") pod \"horizon-6b9db79d55-flsdk\" (UID: \"a4b8261b-447d-4da5-b807-14385e683a3e\") " pod="openstack/horizon-6b9db79d55-flsdk" Feb 03 09:02:51 crc 
Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.479465 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a4b8261b-447d-4da5-b807-14385e683a3e-logs\") pod \"horizon-6b9db79d55-flsdk\" (UID: \"a4b8261b-447d-4da5-b807-14385e683a3e\") " pod="openstack/horizon-6b9db79d55-flsdk"
Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.479483 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/685d4b78-a56c-4c73-9c21-fa7ca0218d52-config-data\") pod \"horizon-76df769777-ctkvq\" (UID: \"685d4b78-a56c-4c73-9c21-fa7ca0218d52\") " pod="openstack/horizon-76df769777-ctkvq"
Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.479500 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/685d4b78-a56c-4c73-9c21-fa7ca0218d52-logs\") pod \"horizon-76df769777-ctkvq\" (UID: \"685d4b78-a56c-4c73-9c21-fa7ca0218d52\") " pod="openstack/horizon-76df769777-ctkvq"
Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.479519 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a4b8261b-447d-4da5-b807-14385e683a3e-scripts\") pod \"horizon-6b9db79d55-flsdk\" (UID: \"a4b8261b-447d-4da5-b807-14385e683a3e\") " pod="openstack/horizon-6b9db79d55-flsdk"
Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.481262 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a4b8261b-447d-4da5-b807-14385e683a3e-config-data\") pod \"horizon-6b9db79d55-flsdk\" (UID: \"a4b8261b-447d-4da5-b807-14385e683a3e\") " pod="openstack/horizon-6b9db79d55-flsdk"
Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.482687 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a4b8261b-447d-4da5-b807-14385e683a3e-logs\") pod \"horizon-6b9db79d55-flsdk\" (UID: \"a4b8261b-447d-4da5-b807-14385e683a3e\") " pod="openstack/horizon-6b9db79d55-flsdk"
Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.483189 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a4b8261b-447d-4da5-b807-14385e683a3e-scripts\") pod \"horizon-6b9db79d55-flsdk\" (UID: \"a4b8261b-447d-4da5-b807-14385e683a3e\") " pod="openstack/horizon-6b9db79d55-flsdk"
Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.493545 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a4b8261b-447d-4da5-b807-14385e683a3e-horizon-secret-key\") pod \"horizon-6b9db79d55-flsdk\" (UID: \"a4b8261b-447d-4da5-b807-14385e683a3e\") " pod="openstack/horizon-6b9db79d55-flsdk"
Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.498431 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5tnwz\" (UniqueName: \"kubernetes.io/projected/a4b8261b-447d-4da5-b807-14385e683a3e-kube-api-access-5tnwz\") pod \"horizon-6b9db79d55-flsdk\" (UID: \"a4b8261b-447d-4da5-b807-14385e683a3e\") " pod="openstack/horizon-6b9db79d55-flsdk"
\"kubernetes.io/projected/a4b8261b-447d-4da5-b807-14385e683a3e-kube-api-access-5tnwz\") pod \"horizon-6b9db79d55-flsdk\" (UID: \"a4b8261b-447d-4da5-b807-14385e683a3e\") " pod="openstack/horizon-6b9db79d55-flsdk" Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.581263 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/685d4b78-a56c-4c73-9c21-fa7ca0218d52-horizon-secret-key\") pod \"horizon-76df769777-ctkvq\" (UID: \"685d4b78-a56c-4c73-9c21-fa7ca0218d52\") " pod="openstack/horizon-76df769777-ctkvq" Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.581351 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/685d4b78-a56c-4c73-9c21-fa7ca0218d52-scripts\") pod \"horizon-76df769777-ctkvq\" (UID: \"685d4b78-a56c-4c73-9c21-fa7ca0218d52\") " pod="openstack/horizon-76df769777-ctkvq" Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.581390 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f72sg\" (UniqueName: \"kubernetes.io/projected/685d4b78-a56c-4c73-9c21-fa7ca0218d52-kube-api-access-f72sg\") pod \"horizon-76df769777-ctkvq\" (UID: \"685d4b78-a56c-4c73-9c21-fa7ca0218d52\") " pod="openstack/horizon-76df769777-ctkvq" Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.581412 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/685d4b78-a56c-4c73-9c21-fa7ca0218d52-config-data\") pod \"horizon-76df769777-ctkvq\" (UID: \"685d4b78-a56c-4c73-9c21-fa7ca0218d52\") " pod="openstack/horizon-76df769777-ctkvq" Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.581432 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/685d4b78-a56c-4c73-9c21-fa7ca0218d52-logs\") pod \"horizon-76df769777-ctkvq\" (UID: \"685d4b78-a56c-4c73-9c21-fa7ca0218d52\") " pod="openstack/horizon-76df769777-ctkvq" Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.582248 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/685d4b78-a56c-4c73-9c21-fa7ca0218d52-scripts\") pod \"horizon-76df769777-ctkvq\" (UID: \"685d4b78-a56c-4c73-9c21-fa7ca0218d52\") " pod="openstack/horizon-76df769777-ctkvq" Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.582897 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/685d4b78-a56c-4c73-9c21-fa7ca0218d52-config-data\") pod \"horizon-76df769777-ctkvq\" (UID: \"685d4b78-a56c-4c73-9c21-fa7ca0218d52\") " pod="openstack/horizon-76df769777-ctkvq" Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.583025 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/685d4b78-a56c-4c73-9c21-fa7ca0218d52-logs\") pod \"horizon-76df769777-ctkvq\" (UID: \"685d4b78-a56c-4c73-9c21-fa7ca0218d52\") " pod="openstack/horizon-76df769777-ctkvq" Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.584335 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/685d4b78-a56c-4c73-9c21-fa7ca0218d52-horizon-secret-key\") pod \"horizon-76df769777-ctkvq\" (UID: \"685d4b78-a56c-4c73-9c21-fa7ca0218d52\") " pod="openstack/horizon-76df769777-ctkvq" 
Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.599796 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f72sg\" (UniqueName: \"kubernetes.io/projected/685d4b78-a56c-4c73-9c21-fa7ca0218d52-kube-api-access-f72sg\") pod \"horizon-76df769777-ctkvq\" (UID: \"685d4b78-a56c-4c73-9c21-fa7ca0218d52\") " pod="openstack/horizon-76df769777-ctkvq"
Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.626039 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6b9db79d55-flsdk"
Feb 03 09:02:51 crc kubenswrapper[4998]: I0203 09:02:51.639760 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-76df769777-ctkvq"
Feb 03 09:02:52 crc kubenswrapper[4998]: I0203 09:02:52.023875 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6b9db79d55-flsdk"]
Feb 03 09:02:52 crc kubenswrapper[4998]: I0203 09:02:52.055126 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7cfddfbbcc-mxshd"]
Feb 03 09:02:52 crc kubenswrapper[4998]: I0203 09:02:52.056703 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7cfddfbbcc-mxshd"
Feb 03 09:02:52 crc kubenswrapper[4998]: I0203 09:02:52.065129 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7cfddfbbcc-mxshd"]
Feb 03 09:02:52 crc kubenswrapper[4998]: I0203 09:02:52.092102 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/653c897b-8672-405d-9e1e-e877f22d452e-config-data\") pod \"horizon-7cfddfbbcc-mxshd\" (UID: \"653c897b-8672-405d-9e1e-e877f22d452e\") " pod="openstack/horizon-7cfddfbbcc-mxshd"
Feb 03 09:02:52 crc kubenswrapper[4998]: I0203 09:02:52.092161 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/653c897b-8672-405d-9e1e-e877f22d452e-horizon-secret-key\") pod \"horizon-7cfddfbbcc-mxshd\" (UID: \"653c897b-8672-405d-9e1e-e877f22d452e\") " pod="openstack/horizon-7cfddfbbcc-mxshd"
Feb 03 09:02:52 crc kubenswrapper[4998]: I0203 09:02:52.092187 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h5tcx\" (UniqueName: \"kubernetes.io/projected/653c897b-8672-405d-9e1e-e877f22d452e-kube-api-access-h5tcx\") pod \"horizon-7cfddfbbcc-mxshd\" (UID: \"653c897b-8672-405d-9e1e-e877f22d452e\") " pod="openstack/horizon-7cfddfbbcc-mxshd"
Feb 03 09:02:52 crc kubenswrapper[4998]: I0203 09:02:52.092262 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/653c897b-8672-405d-9e1e-e877f22d452e-scripts\") pod \"horizon-7cfddfbbcc-mxshd\" (UID: \"653c897b-8672-405d-9e1e-e877f22d452e\") " pod="openstack/horizon-7cfddfbbcc-mxshd"
Feb 03 09:02:52 crc kubenswrapper[4998]: I0203 09:02:52.092346 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/653c897b-8672-405d-9e1e-e877f22d452e-logs\") pod \"horizon-7cfddfbbcc-mxshd\" (UID: \"653c897b-8672-405d-9e1e-e877f22d452e\") " pod="openstack/horizon-7cfddfbbcc-mxshd"
Feb 03 09:02:52 crc kubenswrapper[4998]: I0203 09:02:52.115312 4998 generic.go:334] "Generic (PLEG): container finished" podID="9bbff215-ce70-481e-b365-c88d60876046" containerID="e0eb951f59cc81cd84f7352acf66f491cedc1d18f5b15f1f270fcf806e59eca3" exitCode=143
containerID="e0eb951f59cc81cd84f7352acf66f491cedc1d18f5b15f1f270fcf806e59eca3" exitCode=143 Feb 03 09:02:52 crc kubenswrapper[4998]: I0203 09:02:52.115353 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9bbff215-ce70-481e-b365-c88d60876046","Type":"ContainerDied","Data":"e0eb951f59cc81cd84f7352acf66f491cedc1d18f5b15f1f270fcf806e59eca3"} Feb 03 09:02:52 crc kubenswrapper[4998]: I0203 09:02:52.117392 4998 generic.go:334] "Generic (PLEG): container finished" podID="6185f8b1-ac87-4f2c-9046-698033fac18f" containerID="8f625047d9c339deca82aaa35a157accc63e915b898cda507604c573e8945883" exitCode=143 Feb 03 09:02:52 crc kubenswrapper[4998]: I0203 09:02:52.117433 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6185f8b1-ac87-4f2c-9046-698033fac18f","Type":"ContainerDied","Data":"8f625047d9c339deca82aaa35a157accc63e915b898cda507604c573e8945883"} Feb 03 09:02:52 crc kubenswrapper[4998]: I0203 09:02:52.130940 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-76df769777-ctkvq"] Feb 03 09:02:52 crc kubenswrapper[4998]: I0203 09:02:52.148285 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6b9db79d55-flsdk"] Feb 03 09:02:52 crc kubenswrapper[4998]: W0203 09:02:52.151236 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda4b8261b_447d_4da5_b807_14385e683a3e.slice/crio-8e1883be21b95fa02df78da53caac36567ff6252b0bc6d9833ca626d3974e547 WatchSource:0}: Error finding container 8e1883be21b95fa02df78da53caac36567ff6252b0bc6d9833ca626d3974e547: Status 404 returned error can't find the container with id 8e1883be21b95fa02df78da53caac36567ff6252b0bc6d9833ca626d3974e547 Feb 03 09:02:52 crc kubenswrapper[4998]: I0203 09:02:52.193815 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/653c897b-8672-405d-9e1e-e877f22d452e-horizon-secret-key\") pod \"horizon-7cfddfbbcc-mxshd\" (UID: \"653c897b-8672-405d-9e1e-e877f22d452e\") " pod="openstack/horizon-7cfddfbbcc-mxshd" Feb 03 09:02:52 crc kubenswrapper[4998]: I0203 09:02:52.193872 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h5tcx\" (UniqueName: \"kubernetes.io/projected/653c897b-8672-405d-9e1e-e877f22d452e-kube-api-access-h5tcx\") pod \"horizon-7cfddfbbcc-mxshd\" (UID: \"653c897b-8672-405d-9e1e-e877f22d452e\") " pod="openstack/horizon-7cfddfbbcc-mxshd" Feb 03 09:02:52 crc kubenswrapper[4998]: I0203 09:02:52.193957 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/653c897b-8672-405d-9e1e-e877f22d452e-scripts\") pod \"horizon-7cfddfbbcc-mxshd\" (UID: \"653c897b-8672-405d-9e1e-e877f22d452e\") " pod="openstack/horizon-7cfddfbbcc-mxshd" Feb 03 09:02:52 crc kubenswrapper[4998]: I0203 09:02:52.194064 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/653c897b-8672-405d-9e1e-e877f22d452e-logs\") pod \"horizon-7cfddfbbcc-mxshd\" (UID: \"653c897b-8672-405d-9e1e-e877f22d452e\") " pod="openstack/horizon-7cfddfbbcc-mxshd" Feb 03 09:02:52 crc kubenswrapper[4998]: I0203 09:02:52.194112 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/653c897b-8672-405d-9e1e-e877f22d452e-config-data\") pod \"horizon-7cfddfbbcc-mxshd\" (UID: \"653c897b-8672-405d-9e1e-e877f22d452e\") " pod="openstack/horizon-7cfddfbbcc-mxshd" Feb 03 09:02:52 crc kubenswrapper[4998]: I0203 09:02:52.194716 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/653c897b-8672-405d-9e1e-e877f22d452e-logs\") pod \"horizon-7cfddfbbcc-mxshd\" (UID: \"653c897b-8672-405d-9e1e-e877f22d452e\") " pod="openstack/horizon-7cfddfbbcc-mxshd" Feb 03 09:02:52 crc kubenswrapper[4998]: I0203 09:02:52.195059 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/653c897b-8672-405d-9e1e-e877f22d452e-scripts\") pod \"horizon-7cfddfbbcc-mxshd\" (UID: \"653c897b-8672-405d-9e1e-e877f22d452e\") " pod="openstack/horizon-7cfddfbbcc-mxshd" Feb 03 09:02:52 crc kubenswrapper[4998]: I0203 09:02:52.195566 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/653c897b-8672-405d-9e1e-e877f22d452e-config-data\") pod \"horizon-7cfddfbbcc-mxshd\" (UID: \"653c897b-8672-405d-9e1e-e877f22d452e\") " pod="openstack/horizon-7cfddfbbcc-mxshd" Feb 03 09:02:52 crc kubenswrapper[4998]: I0203 09:02:52.203170 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/653c897b-8672-405d-9e1e-e877f22d452e-horizon-secret-key\") pod \"horizon-7cfddfbbcc-mxshd\" (UID: \"653c897b-8672-405d-9e1e-e877f22d452e\") " pod="openstack/horizon-7cfddfbbcc-mxshd" Feb 03 09:02:52 crc kubenswrapper[4998]: I0203 09:02:52.211470 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h5tcx\" (UniqueName: \"kubernetes.io/projected/653c897b-8672-405d-9e1e-e877f22d452e-kube-api-access-h5tcx\") pod \"horizon-7cfddfbbcc-mxshd\" (UID: \"653c897b-8672-405d-9e1e-e877f22d452e\") " pod="openstack/horizon-7cfddfbbcc-mxshd" Feb 03 09:02:52 crc kubenswrapper[4998]: I0203 09:02:52.398181 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7cfddfbbcc-mxshd" Feb 03 09:02:52 crc kubenswrapper[4998]: I0203 09:02:52.443770 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91c5f6dc-bc9a-4c16-b390-12567ef93f25" path="/var/lib/kubelet/pods/91c5f6dc-bc9a-4c16-b390-12567ef93f25/volumes" Feb 03 09:02:52 crc kubenswrapper[4998]: I0203 09:02:52.912403 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7cfddfbbcc-mxshd"] Feb 03 09:02:52 crc kubenswrapper[4998]: W0203 09:02:52.917734 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod653c897b_8672_405d_9e1e_e877f22d452e.slice/crio-627ba7b30f3be4a2360a9f881daeb3691d35cb13dcecd6c3e5e773fc2894da6e WatchSource:0}: Error finding container 627ba7b30f3be4a2360a9f881daeb3691d35cb13dcecd6c3e5e773fc2894da6e: Status 404 returned error can't find the container with id 627ba7b30f3be4a2360a9f881daeb3691d35cb13dcecd6c3e5e773fc2894da6e Feb 03 09:02:53 crc kubenswrapper[4998]: I0203 09:02:53.132421 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-76df769777-ctkvq" event={"ID":"685d4b78-a56c-4c73-9c21-fa7ca0218d52","Type":"ContainerStarted","Data":"6e4a53f99d275b55301c0ecfee968137b6d9bb80da0354e446f31c05bf4222d2"} Feb 03 09:02:53 crc kubenswrapper[4998]: I0203 09:02:53.133850 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7cfddfbbcc-mxshd" event={"ID":"653c897b-8672-405d-9e1e-e877f22d452e","Type":"ContainerStarted","Data":"627ba7b30f3be4a2360a9f881daeb3691d35cb13dcecd6c3e5e773fc2894da6e"} Feb 03 09:02:53 crc kubenswrapper[4998]: I0203 09:02:53.135754 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6b9db79d55-flsdk" event={"ID":"a4b8261b-447d-4da5-b807-14385e683a3e","Type":"ContainerStarted","Data":"8e1883be21b95fa02df78da53caac36567ff6252b0bc6d9833ca626d3974e547"} Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.181161 4998 generic.go:334] "Generic (PLEG): container finished" podID="6185f8b1-ac87-4f2c-9046-698033fac18f" containerID="a491e52e6f6b8a080073ac81d9d83bc305117b38152c9a6748c9e053ab82fb2c" exitCode=0 Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.181246 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6185f8b1-ac87-4f2c-9046-698033fac18f","Type":"ContainerDied","Data":"a491e52e6f6b8a080073ac81d9d83bc305117b38152c9a6748c9e053ab82fb2c"} Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.185644 4998 generic.go:334] "Generic (PLEG): container finished" podID="9bbff215-ce70-481e-b365-c88d60876046" containerID="c91c19cfc1b81f291034be303b73960734521d310e296607382f07a72d87478a" exitCode=0 Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.185691 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9bbff215-ce70-481e-b365-c88d60876046","Type":"ContainerDied","Data":"c91c19cfc1b81f291034be303b73960734521d310e296607382f07a72d87478a"} Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.185717 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9bbff215-ce70-481e-b365-c88d60876046","Type":"ContainerDied","Data":"a5ee8d83fbe8ab899adedbc25098eba7f82afcd2cd05e7965728c0a62b6aaaf6"} Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.185735 4998 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="a5ee8d83fbe8ab899adedbc25098eba7f82afcd2cd05e7965728c0a62b6aaaf6" Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.228913 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.260685 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9bbff215-ce70-481e-b365-c88d60876046-httpd-run\") pod \"9bbff215-ce70-481e-b365-c88d60876046\" (UID: \"9bbff215-ce70-481e-b365-c88d60876046\") " Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.260728 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9bbff215-ce70-481e-b365-c88d60876046-logs\") pod \"9bbff215-ce70-481e-b365-c88d60876046\" (UID: \"9bbff215-ce70-481e-b365-c88d60876046\") " Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.260909 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bbff215-ce70-481e-b365-c88d60876046-config-data\") pod \"9bbff215-ce70-481e-b365-c88d60876046\" (UID: \"9bbff215-ce70-481e-b365-c88d60876046\") " Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.260963 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bbff215-ce70-481e-b365-c88d60876046-combined-ca-bundle\") pod \"9bbff215-ce70-481e-b365-c88d60876046\" (UID: \"9bbff215-ce70-481e-b365-c88d60876046\") " Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.261007 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jfr22\" (UniqueName: \"kubernetes.io/projected/9bbff215-ce70-481e-b365-c88d60876046-kube-api-access-jfr22\") pod \"9bbff215-ce70-481e-b365-c88d60876046\" (UID: \"9bbff215-ce70-481e-b365-c88d60876046\") " Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.261042 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9bbff215-ce70-481e-b365-c88d60876046-scripts\") pod \"9bbff215-ce70-481e-b365-c88d60876046\" (UID: \"9bbff215-ce70-481e-b365-c88d60876046\") " Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.261340 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9bbff215-ce70-481e-b365-c88d60876046-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "9bbff215-ce70-481e-b365-c88d60876046" (UID: "9bbff215-ce70-481e-b365-c88d60876046"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.261811 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9bbff215-ce70-481e-b365-c88d60876046-logs" (OuterVolumeSpecName: "logs") pod "9bbff215-ce70-481e-b365-c88d60876046" (UID: "9bbff215-ce70-481e-b365-c88d60876046"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.267760 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bbff215-ce70-481e-b365-c88d60876046-scripts" (OuterVolumeSpecName: "scripts") pod "9bbff215-ce70-481e-b365-c88d60876046" (UID: "9bbff215-ce70-481e-b365-c88d60876046"). 
InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.272281 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9bbff215-ce70-481e-b365-c88d60876046-kube-api-access-jfr22" (OuterVolumeSpecName: "kube-api-access-jfr22") pod "9bbff215-ce70-481e-b365-c88d60876046" (UID: "9bbff215-ce70-481e-b365-c88d60876046"). InnerVolumeSpecName "kube-api-access-jfr22". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.354502 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.362093 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bbff215-ce70-481e-b365-c88d60876046-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9bbff215-ce70-481e-b365-c88d60876046" (UID: "9bbff215-ce70-481e-b365-c88d60876046"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.362306 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bbff215-ce70-481e-b365-c88d60876046-combined-ca-bundle\") pod \"9bbff215-ce70-481e-b365-c88d60876046\" (UID: \"9bbff215-ce70-481e-b365-c88d60876046\") " Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.362852 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jfr22\" (UniqueName: \"kubernetes.io/projected/9bbff215-ce70-481e-b365-c88d60876046-kube-api-access-jfr22\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.362871 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9bbff215-ce70-481e-b365-c88d60876046-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.362882 4998 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9bbff215-ce70-481e-b365-c88d60876046-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.362893 4998 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9bbff215-ce70-481e-b365-c88d60876046-logs\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:55 crc kubenswrapper[4998]: W0203 09:02:55.362978 4998 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/9bbff215-ce70-481e-b365-c88d60876046/volumes/kubernetes.io~secret/combined-ca-bundle Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.362994 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bbff215-ce70-481e-b365-c88d60876046-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9bbff215-ce70-481e-b365-c88d60876046" (UID: "9bbff215-ce70-481e-b365-c88d60876046"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.381956 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bbff215-ce70-481e-b365-c88d60876046-config-data" (OuterVolumeSpecName: "config-data") pod "9bbff215-ce70-481e-b365-c88d60876046" (UID: "9bbff215-ce70-481e-b365-c88d60876046"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.464995 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bbff215-ce70-481e-b365-c88d60876046-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.465023 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bbff215-ce70-481e-b365-c88d60876046-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.567940 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6185f8b1-ac87-4f2c-9046-698033fac18f-config-data\") pod \"6185f8b1-ac87-4f2c-9046-698033fac18f\" (UID: \"6185f8b1-ac87-4f2c-9046-698033fac18f\") " Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.568009 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6185f8b1-ac87-4f2c-9046-698033fac18f-logs\") pod \"6185f8b1-ac87-4f2c-9046-698033fac18f\" (UID: \"6185f8b1-ac87-4f2c-9046-698033fac18f\") " Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.568302 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6185f8b1-ac87-4f2c-9046-698033fac18f-httpd-run\") pod \"6185f8b1-ac87-4f2c-9046-698033fac18f\" (UID: \"6185f8b1-ac87-4f2c-9046-698033fac18f\") " Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.568519 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6185f8b1-ac87-4f2c-9046-698033fac18f-scripts\") pod \"6185f8b1-ac87-4f2c-9046-698033fac18f\" (UID: \"6185f8b1-ac87-4f2c-9046-698033fac18f\") " Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.568690 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zl22b\" (UniqueName: \"kubernetes.io/projected/6185f8b1-ac87-4f2c-9046-698033fac18f-kube-api-access-zl22b\") pod \"6185f8b1-ac87-4f2c-9046-698033fac18f\" (UID: \"6185f8b1-ac87-4f2c-9046-698033fac18f\") " Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.568766 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6185f8b1-ac87-4f2c-9046-698033fac18f-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "6185f8b1-ac87-4f2c-9046-698033fac18f" (UID: "6185f8b1-ac87-4f2c-9046-698033fac18f"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.568933 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6185f8b1-ac87-4f2c-9046-698033fac18f-combined-ca-bundle\") pod \"6185f8b1-ac87-4f2c-9046-698033fac18f\" (UID: \"6185f8b1-ac87-4f2c-9046-698033fac18f\") " Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.569076 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6185f8b1-ac87-4f2c-9046-698033fac18f-logs" (OuterVolumeSpecName: "logs") pod "6185f8b1-ac87-4f2c-9046-698033fac18f" (UID: "6185f8b1-ac87-4f2c-9046-698033fac18f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.571544 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6185f8b1-ac87-4f2c-9046-698033fac18f-kube-api-access-zl22b" (OuterVolumeSpecName: "kube-api-access-zl22b") pod "6185f8b1-ac87-4f2c-9046-698033fac18f" (UID: "6185f8b1-ac87-4f2c-9046-698033fac18f"). InnerVolumeSpecName "kube-api-access-zl22b". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.572393 4998 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6185f8b1-ac87-4f2c-9046-698033fac18f-logs\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.572415 4998 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6185f8b1-ac87-4f2c-9046-698033fac18f-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.572425 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zl22b\" (UniqueName: \"kubernetes.io/projected/6185f8b1-ac87-4f2c-9046-698033fac18f-kube-api-access-zl22b\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.582211 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6185f8b1-ac87-4f2c-9046-698033fac18f-scripts" (OuterVolumeSpecName: "scripts") pod "6185f8b1-ac87-4f2c-9046-698033fac18f" (UID: "6185f8b1-ac87-4f2c-9046-698033fac18f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.596821 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6185f8b1-ac87-4f2c-9046-698033fac18f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6185f8b1-ac87-4f2c-9046-698033fac18f" (UID: "6185f8b1-ac87-4f2c-9046-698033fac18f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.629082 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6185f8b1-ac87-4f2c-9046-698033fac18f-config-data" (OuterVolumeSpecName: "config-data") pod "6185f8b1-ac87-4f2c-9046-698033fac18f" (UID: "6185f8b1-ac87-4f2c-9046-698033fac18f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.675101 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6185f8b1-ac87-4f2c-9046-698033fac18f-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.675136 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6185f8b1-ac87-4f2c-9046-698033fac18f-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:55 crc kubenswrapper[4998]: I0203 09:02:55.675146 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6185f8b1-ac87-4f2c-9046-698033fac18f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.201338 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.201331 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6185f8b1-ac87-4f2c-9046-698033fac18f","Type":"ContainerDied","Data":"9d7b9ae810beee5374f37a4b8a0ac955ce118cb34c97dd80b6b28aa11a02f770"} Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.201408 4998 scope.go:117] "RemoveContainer" containerID="a491e52e6f6b8a080073ac81d9d83bc305117b38152c9a6748c9e053ab82fb2c" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.201364 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.249929 4998 scope.go:117] "RemoveContainer" containerID="8f625047d9c339deca82aaa35a157accc63e915b898cda507604c573e8945883" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.261997 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.285051 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.301484 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.311206 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 09:02:56 crc kubenswrapper[4998]: E0203 09:02:56.311737 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6185f8b1-ac87-4f2c-9046-698033fac18f" containerName="glance-log" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.311754 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="6185f8b1-ac87-4f2c-9046-698033fac18f" containerName="glance-log" Feb 03 09:02:56 crc kubenswrapper[4998]: E0203 09:02:56.311772 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6185f8b1-ac87-4f2c-9046-698033fac18f" containerName="glance-httpd" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.311795 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="6185f8b1-ac87-4f2c-9046-698033fac18f" containerName="glance-httpd" Feb 03 09:02:56 crc kubenswrapper[4998]: E0203 09:02:56.311812 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bbff215-ce70-481e-b365-c88d60876046" containerName="glance-log" Feb 03 09:02:56 crc 
kubenswrapper[4998]: I0203 09:02:56.311821 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bbff215-ce70-481e-b365-c88d60876046" containerName="glance-log" Feb 03 09:02:56 crc kubenswrapper[4998]: E0203 09:02:56.311837 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bbff215-ce70-481e-b365-c88d60876046" containerName="glance-httpd" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.311844 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bbff215-ce70-481e-b365-c88d60876046" containerName="glance-httpd" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.315800 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="9bbff215-ce70-481e-b365-c88d60876046" containerName="glance-log" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.315845 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="6185f8b1-ac87-4f2c-9046-698033fac18f" containerName="glance-log" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.315864 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="6185f8b1-ac87-4f2c-9046-698033fac18f" containerName="glance-httpd" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.315884 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="9bbff215-ce70-481e-b365-c88d60876046" containerName="glance-httpd" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.317408 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.322669 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.323109 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.323947 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-2l4z5" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.324886 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.370450 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.385643 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.387617 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.391154 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.391852 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d8d27788-6ccd-4466-9b8a-718a9b1d4d82-logs\") pod \"glance-default-internal-api-0\" (UID: \"d8d27788-6ccd-4466-9b8a-718a9b1d4d82\") " pod="openstack/glance-default-internal-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.392210 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd51c9c0-8d3a-4119-bbab-3f97a800d180-config-data\") pod \"glance-default-external-api-0\" (UID: \"bd51c9c0-8d3a-4119-bbab-3f97a800d180\") " pod="openstack/glance-default-external-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.392252 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd51c9c0-8d3a-4119-bbab-3f97a800d180-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"bd51c9c0-8d3a-4119-bbab-3f97a800d180\") " pod="openstack/glance-default-external-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.392300 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzgp6\" (UniqueName: \"kubernetes.io/projected/d8d27788-6ccd-4466-9b8a-718a9b1d4d82-kube-api-access-qzgp6\") pod \"glance-default-internal-api-0\" (UID: \"d8d27788-6ccd-4466-9b8a-718a9b1d4d82\") " pod="openstack/glance-default-internal-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.392331 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bd51c9c0-8d3a-4119-bbab-3f97a800d180-scripts\") pod \"glance-default-external-api-0\" (UID: \"bd51c9c0-8d3a-4119-bbab-3f97a800d180\") " pod="openstack/glance-default-external-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.392358 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/bd51c9c0-8d3a-4119-bbab-3f97a800d180-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"bd51c9c0-8d3a-4119-bbab-3f97a800d180\") " pod="openstack/glance-default-external-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.392395 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8d27788-6ccd-4466-9b8a-718a9b1d4d82-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d8d27788-6ccd-4466-9b8a-718a9b1d4d82\") " pod="openstack/glance-default-internal-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.392411 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bd51c9c0-8d3a-4119-bbab-3f97a800d180-logs\") pod \"glance-default-external-api-0\" (UID: \"bd51c9c0-8d3a-4119-bbab-3f97a800d180\") " pod="openstack/glance-default-external-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.392428 4998 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8d27788-6ccd-4466-9b8a-718a9b1d4d82-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d8d27788-6ccd-4466-9b8a-718a9b1d4d82\") " pod="openstack/glance-default-internal-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.392453 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d8d27788-6ccd-4466-9b8a-718a9b1d4d82-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d8d27788-6ccd-4466-9b8a-718a9b1d4d82\") " pod="openstack/glance-default-internal-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.392475 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8d27788-6ccd-4466-9b8a-718a9b1d4d82-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d8d27788-6ccd-4466-9b8a-718a9b1d4d82\") " pod="openstack/glance-default-internal-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.392498 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxx4k\" (UniqueName: \"kubernetes.io/projected/bd51c9c0-8d3a-4119-bbab-3f97a800d180-kube-api-access-hxx4k\") pod \"glance-default-external-api-0\" (UID: \"bd51c9c0-8d3a-4119-bbab-3f97a800d180\") " pod="openstack/glance-default-external-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.394953 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.441851 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6185f8b1-ac87-4f2c-9046-698033fac18f" path="/var/lib/kubelet/pods/6185f8b1-ac87-4f2c-9046-698033fac18f/volumes" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.442463 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9bbff215-ce70-481e-b365-c88d60876046" path="/var/lib/kubelet/pods/9bbff215-ce70-481e-b365-c88d60876046/volumes" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.494002 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzgp6\" (UniqueName: \"kubernetes.io/projected/d8d27788-6ccd-4466-9b8a-718a9b1d4d82-kube-api-access-qzgp6\") pod \"glance-default-internal-api-0\" (UID: \"d8d27788-6ccd-4466-9b8a-718a9b1d4d82\") " pod="openstack/glance-default-internal-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.494083 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bd51c9c0-8d3a-4119-bbab-3f97a800d180-scripts\") pod \"glance-default-external-api-0\" (UID: \"bd51c9c0-8d3a-4119-bbab-3f97a800d180\") " pod="openstack/glance-default-external-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.494126 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/bd51c9c0-8d3a-4119-bbab-3f97a800d180-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"bd51c9c0-8d3a-4119-bbab-3f97a800d180\") " pod="openstack/glance-default-external-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.494203 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/d8d27788-6ccd-4466-9b8a-718a9b1d4d82-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d8d27788-6ccd-4466-9b8a-718a9b1d4d82\") " pod="openstack/glance-default-internal-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.494230 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bd51c9c0-8d3a-4119-bbab-3f97a800d180-logs\") pod \"glance-default-external-api-0\" (UID: \"bd51c9c0-8d3a-4119-bbab-3f97a800d180\") " pod="openstack/glance-default-external-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.494248 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8d27788-6ccd-4466-9b8a-718a9b1d4d82-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d8d27788-6ccd-4466-9b8a-718a9b1d4d82\") " pod="openstack/glance-default-internal-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.494279 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d8d27788-6ccd-4466-9b8a-718a9b1d4d82-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d8d27788-6ccd-4466-9b8a-718a9b1d4d82\") " pod="openstack/glance-default-internal-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.494301 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8d27788-6ccd-4466-9b8a-718a9b1d4d82-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d8d27788-6ccd-4466-9b8a-718a9b1d4d82\") " pod="openstack/glance-default-internal-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.494342 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxx4k\" (UniqueName: \"kubernetes.io/projected/bd51c9c0-8d3a-4119-bbab-3f97a800d180-kube-api-access-hxx4k\") pod \"glance-default-external-api-0\" (UID: \"bd51c9c0-8d3a-4119-bbab-3f97a800d180\") " pod="openstack/glance-default-external-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.494379 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d8d27788-6ccd-4466-9b8a-718a9b1d4d82-logs\") pod \"glance-default-internal-api-0\" (UID: \"d8d27788-6ccd-4466-9b8a-718a9b1d4d82\") " pod="openstack/glance-default-internal-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.494437 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd51c9c0-8d3a-4119-bbab-3f97a800d180-config-data\") pod \"glance-default-external-api-0\" (UID: \"bd51c9c0-8d3a-4119-bbab-3f97a800d180\") " pod="openstack/glance-default-external-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.494461 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd51c9c0-8d3a-4119-bbab-3f97a800d180-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"bd51c9c0-8d3a-4119-bbab-3f97a800d180\") " pod="openstack/glance-default-external-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.496568 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bd51c9c0-8d3a-4119-bbab-3f97a800d180-logs\") pod 
\"glance-default-external-api-0\" (UID: \"bd51c9c0-8d3a-4119-bbab-3f97a800d180\") " pod="openstack/glance-default-external-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.496790 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d8d27788-6ccd-4466-9b8a-718a9b1d4d82-logs\") pod \"glance-default-internal-api-0\" (UID: \"d8d27788-6ccd-4466-9b8a-718a9b1d4d82\") " pod="openstack/glance-default-internal-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.497223 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d8d27788-6ccd-4466-9b8a-718a9b1d4d82-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d8d27788-6ccd-4466-9b8a-718a9b1d4d82\") " pod="openstack/glance-default-internal-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.498836 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/bd51c9c0-8d3a-4119-bbab-3f97a800d180-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"bd51c9c0-8d3a-4119-bbab-3f97a800d180\") " pod="openstack/glance-default-external-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.499614 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8d27788-6ccd-4466-9b8a-718a9b1d4d82-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d8d27788-6ccd-4466-9b8a-718a9b1d4d82\") " pod="openstack/glance-default-internal-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.500661 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8d27788-6ccd-4466-9b8a-718a9b1d4d82-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d8d27788-6ccd-4466-9b8a-718a9b1d4d82\") " pod="openstack/glance-default-internal-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.501385 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd51c9c0-8d3a-4119-bbab-3f97a800d180-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"bd51c9c0-8d3a-4119-bbab-3f97a800d180\") " pod="openstack/glance-default-external-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.504389 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bd51c9c0-8d3a-4119-bbab-3f97a800d180-scripts\") pod \"glance-default-external-api-0\" (UID: \"bd51c9c0-8d3a-4119-bbab-3f97a800d180\") " pod="openstack/glance-default-external-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.511148 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd51c9c0-8d3a-4119-bbab-3f97a800d180-config-data\") pod \"glance-default-external-api-0\" (UID: \"bd51c9c0-8d3a-4119-bbab-3f97a800d180\") " pod="openstack/glance-default-external-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.514489 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8d27788-6ccd-4466-9b8a-718a9b1d4d82-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d8d27788-6ccd-4466-9b8a-718a9b1d4d82\") " pod="openstack/glance-default-internal-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 
09:02:56.519895 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxx4k\" (UniqueName: \"kubernetes.io/projected/bd51c9c0-8d3a-4119-bbab-3f97a800d180-kube-api-access-hxx4k\") pod \"glance-default-external-api-0\" (UID: \"bd51c9c0-8d3a-4119-bbab-3f97a800d180\") " pod="openstack/glance-default-external-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.536680 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzgp6\" (UniqueName: \"kubernetes.io/projected/d8d27788-6ccd-4466-9b8a-718a9b1d4d82-kube-api-access-qzgp6\") pod \"glance-default-internal-api-0\" (UID: \"d8d27788-6ccd-4466-9b8a-718a9b1d4d82\") " pod="openstack/glance-default-internal-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.645492 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 03 09:02:56 crc kubenswrapper[4998]: I0203 09:02:56.714086 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 03 09:02:57 crc kubenswrapper[4998]: I0203 09:02:57.310791 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 09:02:57 crc kubenswrapper[4998]: I0203 09:02:57.427355 4998 scope.go:117] "RemoveContainer" containerID="f0960bf93408fbd80b1af606d0b0b825518c12f926ca6f6b8748be736ddfda77" Feb 03 09:02:57 crc kubenswrapper[4998]: E0203 09:02:57.427641 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 09:03:01 crc kubenswrapper[4998]: I0203 09:03:01.034028 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-xw255"] Feb 03 09:03:01 crc kubenswrapper[4998]: I0203 09:03:01.044642 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-xw255"] Feb 03 09:03:02 crc kubenswrapper[4998]: I0203 09:03:02.445629 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b4e9b73-32ac-4ce4-b368-7bf898a77203" path="/var/lib/kubelet/pods/7b4e9b73-32ac-4ce4-b368-7bf898a77203/volumes" Feb 03 09:03:02 crc kubenswrapper[4998]: W0203 09:03:02.647723 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbd51c9c0_8d3a_4119_bbab_3f97a800d180.slice/crio-fd7027683f9fa102648a54b1c00f3bd6e7a0b6e72920ff683b5d356e778000f5 WatchSource:0}: Error finding container fd7027683f9fa102648a54b1c00f3bd6e7a0b6e72920ff683b5d356e778000f5: Status 404 returned error can't find the container with id fd7027683f9fa102648a54b1c00f3bd6e7a0b6e72920ff683b5d356e778000f5 Feb 03 09:03:03 crc kubenswrapper[4998]: I0203 09:03:03.197549 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 09:03:03 crc kubenswrapper[4998]: W0203 09:03:03.216060 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd8d27788_6ccd_4466_9b8a_718a9b1d4d82.slice/crio-afd826f71e8742dc4d3bb5b73166fe6349c1617041bb8b290773ff321e3db5af WatchSource:0}: 
Error finding container afd826f71e8742dc4d3bb5b73166fe6349c1617041bb8b290773ff321e3db5af: Status 404 returned error can't find the container with id afd826f71e8742dc4d3bb5b73166fe6349c1617041bb8b290773ff321e3db5af Feb 03 09:03:03 crc kubenswrapper[4998]: I0203 09:03:03.295884 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7cfddfbbcc-mxshd" event={"ID":"653c897b-8672-405d-9e1e-e877f22d452e","Type":"ContainerStarted","Data":"dc45bbf3711fd676156644439365b02c24a3681cfdb8cf65751bd268bf984d6c"} Feb 03 09:03:03 crc kubenswrapper[4998]: I0203 09:03:03.295932 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7cfddfbbcc-mxshd" event={"ID":"653c897b-8672-405d-9e1e-e877f22d452e","Type":"ContainerStarted","Data":"0d2a14409835e97df29bb9a8944b9028284d9694f624cfdf0bc81bddb46be068"} Feb 03 09:03:03 crc kubenswrapper[4998]: I0203 09:03:03.302450 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d8d27788-6ccd-4466-9b8a-718a9b1d4d82","Type":"ContainerStarted","Data":"afd826f71e8742dc4d3bb5b73166fe6349c1617041bb8b290773ff321e3db5af"} Feb 03 09:03:03 crc kubenswrapper[4998]: I0203 09:03:03.308362 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6b9db79d55-flsdk" event={"ID":"a4b8261b-447d-4da5-b807-14385e683a3e","Type":"ContainerStarted","Data":"2390667d220f3bf095f9db0274b329bdff7f91e3aeb25401da4cc5061e8187bb"} Feb 03 09:03:03 crc kubenswrapper[4998]: I0203 09:03:03.308546 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6b9db79d55-flsdk" podUID="a4b8261b-447d-4da5-b807-14385e683a3e" containerName="horizon-log" containerID="cri-o://2390667d220f3bf095f9db0274b329bdff7f91e3aeb25401da4cc5061e8187bb" gracePeriod=30 Feb 03 09:03:03 crc kubenswrapper[4998]: I0203 09:03:03.308916 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6b9db79d55-flsdk" podUID="a4b8261b-447d-4da5-b807-14385e683a3e" containerName="horizon" containerID="cri-o://13e01c781bfb3f213d19d3962d6d2f458f7658abf00eba588b630863ea82dfe9" gracePeriod=30 Feb 03 09:03:03 crc kubenswrapper[4998]: I0203 09:03:03.325047 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"bd51c9c0-8d3a-4119-bbab-3f97a800d180","Type":"ContainerStarted","Data":"fd7027683f9fa102648a54b1c00f3bd6e7a0b6e72920ff683b5d356e778000f5"} Feb 03 09:03:03 crc kubenswrapper[4998]: I0203 09:03:03.326313 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7cfddfbbcc-mxshd" podStartSLOduration=1.480388989 podStartE2EDuration="11.326283949s" podCreationTimestamp="2026-02-03 09:02:52 +0000 UTC" firstStartedPulling="2026-02-03 09:02:52.920276648 +0000 UTC m=+8211.206970454" lastFinishedPulling="2026-02-03 09:03:02.766171608 +0000 UTC m=+8221.052865414" observedRunningTime="2026-02-03 09:03:03.324134228 +0000 UTC m=+8221.610828054" watchObservedRunningTime="2026-02-03 09:03:03.326283949 +0000 UTC m=+8221.612977755" Feb 03 09:03:03 crc kubenswrapper[4998]: I0203 09:03:03.330909 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-76df769777-ctkvq" event={"ID":"685d4b78-a56c-4c73-9c21-fa7ca0218d52","Type":"ContainerStarted","Data":"aec57c2a155dbd9ead526744734eba2bc197d52441d243529ff026c4d7867602"} Feb 03 09:03:03 crc kubenswrapper[4998]: I0203 09:03:03.362074 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/horizon-6b9db79d55-flsdk" podStartSLOduration=1.7663183569999998 podStartE2EDuration="12.362053364s" podCreationTimestamp="2026-02-03 09:02:51 +0000 UTC" firstStartedPulling="2026-02-03 09:02:52.154284723 +0000 UTC m=+8210.440978529" lastFinishedPulling="2026-02-03 09:03:02.75001973 +0000 UTC m=+8221.036713536" observedRunningTime="2026-02-03 09:03:03.350831526 +0000 UTC m=+8221.637525342" watchObservedRunningTime="2026-02-03 09:03:03.362053364 +0000 UTC m=+8221.648747170" Feb 03 09:03:04 crc kubenswrapper[4998]: I0203 09:03:04.341998 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d8d27788-6ccd-4466-9b8a-718a9b1d4d82","Type":"ContainerStarted","Data":"316fc991c1a3de221dc89957c96fb1f920e5355d3215a909a2e6cf4658c996eb"} Feb 03 09:03:04 crc kubenswrapper[4998]: I0203 09:03:04.344109 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6b9db79d55-flsdk" event={"ID":"a4b8261b-447d-4da5-b807-14385e683a3e","Type":"ContainerStarted","Data":"13e01c781bfb3f213d19d3962d6d2f458f7658abf00eba588b630863ea82dfe9"} Feb 03 09:03:04 crc kubenswrapper[4998]: I0203 09:03:04.345643 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"bd51c9c0-8d3a-4119-bbab-3f97a800d180","Type":"ContainerStarted","Data":"6a7528840fdd18d43aca6e9e00240b95d8e2d79d8f4fd38417322e00242e1a5f"} Feb 03 09:03:04 crc kubenswrapper[4998]: I0203 09:03:04.345697 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"bd51c9c0-8d3a-4119-bbab-3f97a800d180","Type":"ContainerStarted","Data":"0b252e22ea5b409c6f0da2e444f67728ca2e53a369667a0d7ed986cc3d08878e"} Feb 03 09:03:04 crc kubenswrapper[4998]: I0203 09:03:04.348245 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-76df769777-ctkvq" event={"ID":"685d4b78-a56c-4c73-9c21-fa7ca0218d52","Type":"ContainerStarted","Data":"01348e78b6cd4fddebbd6c141af6646bbe2052b847f49bfaace8d9ffa4b52cc3"} Feb 03 09:03:04 crc kubenswrapper[4998]: I0203 09:03:04.370667 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=8.370650808 podStartE2EDuration="8.370650808s" podCreationTimestamp="2026-02-03 09:02:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 09:03:04.367151269 +0000 UTC m=+8222.653845075" watchObservedRunningTime="2026-02-03 09:03:04.370650808 +0000 UTC m=+8222.657344614" Feb 03 09:03:04 crc kubenswrapper[4998]: I0203 09:03:04.394632 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-76df769777-ctkvq" podStartSLOduration=2.7491601 podStartE2EDuration="13.394615788s" podCreationTimestamp="2026-02-03 09:02:51 +0000 UTC" firstStartedPulling="2026-02-03 09:02:52.134924594 +0000 UTC m=+8210.421618400" lastFinishedPulling="2026-02-03 09:03:02.780380282 +0000 UTC m=+8221.067074088" observedRunningTime="2026-02-03 09:03:04.394381892 +0000 UTC m=+8222.681075708" watchObservedRunningTime="2026-02-03 09:03:04.394615788 +0000 UTC m=+8222.681309594" Feb 03 09:03:05 crc kubenswrapper[4998]: I0203 09:03:05.359858 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"d8d27788-6ccd-4466-9b8a-718a9b1d4d82","Type":"ContainerStarted","Data":"f2c84949aa9b6ea1089d16c18fe6a8b4fa027a17d88df166b6c3a3974a4d93b2"} Feb 03 09:03:05 crc kubenswrapper[4998]: I0203 09:03:05.385809 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=9.385757547 podStartE2EDuration="9.385757547s" podCreationTimestamp="2026-02-03 09:02:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 09:03:05.375530606 +0000 UTC m=+8223.662224502" watchObservedRunningTime="2026-02-03 09:03:05.385757547 +0000 UTC m=+8223.672451363" Feb 03 09:03:06 crc kubenswrapper[4998]: I0203 09:03:06.646140 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Feb 03 09:03:06 crc kubenswrapper[4998]: I0203 09:03:06.646208 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Feb 03 09:03:06 crc kubenswrapper[4998]: I0203 09:03:06.715226 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 03 09:03:06 crc kubenswrapper[4998]: I0203 09:03:06.715270 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 03 09:03:06 crc kubenswrapper[4998]: I0203 09:03:06.741974 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Feb 03 09:03:06 crc kubenswrapper[4998]: I0203 09:03:06.761297 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 03 09:03:06 crc kubenswrapper[4998]: I0203 09:03:06.778675 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 03 09:03:06 crc kubenswrapper[4998]: I0203 09:03:06.816757 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Feb 03 09:03:07 crc kubenswrapper[4998]: I0203 09:03:07.381393 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 03 09:03:07 crc kubenswrapper[4998]: I0203 09:03:07.381927 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Feb 03 09:03:07 crc kubenswrapper[4998]: I0203 09:03:07.381980 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 03 09:03:07 crc kubenswrapper[4998]: I0203 09:03:07.382002 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Feb 03 09:03:09 crc kubenswrapper[4998]: I0203 09:03:09.910933 4998 scope.go:117] "RemoveContainer" containerID="730db2e28f5983a06e19092a1a1ebf2d8fb475a548b0a1ab528eed6af9061f12" Feb 03 09:03:09 crc kubenswrapper[4998]: I0203 09:03:09.969131 4998 scope.go:117] "RemoveContainer" containerID="902de658799b556c98159961659608e7982b4826b627f6bb02e8f1bf5018079d" Feb 03 09:03:10 crc kubenswrapper[4998]: I0203 09:03:10.010365 4998 scope.go:117] "RemoveContainer" containerID="4e57e862dc5310bc81886881da2c1766eb97bb67d6d4419cadb9cc96753d2d2d" Feb 03 09:03:10 crc kubenswrapper[4998]: I0203 09:03:10.213308 4998 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Feb 03 09:03:10 crc kubenswrapper[4998]: I0203 09:03:10.219341 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Feb 03 09:03:10 crc kubenswrapper[4998]: I0203 09:03:10.427409 4998 scope.go:117] "RemoveContainer" containerID="f0960bf93408fbd80b1af606d0b0b825518c12f926ca6f6b8748be736ddfda77" Feb 03 09:03:10 crc kubenswrapper[4998]: E0203 09:03:10.427890 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 09:03:10 crc kubenswrapper[4998]: I0203 09:03:10.470809 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Feb 03 09:03:10 crc kubenswrapper[4998]: I0203 09:03:10.470938 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Feb 03 09:03:11 crc kubenswrapper[4998]: I0203 09:03:11.626540 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6b9db79d55-flsdk" Feb 03 09:03:11 crc kubenswrapper[4998]: I0203 09:03:11.640833 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-76df769777-ctkvq" Feb 03 09:03:11 crc kubenswrapper[4998]: I0203 09:03:11.640882 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-76df769777-ctkvq" Feb 03 09:03:12 crc kubenswrapper[4998]: I0203 09:03:12.398800 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7cfddfbbcc-mxshd" Feb 03 09:03:12 crc kubenswrapper[4998]: I0203 09:03:12.399155 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7cfddfbbcc-mxshd" Feb 03 09:03:16 crc kubenswrapper[4998]: I0203 09:03:16.051946 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-sh5xm"] Feb 03 09:03:16 crc kubenswrapper[4998]: I0203 09:03:16.069680 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-sh5xm"] Feb 03 09:03:16 crc kubenswrapper[4998]: I0203 09:03:16.438695 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27863dd2-1d6b-4bd4-b215-e8e18a08146f" path="/var/lib/kubelet/pods/27863dd2-1d6b-4bd4-b215-e8e18a08146f/volumes" Feb 03 09:03:21 crc kubenswrapper[4998]: I0203 09:03:21.642630 4998 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-76df769777-ctkvq" podUID="685d4b78-a56c-4c73-9c21-fa7ca0218d52" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.125:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.125:8080: connect: connection refused" Feb 03 09:03:22 crc kubenswrapper[4998]: I0203 09:03:22.401238 4998 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7cfddfbbcc-mxshd" podUID="653c897b-8672-405d-9e1e-e877f22d452e" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.126:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.126:8080: connect: connection refused" Feb 03 09:03:25 crc 
kubenswrapper[4998]: I0203 09:03:25.428105 4998 scope.go:117] "RemoveContainer" containerID="f0960bf93408fbd80b1af606d0b0b825518c12f926ca6f6b8748be736ddfda77" Feb 03 09:03:25 crc kubenswrapper[4998]: E0203 09:03:25.428734 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 09:03:33 crc kubenswrapper[4998]: I0203 09:03:33.540036 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-76df769777-ctkvq" Feb 03 09:03:33 crc kubenswrapper[4998]: I0203 09:03:33.641713 4998 generic.go:334] "Generic (PLEG): container finished" podID="a4b8261b-447d-4da5-b807-14385e683a3e" containerID="13e01c781bfb3f213d19d3962d6d2f458f7658abf00eba588b630863ea82dfe9" exitCode=137 Feb 03 09:03:33 crc kubenswrapper[4998]: I0203 09:03:33.641769 4998 generic.go:334] "Generic (PLEG): container finished" podID="a4b8261b-447d-4da5-b807-14385e683a3e" containerID="2390667d220f3bf095f9db0274b329bdff7f91e3aeb25401da4cc5061e8187bb" exitCode=137 Feb 03 09:03:33 crc kubenswrapper[4998]: I0203 09:03:33.641798 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6b9db79d55-flsdk" event={"ID":"a4b8261b-447d-4da5-b807-14385e683a3e","Type":"ContainerDied","Data":"13e01c781bfb3f213d19d3962d6d2f458f7658abf00eba588b630863ea82dfe9"} Feb 03 09:03:33 crc kubenswrapper[4998]: I0203 09:03:33.641821 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6b9db79d55-flsdk" event={"ID":"a4b8261b-447d-4da5-b807-14385e683a3e","Type":"ContainerDied","Data":"2390667d220f3bf095f9db0274b329bdff7f91e3aeb25401da4cc5061e8187bb"} Feb 03 09:03:33 crc kubenswrapper[4998]: I0203 09:03:33.772538 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6b9db79d55-flsdk" Feb 03 09:03:33 crc kubenswrapper[4998]: I0203 09:03:33.922349 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a4b8261b-447d-4da5-b807-14385e683a3e-scripts\") pod \"a4b8261b-447d-4da5-b807-14385e683a3e\" (UID: \"a4b8261b-447d-4da5-b807-14385e683a3e\") " Feb 03 09:03:33 crc kubenswrapper[4998]: I0203 09:03:33.922388 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5tnwz\" (UniqueName: \"kubernetes.io/projected/a4b8261b-447d-4da5-b807-14385e683a3e-kube-api-access-5tnwz\") pod \"a4b8261b-447d-4da5-b807-14385e683a3e\" (UID: \"a4b8261b-447d-4da5-b807-14385e683a3e\") " Feb 03 09:03:33 crc kubenswrapper[4998]: I0203 09:03:33.922437 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a4b8261b-447d-4da5-b807-14385e683a3e-config-data\") pod \"a4b8261b-447d-4da5-b807-14385e683a3e\" (UID: \"a4b8261b-447d-4da5-b807-14385e683a3e\") " Feb 03 09:03:33 crc kubenswrapper[4998]: I0203 09:03:33.922475 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a4b8261b-447d-4da5-b807-14385e683a3e-logs\") pod \"a4b8261b-447d-4da5-b807-14385e683a3e\" (UID: \"a4b8261b-447d-4da5-b807-14385e683a3e\") " Feb 03 09:03:33 crc kubenswrapper[4998]: I0203 09:03:33.922542 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a4b8261b-447d-4da5-b807-14385e683a3e-horizon-secret-key\") pod \"a4b8261b-447d-4da5-b807-14385e683a3e\" (UID: \"a4b8261b-447d-4da5-b807-14385e683a3e\") " Feb 03 09:03:33 crc kubenswrapper[4998]: I0203 09:03:33.923601 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4b8261b-447d-4da5-b807-14385e683a3e-logs" (OuterVolumeSpecName: "logs") pod "a4b8261b-447d-4da5-b807-14385e683a3e" (UID: "a4b8261b-447d-4da5-b807-14385e683a3e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 09:03:33 crc kubenswrapper[4998]: I0203 09:03:33.929895 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4b8261b-447d-4da5-b807-14385e683a3e-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "a4b8261b-447d-4da5-b807-14385e683a3e" (UID: "a4b8261b-447d-4da5-b807-14385e683a3e"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:03:33 crc kubenswrapper[4998]: I0203 09:03:33.931958 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4b8261b-447d-4da5-b807-14385e683a3e-kube-api-access-5tnwz" (OuterVolumeSpecName: "kube-api-access-5tnwz") pod "a4b8261b-447d-4da5-b807-14385e683a3e" (UID: "a4b8261b-447d-4da5-b807-14385e683a3e"). InnerVolumeSpecName "kube-api-access-5tnwz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:03:33 crc kubenswrapper[4998]: I0203 09:03:33.955763 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4b8261b-447d-4da5-b807-14385e683a3e-scripts" (OuterVolumeSpecName: "scripts") pod "a4b8261b-447d-4da5-b807-14385e683a3e" (UID: "a4b8261b-447d-4da5-b807-14385e683a3e"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 09:03:33 crc kubenswrapper[4998]: I0203 09:03:33.957662 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4b8261b-447d-4da5-b807-14385e683a3e-config-data" (OuterVolumeSpecName: "config-data") pod "a4b8261b-447d-4da5-b807-14385e683a3e" (UID: "a4b8261b-447d-4da5-b807-14385e683a3e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 09:03:34 crc kubenswrapper[4998]: I0203 09:03:34.025114 4998 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a4b8261b-447d-4da5-b807-14385e683a3e-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Feb 03 09:03:34 crc kubenswrapper[4998]: I0203 09:03:34.025152 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a4b8261b-447d-4da5-b807-14385e683a3e-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 09:03:34 crc kubenswrapper[4998]: I0203 09:03:34.025165 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5tnwz\" (UniqueName: \"kubernetes.io/projected/a4b8261b-447d-4da5-b807-14385e683a3e-kube-api-access-5tnwz\") on node \"crc\" DevicePath \"\"" Feb 03 09:03:34 crc kubenswrapper[4998]: I0203 09:03:34.025193 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a4b8261b-447d-4da5-b807-14385e683a3e-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 09:03:34 crc kubenswrapper[4998]: I0203 09:03:34.025205 4998 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a4b8261b-447d-4da5-b807-14385e683a3e-logs\") on node \"crc\" DevicePath \"\"" Feb 03 09:03:34 crc kubenswrapper[4998]: I0203 09:03:34.243839 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-7cfddfbbcc-mxshd" Feb 03 09:03:34 crc kubenswrapper[4998]: I0203 09:03:34.652225 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6b9db79d55-flsdk" event={"ID":"a4b8261b-447d-4da5-b807-14385e683a3e","Type":"ContainerDied","Data":"8e1883be21b95fa02df78da53caac36567ff6252b0bc6d9833ca626d3974e547"} Feb 03 09:03:34 crc kubenswrapper[4998]: I0203 09:03:34.652311 4998 scope.go:117] "RemoveContainer" containerID="13e01c781bfb3f213d19d3962d6d2f458f7658abf00eba588b630863ea82dfe9" Feb 03 09:03:34 crc kubenswrapper[4998]: I0203 09:03:34.652327 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6b9db79d55-flsdk" Feb 03 09:03:34 crc kubenswrapper[4998]: I0203 09:03:34.681394 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6b9db79d55-flsdk"] Feb 03 09:03:34 crc kubenswrapper[4998]: I0203 09:03:34.692989 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-6b9db79d55-flsdk"] Feb 03 09:03:34 crc kubenswrapper[4998]: I0203 09:03:34.812474 4998 scope.go:117] "RemoveContainer" containerID="2390667d220f3bf095f9db0274b329bdff7f91e3aeb25401da4cc5061e8187bb" Feb 03 09:03:35 crc kubenswrapper[4998]: I0203 09:03:35.285395 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-76df769777-ctkvq" Feb 03 09:03:35 crc kubenswrapper[4998]: I0203 09:03:35.804422 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-7cfddfbbcc-mxshd" Feb 03 09:03:35 crc kubenswrapper[4998]: I0203 09:03:35.878668 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-76df769777-ctkvq"] Feb 03 09:03:35 crc kubenswrapper[4998]: I0203 09:03:35.878937 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-76df769777-ctkvq" podUID="685d4b78-a56c-4c73-9c21-fa7ca0218d52" containerName="horizon-log" containerID="cri-o://aec57c2a155dbd9ead526744734eba2bc197d52441d243529ff026c4d7867602" gracePeriod=30 Feb 03 09:03:35 crc kubenswrapper[4998]: I0203 09:03:35.879023 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-76df769777-ctkvq" podUID="685d4b78-a56c-4c73-9c21-fa7ca0218d52" containerName="horizon" containerID="cri-o://01348e78b6cd4fddebbd6c141af6646bbe2052b847f49bfaace8d9ffa4b52cc3" gracePeriod=30 Feb 03 09:03:36 crc kubenswrapper[4998]: I0203 09:03:36.441695 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a4b8261b-447d-4da5-b807-14385e683a3e" path="/var/lib/kubelet/pods/a4b8261b-447d-4da5-b807-14385e683a3e/volumes" Feb 03 09:03:39 crc kubenswrapper[4998]: I0203 09:03:39.702149 4998 generic.go:334] "Generic (PLEG): container finished" podID="685d4b78-a56c-4c73-9c21-fa7ca0218d52" containerID="01348e78b6cd4fddebbd6c141af6646bbe2052b847f49bfaace8d9ffa4b52cc3" exitCode=0 Feb 03 09:03:39 crc kubenswrapper[4998]: I0203 09:03:39.702246 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-76df769777-ctkvq" event={"ID":"685d4b78-a56c-4c73-9c21-fa7ca0218d52","Type":"ContainerDied","Data":"01348e78b6cd4fddebbd6c141af6646bbe2052b847f49bfaace8d9ffa4b52cc3"} Feb 03 09:03:40 crc kubenswrapper[4998]: I0203 09:03:40.428824 4998 scope.go:117] "RemoveContainer" containerID="f0960bf93408fbd80b1af606d0b0b825518c12f926ca6f6b8748be736ddfda77" Feb 03 09:03:40 crc kubenswrapper[4998]: E0203 09:03:40.429512 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 09:03:41 crc kubenswrapper[4998]: I0203 09:03:41.641352 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-76df769777-ctkvq" podUID="685d4b78-a56c-4c73-9c21-fa7ca0218d52" containerName="horizon" probeResult="failure" output="Get 
\"http://10.217.1.125:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.125:8080: connect: connection refused" Feb 03 09:03:51 crc kubenswrapper[4998]: I0203 09:03:51.640904 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-76df769777-ctkvq" podUID="685d4b78-a56c-4c73-9c21-fa7ca0218d52" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.125:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.125:8080: connect: connection refused" Feb 03 09:03:52 crc kubenswrapper[4998]: I0203 09:03:52.433557 4998 scope.go:117] "RemoveContainer" containerID="f0960bf93408fbd80b1af606d0b0b825518c12f926ca6f6b8748be736ddfda77" Feb 03 09:03:52 crc kubenswrapper[4998]: E0203 09:03:52.433843 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 09:04:01 crc kubenswrapper[4998]: I0203 09:04:01.641339 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-76df769777-ctkvq" podUID="685d4b78-a56c-4c73-9c21-fa7ca0218d52" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.125:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.125:8080: connect: connection refused" Feb 03 09:04:01 crc kubenswrapper[4998]: I0203 09:04:01.642776 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-76df769777-ctkvq" Feb 03 09:04:03 crc kubenswrapper[4998]: I0203 09:04:03.427920 4998 scope.go:117] "RemoveContainer" containerID="f0960bf93408fbd80b1af606d0b0b825518c12f926ca6f6b8748be736ddfda77" Feb 03 09:04:03 crc kubenswrapper[4998]: E0203 09:04:03.429856 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 09:04:06 crc kubenswrapper[4998]: I0203 09:04:06.000463 4998 generic.go:334] "Generic (PLEG): container finished" podID="685d4b78-a56c-4c73-9c21-fa7ca0218d52" containerID="aec57c2a155dbd9ead526744734eba2bc197d52441d243529ff026c4d7867602" exitCode=137 Feb 03 09:04:06 crc kubenswrapper[4998]: I0203 09:04:06.000504 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-76df769777-ctkvq" event={"ID":"685d4b78-a56c-4c73-9c21-fa7ca0218d52","Type":"ContainerDied","Data":"aec57c2a155dbd9ead526744734eba2bc197d52441d243529ff026c4d7867602"} Feb 03 09:04:06 crc kubenswrapper[4998]: I0203 09:04:06.923641 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-76df769777-ctkvq" Feb 03 09:04:06 crc kubenswrapper[4998]: I0203 09:04:06.977773 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/685d4b78-a56c-4c73-9c21-fa7ca0218d52-config-data\") pod \"685d4b78-a56c-4c73-9c21-fa7ca0218d52\" (UID: \"685d4b78-a56c-4c73-9c21-fa7ca0218d52\") " Feb 03 09:04:06 crc kubenswrapper[4998]: I0203 09:04:06.977920 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/685d4b78-a56c-4c73-9c21-fa7ca0218d52-scripts\") pod \"685d4b78-a56c-4c73-9c21-fa7ca0218d52\" (UID: \"685d4b78-a56c-4c73-9c21-fa7ca0218d52\") " Feb 03 09:04:06 crc kubenswrapper[4998]: I0203 09:04:06.978018 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/685d4b78-a56c-4c73-9c21-fa7ca0218d52-horizon-secret-key\") pod \"685d4b78-a56c-4c73-9c21-fa7ca0218d52\" (UID: \"685d4b78-a56c-4c73-9c21-fa7ca0218d52\") " Feb 03 09:04:06 crc kubenswrapper[4998]: I0203 09:04:06.978091 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f72sg\" (UniqueName: \"kubernetes.io/projected/685d4b78-a56c-4c73-9c21-fa7ca0218d52-kube-api-access-f72sg\") pod \"685d4b78-a56c-4c73-9c21-fa7ca0218d52\" (UID: \"685d4b78-a56c-4c73-9c21-fa7ca0218d52\") " Feb 03 09:04:06 crc kubenswrapper[4998]: I0203 09:04:06.978332 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/685d4b78-a56c-4c73-9c21-fa7ca0218d52-logs\") pod \"685d4b78-a56c-4c73-9c21-fa7ca0218d52\" (UID: \"685d4b78-a56c-4c73-9c21-fa7ca0218d52\") " Feb 03 09:04:06 crc kubenswrapper[4998]: I0203 09:04:06.980360 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/685d4b78-a56c-4c73-9c21-fa7ca0218d52-logs" (OuterVolumeSpecName: "logs") pod "685d4b78-a56c-4c73-9c21-fa7ca0218d52" (UID: "685d4b78-a56c-4c73-9c21-fa7ca0218d52"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 09:04:06 crc kubenswrapper[4998]: I0203 09:04:06.986964 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/685d4b78-a56c-4c73-9c21-fa7ca0218d52-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "685d4b78-a56c-4c73-9c21-fa7ca0218d52" (UID: "685d4b78-a56c-4c73-9c21-fa7ca0218d52"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:04:06 crc kubenswrapper[4998]: I0203 09:04:06.990641 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/685d4b78-a56c-4c73-9c21-fa7ca0218d52-kube-api-access-f72sg" (OuterVolumeSpecName: "kube-api-access-f72sg") pod "685d4b78-a56c-4c73-9c21-fa7ca0218d52" (UID: "685d4b78-a56c-4c73-9c21-fa7ca0218d52"). InnerVolumeSpecName "kube-api-access-f72sg". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:04:07 crc kubenswrapper[4998]: I0203 09:04:07.004595 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/685d4b78-a56c-4c73-9c21-fa7ca0218d52-config-data" (OuterVolumeSpecName: "config-data") pod "685d4b78-a56c-4c73-9c21-fa7ca0218d52" (UID: "685d4b78-a56c-4c73-9c21-fa7ca0218d52"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 09:04:07 crc kubenswrapper[4998]: I0203 09:04:07.010338 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-76df769777-ctkvq" event={"ID":"685d4b78-a56c-4c73-9c21-fa7ca0218d52","Type":"ContainerDied","Data":"6e4a53f99d275b55301c0ecfee968137b6d9bb80da0354e446f31c05bf4222d2"} Feb 03 09:04:07 crc kubenswrapper[4998]: I0203 09:04:07.010392 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-76df769777-ctkvq" Feb 03 09:04:07 crc kubenswrapper[4998]: I0203 09:04:07.010505 4998 scope.go:117] "RemoveContainer" containerID="01348e78b6cd4fddebbd6c141af6646bbe2052b847f49bfaace8d9ffa4b52cc3" Feb 03 09:04:07 crc kubenswrapper[4998]: I0203 09:04:07.010609 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/685d4b78-a56c-4c73-9c21-fa7ca0218d52-scripts" (OuterVolumeSpecName: "scripts") pod "685d4b78-a56c-4c73-9c21-fa7ca0218d52" (UID: "685d4b78-a56c-4c73-9c21-fa7ca0218d52"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 09:04:07 crc kubenswrapper[4998]: I0203 09:04:07.094308 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/685d4b78-a56c-4c73-9c21-fa7ca0218d52-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 09:04:07 crc kubenswrapper[4998]: I0203 09:04:07.094355 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/685d4b78-a56c-4c73-9c21-fa7ca0218d52-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 09:04:07 crc kubenswrapper[4998]: I0203 09:04:07.094367 4998 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/685d4b78-a56c-4c73-9c21-fa7ca0218d52-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Feb 03 09:04:07 crc kubenswrapper[4998]: I0203 09:04:07.094388 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f72sg\" (UniqueName: \"kubernetes.io/projected/685d4b78-a56c-4c73-9c21-fa7ca0218d52-kube-api-access-f72sg\") on node \"crc\" DevicePath \"\"" Feb 03 09:04:07 crc kubenswrapper[4998]: I0203 09:04:07.094399 4998 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/685d4b78-a56c-4c73-9c21-fa7ca0218d52-logs\") on node \"crc\" DevicePath \"\"" Feb 03 09:04:07 crc kubenswrapper[4998]: I0203 09:04:07.245328 4998 scope.go:117] "RemoveContainer" containerID="aec57c2a155dbd9ead526744734eba2bc197d52441d243529ff026c4d7867602" Feb 03 09:04:07 crc kubenswrapper[4998]: I0203 09:04:07.342343 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-76df769777-ctkvq"] Feb 03 09:04:07 crc kubenswrapper[4998]: I0203 09:04:07.350352 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-76df769777-ctkvq"] Feb 03 09:04:08 crc kubenswrapper[4998]: I0203 09:04:08.441290 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="685d4b78-a56c-4c73-9c21-fa7ca0218d52" path="/var/lib/kubelet/pods/685d4b78-a56c-4c73-9c21-fa7ca0218d52/volumes" Feb 03 09:04:10 crc kubenswrapper[4998]: I0203 09:04:10.203963 4998 scope.go:117] "RemoveContainer" containerID="99553d5c67392a927217632b92e88e5cbb2c0ac0d3b5a0e8a67e3143f2297f16" Feb 03 09:04:10 crc kubenswrapper[4998]: I0203 09:04:10.241477 4998 scope.go:117] "RemoveContainer" 
containerID="965de3887b885f0f44470783e308209cd737fd8394de56641be91635362da69f" Feb 03 09:04:10 crc kubenswrapper[4998]: I0203 09:04:10.306847 4998 scope.go:117] "RemoveContainer" containerID="41947ca52c7460b59dc8c3a1ad40a8b0f54ed7e5ae7dd40bbbaf241ff7481d18" Feb 03 09:04:14 crc kubenswrapper[4998]: I0203 09:04:14.719277 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-5d4bfc795c-4fj69"] Feb 03 09:04:14 crc kubenswrapper[4998]: E0203 09:04:14.720314 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="685d4b78-a56c-4c73-9c21-fa7ca0218d52" containerName="horizon-log" Feb 03 09:04:14 crc kubenswrapper[4998]: I0203 09:04:14.720331 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="685d4b78-a56c-4c73-9c21-fa7ca0218d52" containerName="horizon-log" Feb 03 09:04:14 crc kubenswrapper[4998]: E0203 09:04:14.720346 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="685d4b78-a56c-4c73-9c21-fa7ca0218d52" containerName="horizon" Feb 03 09:04:14 crc kubenswrapper[4998]: I0203 09:04:14.720355 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="685d4b78-a56c-4c73-9c21-fa7ca0218d52" containerName="horizon" Feb 03 09:04:14 crc kubenswrapper[4998]: E0203 09:04:14.720367 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4b8261b-447d-4da5-b807-14385e683a3e" containerName="horizon-log" Feb 03 09:04:14 crc kubenswrapper[4998]: I0203 09:04:14.720376 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4b8261b-447d-4da5-b807-14385e683a3e" containerName="horizon-log" Feb 03 09:04:14 crc kubenswrapper[4998]: E0203 09:04:14.720384 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4b8261b-447d-4da5-b807-14385e683a3e" containerName="horizon" Feb 03 09:04:14 crc kubenswrapper[4998]: I0203 09:04:14.720391 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4b8261b-447d-4da5-b807-14385e683a3e" containerName="horizon" Feb 03 09:04:14 crc kubenswrapper[4998]: I0203 09:04:14.720644 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4b8261b-447d-4da5-b807-14385e683a3e" containerName="horizon-log" Feb 03 09:04:14 crc kubenswrapper[4998]: I0203 09:04:14.720671 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="685d4b78-a56c-4c73-9c21-fa7ca0218d52" containerName="horizon" Feb 03 09:04:14 crc kubenswrapper[4998]: I0203 09:04:14.720684 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="685d4b78-a56c-4c73-9c21-fa7ca0218d52" containerName="horizon-log" Feb 03 09:04:14 crc kubenswrapper[4998]: I0203 09:04:14.720700 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4b8261b-447d-4da5-b807-14385e683a3e" containerName="horizon" Feb 03 09:04:14 crc kubenswrapper[4998]: I0203 09:04:14.721908 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5d4bfc795c-4fj69" Feb 03 09:04:14 crc kubenswrapper[4998]: I0203 09:04:14.728990 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5d4bfc795c-4fj69"] Feb 03 09:04:14 crc kubenswrapper[4998]: I0203 09:04:14.739132 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ab381bc8-8294-4c0a-91c1-f9bd491c66a1-logs\") pod \"horizon-5d4bfc795c-4fj69\" (UID: \"ab381bc8-8294-4c0a-91c1-f9bd491c66a1\") " pod="openstack/horizon-5d4bfc795c-4fj69" Feb 03 09:04:14 crc kubenswrapper[4998]: I0203 09:04:14.739366 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ab381bc8-8294-4c0a-91c1-f9bd491c66a1-scripts\") pod \"horizon-5d4bfc795c-4fj69\" (UID: \"ab381bc8-8294-4c0a-91c1-f9bd491c66a1\") " pod="openstack/horizon-5d4bfc795c-4fj69" Feb 03 09:04:14 crc kubenswrapper[4998]: I0203 09:04:14.739460 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ab381bc8-8294-4c0a-91c1-f9bd491c66a1-config-data\") pod \"horizon-5d4bfc795c-4fj69\" (UID: \"ab381bc8-8294-4c0a-91c1-f9bd491c66a1\") " pod="openstack/horizon-5d4bfc795c-4fj69" Feb 03 09:04:14 crc kubenswrapper[4998]: I0203 09:04:14.739488 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-967fh\" (UniqueName: \"kubernetes.io/projected/ab381bc8-8294-4c0a-91c1-f9bd491c66a1-kube-api-access-967fh\") pod \"horizon-5d4bfc795c-4fj69\" (UID: \"ab381bc8-8294-4c0a-91c1-f9bd491c66a1\") " pod="openstack/horizon-5d4bfc795c-4fj69" Feb 03 09:04:14 crc kubenswrapper[4998]: I0203 09:04:14.739514 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/ab381bc8-8294-4c0a-91c1-f9bd491c66a1-horizon-secret-key\") pod \"horizon-5d4bfc795c-4fj69\" (UID: \"ab381bc8-8294-4c0a-91c1-f9bd491c66a1\") " pod="openstack/horizon-5d4bfc795c-4fj69" Feb 03 09:04:14 crc kubenswrapper[4998]: I0203 09:04:14.840893 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ab381bc8-8294-4c0a-91c1-f9bd491c66a1-config-data\") pod \"horizon-5d4bfc795c-4fj69\" (UID: \"ab381bc8-8294-4c0a-91c1-f9bd491c66a1\") " pod="openstack/horizon-5d4bfc795c-4fj69" Feb 03 09:04:14 crc kubenswrapper[4998]: I0203 09:04:14.840946 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-967fh\" (UniqueName: \"kubernetes.io/projected/ab381bc8-8294-4c0a-91c1-f9bd491c66a1-kube-api-access-967fh\") pod \"horizon-5d4bfc795c-4fj69\" (UID: \"ab381bc8-8294-4c0a-91c1-f9bd491c66a1\") " pod="openstack/horizon-5d4bfc795c-4fj69" Feb 03 09:04:14 crc kubenswrapper[4998]: I0203 09:04:14.840968 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/ab381bc8-8294-4c0a-91c1-f9bd491c66a1-horizon-secret-key\") pod \"horizon-5d4bfc795c-4fj69\" (UID: \"ab381bc8-8294-4c0a-91c1-f9bd491c66a1\") " pod="openstack/horizon-5d4bfc795c-4fj69" Feb 03 09:04:14 crc kubenswrapper[4998]: I0203 09:04:14.841032 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/ab381bc8-8294-4c0a-91c1-f9bd491c66a1-logs\") pod \"horizon-5d4bfc795c-4fj69\" (UID: \"ab381bc8-8294-4c0a-91c1-f9bd491c66a1\") " pod="openstack/horizon-5d4bfc795c-4fj69" Feb 03 09:04:14 crc kubenswrapper[4998]: I0203 09:04:14.841112 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ab381bc8-8294-4c0a-91c1-f9bd491c66a1-scripts\") pod \"horizon-5d4bfc795c-4fj69\" (UID: \"ab381bc8-8294-4c0a-91c1-f9bd491c66a1\") " pod="openstack/horizon-5d4bfc795c-4fj69" Feb 03 09:04:14 crc kubenswrapper[4998]: I0203 09:04:14.841860 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ab381bc8-8294-4c0a-91c1-f9bd491c66a1-logs\") pod \"horizon-5d4bfc795c-4fj69\" (UID: \"ab381bc8-8294-4c0a-91c1-f9bd491c66a1\") " pod="openstack/horizon-5d4bfc795c-4fj69" Feb 03 09:04:14 crc kubenswrapper[4998]: I0203 09:04:14.842029 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ab381bc8-8294-4c0a-91c1-f9bd491c66a1-scripts\") pod \"horizon-5d4bfc795c-4fj69\" (UID: \"ab381bc8-8294-4c0a-91c1-f9bd491c66a1\") " pod="openstack/horizon-5d4bfc795c-4fj69" Feb 03 09:04:14 crc kubenswrapper[4998]: I0203 09:04:14.842752 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ab381bc8-8294-4c0a-91c1-f9bd491c66a1-config-data\") pod \"horizon-5d4bfc795c-4fj69\" (UID: \"ab381bc8-8294-4c0a-91c1-f9bd491c66a1\") " pod="openstack/horizon-5d4bfc795c-4fj69" Feb 03 09:04:14 crc kubenswrapper[4998]: I0203 09:04:14.850249 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/ab381bc8-8294-4c0a-91c1-f9bd491c66a1-horizon-secret-key\") pod \"horizon-5d4bfc795c-4fj69\" (UID: \"ab381bc8-8294-4c0a-91c1-f9bd491c66a1\") " pod="openstack/horizon-5d4bfc795c-4fj69" Feb 03 09:04:14 crc kubenswrapper[4998]: I0203 09:04:14.858496 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-967fh\" (UniqueName: \"kubernetes.io/projected/ab381bc8-8294-4c0a-91c1-f9bd491c66a1-kube-api-access-967fh\") pod \"horizon-5d4bfc795c-4fj69\" (UID: \"ab381bc8-8294-4c0a-91c1-f9bd491c66a1\") " pod="openstack/horizon-5d4bfc795c-4fj69" Feb 03 09:04:15 crc kubenswrapper[4998]: I0203 09:04:15.039837 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5d4bfc795c-4fj69" Feb 03 09:04:15 crc kubenswrapper[4998]: W0203 09:04:15.504142 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podab381bc8_8294_4c0a_91c1_f9bd491c66a1.slice/crio-cb43df24a938808bc870628317da8f1ecc636261f7a74feedbe947881f4a7838 WatchSource:0}: Error finding container cb43df24a938808bc870628317da8f1ecc636261f7a74feedbe947881f4a7838: Status 404 returned error can't find the container with id cb43df24a938808bc870628317da8f1ecc636261f7a74feedbe947881f4a7838 Feb 03 09:04:15 crc kubenswrapper[4998]: I0203 09:04:15.508435 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5d4bfc795c-4fj69"] Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.070827 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-create-22hsh"] Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.072710 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-create-22hsh" Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.094964 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-b776-account-create-update-bvltf"] Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.097510 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-b776-account-create-update-bvltf" Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.100489 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-db-secret" Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.120529 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-22hsh"] Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.143070 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-b776-account-create-update-bvltf"] Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.144614 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5d4bfc795c-4fj69" event={"ID":"ab381bc8-8294-4c0a-91c1-f9bd491c66a1","Type":"ContainerStarted","Data":"6f61ca5ca3fc876317528a25837238ee543ae9051201472bc8c3692eec4d4829"} Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.144678 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5d4bfc795c-4fj69" event={"ID":"ab381bc8-8294-4c0a-91c1-f9bd491c66a1","Type":"ContainerStarted","Data":"30d0e079b68ae12e5bd53125d527fdc098fa7fe3ed290538dcea0df5b74a6d3b"} Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.144695 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5d4bfc795c-4fj69" event={"ID":"ab381bc8-8294-4c0a-91c1-f9bd491c66a1","Type":"ContainerStarted","Data":"cb43df24a938808bc870628317da8f1ecc636261f7a74feedbe947881f4a7838"} Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.167486 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9946bf42-1fb1-4866-b14d-4ce08d52cde9-operator-scripts\") pod \"heat-db-create-22hsh\" (UID: \"9946bf42-1fb1-4866-b14d-4ce08d52cde9\") " pod="openstack/heat-db-create-22hsh" Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.167574 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7kl89\" (UniqueName: \"kubernetes.io/projected/9946bf42-1fb1-4866-b14d-4ce08d52cde9-kube-api-access-7kl89\") pod \"heat-db-create-22hsh\" (UID: \"9946bf42-1fb1-4866-b14d-4ce08d52cde9\") " pod="openstack/heat-db-create-22hsh" Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.169015 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25t8x\" (UniqueName: \"kubernetes.io/projected/2dd9ecda-c38f-43dd-8002-eacd214a4502-kube-api-access-25t8x\") pod \"heat-b776-account-create-update-bvltf\" (UID: \"2dd9ecda-c38f-43dd-8002-eacd214a4502\") " pod="openstack/heat-b776-account-create-update-bvltf" Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.170438 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2dd9ecda-c38f-43dd-8002-eacd214a4502-operator-scripts\") pod \"heat-b776-account-create-update-bvltf\" (UID: \"2dd9ecda-c38f-43dd-8002-eacd214a4502\") " pod="openstack/heat-b776-account-create-update-bvltf" Feb 03 09:04:16 crc 
kubenswrapper[4998]: I0203 09:04:16.177233 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-5d4bfc795c-4fj69" podStartSLOduration=2.177211504 podStartE2EDuration="2.177211504s" podCreationTimestamp="2026-02-03 09:04:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 09:04:16.170466063 +0000 UTC m=+8294.457159879" watchObservedRunningTime="2026-02-03 09:04:16.177211504 +0000 UTC m=+8294.463905330" Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.273901 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7kl89\" (UniqueName: \"kubernetes.io/projected/9946bf42-1fb1-4866-b14d-4ce08d52cde9-kube-api-access-7kl89\") pod \"heat-db-create-22hsh\" (UID: \"9946bf42-1fb1-4866-b14d-4ce08d52cde9\") " pod="openstack/heat-db-create-22hsh" Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.274103 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25t8x\" (UniqueName: \"kubernetes.io/projected/2dd9ecda-c38f-43dd-8002-eacd214a4502-kube-api-access-25t8x\") pod \"heat-b776-account-create-update-bvltf\" (UID: \"2dd9ecda-c38f-43dd-8002-eacd214a4502\") " pod="openstack/heat-b776-account-create-update-bvltf" Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.274139 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2dd9ecda-c38f-43dd-8002-eacd214a4502-operator-scripts\") pod \"heat-b776-account-create-update-bvltf\" (UID: \"2dd9ecda-c38f-43dd-8002-eacd214a4502\") " pod="openstack/heat-b776-account-create-update-bvltf" Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.274205 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9946bf42-1fb1-4866-b14d-4ce08d52cde9-operator-scripts\") pod \"heat-db-create-22hsh\" (UID: \"9946bf42-1fb1-4866-b14d-4ce08d52cde9\") " pod="openstack/heat-db-create-22hsh" Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.274977 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2dd9ecda-c38f-43dd-8002-eacd214a4502-operator-scripts\") pod \"heat-b776-account-create-update-bvltf\" (UID: \"2dd9ecda-c38f-43dd-8002-eacd214a4502\") " pod="openstack/heat-b776-account-create-update-bvltf" Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.275560 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9946bf42-1fb1-4866-b14d-4ce08d52cde9-operator-scripts\") pod \"heat-db-create-22hsh\" (UID: \"9946bf42-1fb1-4866-b14d-4ce08d52cde9\") " pod="openstack/heat-db-create-22hsh" Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.297262 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25t8x\" (UniqueName: \"kubernetes.io/projected/2dd9ecda-c38f-43dd-8002-eacd214a4502-kube-api-access-25t8x\") pod \"heat-b776-account-create-update-bvltf\" (UID: \"2dd9ecda-c38f-43dd-8002-eacd214a4502\") " pod="openstack/heat-b776-account-create-update-bvltf" Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.297353 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7kl89\" (UniqueName: 
\"kubernetes.io/projected/9946bf42-1fb1-4866-b14d-4ce08d52cde9-kube-api-access-7kl89\") pod \"heat-db-create-22hsh\" (UID: \"9946bf42-1fb1-4866-b14d-4ce08d52cde9\") " pod="openstack/heat-db-create-22hsh" Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.331082 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2sbfg"] Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.335262 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2sbfg" Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.357927 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2sbfg"] Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.375473 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkm4g\" (UniqueName: \"kubernetes.io/projected/3a44224b-701c-493c-b302-f4ce57fab57f-kube-api-access-kkm4g\") pod \"certified-operators-2sbfg\" (UID: \"3a44224b-701c-493c-b302-f4ce57fab57f\") " pod="openshift-marketplace/certified-operators-2sbfg" Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.375551 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a44224b-701c-493c-b302-f4ce57fab57f-catalog-content\") pod \"certified-operators-2sbfg\" (UID: \"3a44224b-701c-493c-b302-f4ce57fab57f\") " pod="openshift-marketplace/certified-operators-2sbfg" Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.375583 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a44224b-701c-493c-b302-f4ce57fab57f-utilities\") pod \"certified-operators-2sbfg\" (UID: \"3a44224b-701c-493c-b302-f4ce57fab57f\") " pod="openshift-marketplace/certified-operators-2sbfg" Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.422630 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-b776-account-create-update-bvltf" Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.424426 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-create-22hsh" Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.434022 4998 scope.go:117] "RemoveContainer" containerID="f0960bf93408fbd80b1af606d0b0b825518c12f926ca6f6b8748be736ddfda77" Feb 03 09:04:16 crc kubenswrapper[4998]: E0203 09:04:16.434399 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.478658 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkm4g\" (UniqueName: \"kubernetes.io/projected/3a44224b-701c-493c-b302-f4ce57fab57f-kube-api-access-kkm4g\") pod \"certified-operators-2sbfg\" (UID: \"3a44224b-701c-493c-b302-f4ce57fab57f\") " pod="openshift-marketplace/certified-operators-2sbfg" Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.478766 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a44224b-701c-493c-b302-f4ce57fab57f-catalog-content\") pod \"certified-operators-2sbfg\" (UID: \"3a44224b-701c-493c-b302-f4ce57fab57f\") " pod="openshift-marketplace/certified-operators-2sbfg" Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.478834 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a44224b-701c-493c-b302-f4ce57fab57f-utilities\") pod \"certified-operators-2sbfg\" (UID: \"3a44224b-701c-493c-b302-f4ce57fab57f\") " pod="openshift-marketplace/certified-operators-2sbfg" Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.482178 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a44224b-701c-493c-b302-f4ce57fab57f-utilities\") pod \"certified-operators-2sbfg\" (UID: \"3a44224b-701c-493c-b302-f4ce57fab57f\") " pod="openshift-marketplace/certified-operators-2sbfg" Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.482268 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a44224b-701c-493c-b302-f4ce57fab57f-catalog-content\") pod \"certified-operators-2sbfg\" (UID: \"3a44224b-701c-493c-b302-f4ce57fab57f\") " pod="openshift-marketplace/certified-operators-2sbfg" Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.507437 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkm4g\" (UniqueName: \"kubernetes.io/projected/3a44224b-701c-493c-b302-f4ce57fab57f-kube-api-access-kkm4g\") pod \"certified-operators-2sbfg\" (UID: \"3a44224b-701c-493c-b302-f4ce57fab57f\") " pod="openshift-marketplace/certified-operators-2sbfg" Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.739312 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2sbfg" Feb 03 09:04:16 crc kubenswrapper[4998]: I0203 09:04:16.968735 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-b776-account-create-update-bvltf"] Feb 03 09:04:17 crc kubenswrapper[4998]: I0203 09:04:17.069682 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-22hsh"] Feb 03 09:04:17 crc kubenswrapper[4998]: I0203 09:04:17.171659 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-b776-account-create-update-bvltf" event={"ID":"2dd9ecda-c38f-43dd-8002-eacd214a4502","Type":"ContainerStarted","Data":"992ebe782a3887a00fd192f29e61964b7cc51b119a07d85b54fe876756a2e0f5"} Feb 03 09:04:17 crc kubenswrapper[4998]: I0203 09:04:17.177548 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-22hsh" event={"ID":"9946bf42-1fb1-4866-b14d-4ce08d52cde9","Type":"ContainerStarted","Data":"c020bb5716f075ce93f0a348e23978ede93872664e9c5e2e6876d98aba51931b"} Feb 03 09:04:17 crc kubenswrapper[4998]: I0203 09:04:17.331682 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2sbfg"] Feb 03 09:04:17 crc kubenswrapper[4998]: W0203 09:04:17.336951 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3a44224b_701c_493c_b302_f4ce57fab57f.slice/crio-4b966d4c67097bf90edd21b3555031af8f50e0b5cc2a677963a491713bb52ada WatchSource:0}: Error finding container 4b966d4c67097bf90edd21b3555031af8f50e0b5cc2a677963a491713bb52ada: Status 404 returned error can't find the container with id 4b966d4c67097bf90edd21b3555031af8f50e0b5cc2a677963a491713bb52ada Feb 03 09:04:18 crc kubenswrapper[4998]: I0203 09:04:18.192450 4998 generic.go:334] "Generic (PLEG): container finished" podID="3a44224b-701c-493c-b302-f4ce57fab57f" containerID="7dd9c25385683641868754035985c012f18d6e147e46a10f90dac68f3f59fbcb" exitCode=0 Feb 03 09:04:18 crc kubenswrapper[4998]: I0203 09:04:18.192567 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2sbfg" event={"ID":"3a44224b-701c-493c-b302-f4ce57fab57f","Type":"ContainerDied","Data":"7dd9c25385683641868754035985c012f18d6e147e46a10f90dac68f3f59fbcb"} Feb 03 09:04:18 crc kubenswrapper[4998]: I0203 09:04:18.192798 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2sbfg" event={"ID":"3a44224b-701c-493c-b302-f4ce57fab57f","Type":"ContainerStarted","Data":"4b966d4c67097bf90edd21b3555031af8f50e0b5cc2a677963a491713bb52ada"} Feb 03 09:04:18 crc kubenswrapper[4998]: I0203 09:04:18.194607 4998 generic.go:334] "Generic (PLEG): container finished" podID="2dd9ecda-c38f-43dd-8002-eacd214a4502" containerID="9a8665c384b8fbea42750dff0ff54695f474e916caa895dd166e38303f2ce6df" exitCode=0 Feb 03 09:04:18 crc kubenswrapper[4998]: I0203 09:04:18.194672 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-b776-account-create-update-bvltf" event={"ID":"2dd9ecda-c38f-43dd-8002-eacd214a4502","Type":"ContainerDied","Data":"9a8665c384b8fbea42750dff0ff54695f474e916caa895dd166e38303f2ce6df"} Feb 03 09:04:18 crc kubenswrapper[4998]: I0203 09:04:18.198043 4998 generic.go:334] "Generic (PLEG): container finished" podID="9946bf42-1fb1-4866-b14d-4ce08d52cde9" containerID="4eb04a8240dc95bf6d65fd1eab1ee0df93ba950fb50bc64757e0366ae557c921" exitCode=0 Feb 03 09:04:18 crc kubenswrapper[4998]: I0203 09:04:18.198104 4998 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-22hsh" event={"ID":"9946bf42-1fb1-4866-b14d-4ce08d52cde9","Type":"ContainerDied","Data":"4eb04a8240dc95bf6d65fd1eab1ee0df93ba950fb50bc64757e0366ae557c921"} Feb 03 09:04:19 crc kubenswrapper[4998]: I0203 09:04:19.639243 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-nlfqx"] Feb 03 09:04:19 crc kubenswrapper[4998]: I0203 09:04:19.729927 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nlfqx" Feb 03 09:04:19 crc kubenswrapper[4998]: I0203 09:04:19.741416 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nlfqx"] Feb 03 09:04:19 crc kubenswrapper[4998]: I0203 09:04:19.815365 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qd8bl\" (UniqueName: \"kubernetes.io/projected/b54e29c9-4ffb-40de-8cb7-aade40a34c1d-kube-api-access-qd8bl\") pod \"redhat-operators-nlfqx\" (UID: \"b54e29c9-4ffb-40de-8cb7-aade40a34c1d\") " pod="openshift-marketplace/redhat-operators-nlfqx" Feb 03 09:04:19 crc kubenswrapper[4998]: I0203 09:04:19.815478 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b54e29c9-4ffb-40de-8cb7-aade40a34c1d-catalog-content\") pod \"redhat-operators-nlfqx\" (UID: \"b54e29c9-4ffb-40de-8cb7-aade40a34c1d\") " pod="openshift-marketplace/redhat-operators-nlfqx" Feb 03 09:04:19 crc kubenswrapper[4998]: I0203 09:04:19.815512 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b54e29c9-4ffb-40de-8cb7-aade40a34c1d-utilities\") pod \"redhat-operators-nlfqx\" (UID: \"b54e29c9-4ffb-40de-8cb7-aade40a34c1d\") " pod="openshift-marketplace/redhat-operators-nlfqx" Feb 03 09:04:19 crc kubenswrapper[4998]: I0203 09:04:19.850416 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-b776-account-create-update-bvltf" Feb 03 09:04:19 crc kubenswrapper[4998]: I0203 09:04:19.890272 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-create-22hsh" Feb 03 09:04:19 crc kubenswrapper[4998]: I0203 09:04:19.924111 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2dd9ecda-c38f-43dd-8002-eacd214a4502-operator-scripts\") pod \"2dd9ecda-c38f-43dd-8002-eacd214a4502\" (UID: \"2dd9ecda-c38f-43dd-8002-eacd214a4502\") " Feb 03 09:04:19 crc kubenswrapper[4998]: I0203 09:04:19.924179 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7kl89\" (UniqueName: \"kubernetes.io/projected/9946bf42-1fb1-4866-b14d-4ce08d52cde9-kube-api-access-7kl89\") pod \"9946bf42-1fb1-4866-b14d-4ce08d52cde9\" (UID: \"9946bf42-1fb1-4866-b14d-4ce08d52cde9\") " Feb 03 09:04:19 crc kubenswrapper[4998]: I0203 09:04:19.924245 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25t8x\" (UniqueName: \"kubernetes.io/projected/2dd9ecda-c38f-43dd-8002-eacd214a4502-kube-api-access-25t8x\") pod \"2dd9ecda-c38f-43dd-8002-eacd214a4502\" (UID: \"2dd9ecda-c38f-43dd-8002-eacd214a4502\") " Feb 03 09:04:19 crc kubenswrapper[4998]: I0203 09:04:19.924413 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9946bf42-1fb1-4866-b14d-4ce08d52cde9-operator-scripts\") pod \"9946bf42-1fb1-4866-b14d-4ce08d52cde9\" (UID: \"9946bf42-1fb1-4866-b14d-4ce08d52cde9\") " Feb 03 09:04:19 crc kubenswrapper[4998]: I0203 09:04:19.924774 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qd8bl\" (UniqueName: \"kubernetes.io/projected/b54e29c9-4ffb-40de-8cb7-aade40a34c1d-kube-api-access-qd8bl\") pod \"redhat-operators-nlfqx\" (UID: \"b54e29c9-4ffb-40de-8cb7-aade40a34c1d\") " pod="openshift-marketplace/redhat-operators-nlfqx" Feb 03 09:04:19 crc kubenswrapper[4998]: I0203 09:04:19.924808 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2dd9ecda-c38f-43dd-8002-eacd214a4502-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2dd9ecda-c38f-43dd-8002-eacd214a4502" (UID: "2dd9ecda-c38f-43dd-8002-eacd214a4502"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 09:04:19 crc kubenswrapper[4998]: I0203 09:04:19.924951 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b54e29c9-4ffb-40de-8cb7-aade40a34c1d-catalog-content\") pod \"redhat-operators-nlfqx\" (UID: \"b54e29c9-4ffb-40de-8cb7-aade40a34c1d\") " pod="openshift-marketplace/redhat-operators-nlfqx" Feb 03 09:04:19 crc kubenswrapper[4998]: I0203 09:04:19.924999 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b54e29c9-4ffb-40de-8cb7-aade40a34c1d-utilities\") pod \"redhat-operators-nlfqx\" (UID: \"b54e29c9-4ffb-40de-8cb7-aade40a34c1d\") " pod="openshift-marketplace/redhat-operators-nlfqx" Feb 03 09:04:19 crc kubenswrapper[4998]: I0203 09:04:19.925104 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2dd9ecda-c38f-43dd-8002-eacd214a4502-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 09:04:19 crc kubenswrapper[4998]: I0203 09:04:19.925146 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9946bf42-1fb1-4866-b14d-4ce08d52cde9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9946bf42-1fb1-4866-b14d-4ce08d52cde9" (UID: "9946bf42-1fb1-4866-b14d-4ce08d52cde9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 09:04:19 crc kubenswrapper[4998]: I0203 09:04:19.927751 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b54e29c9-4ffb-40de-8cb7-aade40a34c1d-utilities\") pod \"redhat-operators-nlfqx\" (UID: \"b54e29c9-4ffb-40de-8cb7-aade40a34c1d\") " pod="openshift-marketplace/redhat-operators-nlfqx" Feb 03 09:04:19 crc kubenswrapper[4998]: I0203 09:04:19.928062 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b54e29c9-4ffb-40de-8cb7-aade40a34c1d-catalog-content\") pod \"redhat-operators-nlfqx\" (UID: \"b54e29c9-4ffb-40de-8cb7-aade40a34c1d\") " pod="openshift-marketplace/redhat-operators-nlfqx" Feb 03 09:04:19 crc kubenswrapper[4998]: I0203 09:04:19.930071 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2dd9ecda-c38f-43dd-8002-eacd214a4502-kube-api-access-25t8x" (OuterVolumeSpecName: "kube-api-access-25t8x") pod "2dd9ecda-c38f-43dd-8002-eacd214a4502" (UID: "2dd9ecda-c38f-43dd-8002-eacd214a4502"). InnerVolumeSpecName "kube-api-access-25t8x". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:04:19 crc kubenswrapper[4998]: I0203 09:04:19.947883 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9946bf42-1fb1-4866-b14d-4ce08d52cde9-kube-api-access-7kl89" (OuterVolumeSpecName: "kube-api-access-7kl89") pod "9946bf42-1fb1-4866-b14d-4ce08d52cde9" (UID: "9946bf42-1fb1-4866-b14d-4ce08d52cde9"). InnerVolumeSpecName "kube-api-access-7kl89". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:04:19 crc kubenswrapper[4998]: I0203 09:04:19.948696 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qd8bl\" (UniqueName: \"kubernetes.io/projected/b54e29c9-4ffb-40de-8cb7-aade40a34c1d-kube-api-access-qd8bl\") pod \"redhat-operators-nlfqx\" (UID: \"b54e29c9-4ffb-40de-8cb7-aade40a34c1d\") " pod="openshift-marketplace/redhat-operators-nlfqx" Feb 03 09:04:20 crc kubenswrapper[4998]: I0203 09:04:20.033216 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25t8x\" (UniqueName: \"kubernetes.io/projected/2dd9ecda-c38f-43dd-8002-eacd214a4502-kube-api-access-25t8x\") on node \"crc\" DevicePath \"\"" Feb 03 09:04:20 crc kubenswrapper[4998]: I0203 09:04:20.033510 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9946bf42-1fb1-4866-b14d-4ce08d52cde9-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 09:04:20 crc kubenswrapper[4998]: I0203 09:04:20.033580 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7kl89\" (UniqueName: \"kubernetes.io/projected/9946bf42-1fb1-4866-b14d-4ce08d52cde9-kube-api-access-7kl89\") on node \"crc\" DevicePath \"\"" Feb 03 09:04:20 crc kubenswrapper[4998]: I0203 09:04:20.186985 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nlfqx" Feb 03 09:04:20 crc kubenswrapper[4998]: I0203 09:04:20.239539 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-22hsh" event={"ID":"9946bf42-1fb1-4866-b14d-4ce08d52cde9","Type":"ContainerDied","Data":"c020bb5716f075ce93f0a348e23978ede93872664e9c5e2e6876d98aba51931b"} Feb 03 09:04:20 crc kubenswrapper[4998]: I0203 09:04:20.239586 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c020bb5716f075ce93f0a348e23978ede93872664e9c5e2e6876d98aba51931b" Feb 03 09:04:20 crc kubenswrapper[4998]: I0203 09:04:20.239643 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-22hsh" Feb 03 09:04:20 crc kubenswrapper[4998]: I0203 09:04:20.246677 4998 generic.go:334] "Generic (PLEG): container finished" podID="3a44224b-701c-493c-b302-f4ce57fab57f" containerID="c664bdfd4807b93c13aeab7c7a600d5c73f17d618b933518e11b525481bd5a21" exitCode=0 Feb 03 09:04:20 crc kubenswrapper[4998]: I0203 09:04:20.246759 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2sbfg" event={"ID":"3a44224b-701c-493c-b302-f4ce57fab57f","Type":"ContainerDied","Data":"c664bdfd4807b93c13aeab7c7a600d5c73f17d618b933518e11b525481bd5a21"} Feb 03 09:04:20 crc kubenswrapper[4998]: I0203 09:04:20.250728 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-b776-account-create-update-bvltf" event={"ID":"2dd9ecda-c38f-43dd-8002-eacd214a4502","Type":"ContainerDied","Data":"992ebe782a3887a00fd192f29e61964b7cc51b119a07d85b54fe876756a2e0f5"} Feb 03 09:04:20 crc kubenswrapper[4998]: I0203 09:04:20.250766 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="992ebe782a3887a00fd192f29e61964b7cc51b119a07d85b54fe876756a2e0f5" Feb 03 09:04:20 crc kubenswrapper[4998]: I0203 09:04:20.250827 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-b776-account-create-update-bvltf" Feb 03 09:04:20 crc kubenswrapper[4998]: I0203 09:04:20.760006 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nlfqx"] Feb 03 09:04:21 crc kubenswrapper[4998]: I0203 09:04:21.215551 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-sync-bjggq"] Feb 03 09:04:21 crc kubenswrapper[4998]: E0203 09:04:21.216324 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9946bf42-1fb1-4866-b14d-4ce08d52cde9" containerName="mariadb-database-create" Feb 03 09:04:21 crc kubenswrapper[4998]: I0203 09:04:21.216342 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="9946bf42-1fb1-4866-b14d-4ce08d52cde9" containerName="mariadb-database-create" Feb 03 09:04:21 crc kubenswrapper[4998]: E0203 09:04:21.216361 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2dd9ecda-c38f-43dd-8002-eacd214a4502" containerName="mariadb-account-create-update" Feb 03 09:04:21 crc kubenswrapper[4998]: I0203 09:04:21.216369 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="2dd9ecda-c38f-43dd-8002-eacd214a4502" containerName="mariadb-account-create-update" Feb 03 09:04:21 crc kubenswrapper[4998]: I0203 09:04:21.216580 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="2dd9ecda-c38f-43dd-8002-eacd214a4502" containerName="mariadb-account-create-update" Feb 03 09:04:21 crc kubenswrapper[4998]: I0203 09:04:21.216607 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="9946bf42-1fb1-4866-b14d-4ce08d52cde9" containerName="mariadb-database-create" Feb 03 09:04:21 crc kubenswrapper[4998]: I0203 09:04:21.217406 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-sync-bjggq"
Feb 03 09:04:21 crc kubenswrapper[4998]: I0203 09:04:21.221863 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data"
Feb 03 09:04:21 crc kubenswrapper[4998]: I0203 09:04:21.222136 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-ms6lv"
Feb 03 09:04:21 crc kubenswrapper[4998]: I0203 09:04:21.226573 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-bjggq"]
Feb 03 09:04:21 crc kubenswrapper[4998]: I0203 09:04:21.266919 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc-combined-ca-bundle\") pod \"heat-db-sync-bjggq\" (UID: \"8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc\") " pod="openstack/heat-db-sync-bjggq"
Feb 03 09:04:21 crc kubenswrapper[4998]: I0203 09:04:21.266993 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc-config-data\") pod \"heat-db-sync-bjggq\" (UID: \"8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc\") " pod="openstack/heat-db-sync-bjggq"
Feb 03 09:04:21 crc kubenswrapper[4998]: I0203 09:04:21.267113 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pq6rc\" (UniqueName: \"kubernetes.io/projected/8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc-kube-api-access-pq6rc\") pod \"heat-db-sync-bjggq\" (UID: \"8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc\") " pod="openstack/heat-db-sync-bjggq"
Feb 03 09:04:21 crc kubenswrapper[4998]: I0203 09:04:21.267576 4998 generic.go:334] "Generic (PLEG): container finished" podID="b54e29c9-4ffb-40de-8cb7-aade40a34c1d" containerID="b2817c2803aaccae3a0e1b03b011a2359336892b3de0d9a2862b3461ae08f800" exitCode=0
Feb 03 09:04:21 crc kubenswrapper[4998]: I0203 09:04:21.267618 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nlfqx" event={"ID":"b54e29c9-4ffb-40de-8cb7-aade40a34c1d","Type":"ContainerDied","Data":"b2817c2803aaccae3a0e1b03b011a2359336892b3de0d9a2862b3461ae08f800"}
Feb 03 09:04:21 crc kubenswrapper[4998]: I0203 09:04:21.267642 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nlfqx" event={"ID":"b54e29c9-4ffb-40de-8cb7-aade40a34c1d","Type":"ContainerStarted","Data":"3370ff17d331584c812d47364534b3f4376f8716585c80fdd606be1967c7eca1"}
Feb 03 09:04:21 crc kubenswrapper[4998]: I0203 09:04:21.368894 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pq6rc\" (UniqueName: \"kubernetes.io/projected/8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc-kube-api-access-pq6rc\") pod \"heat-db-sync-bjggq\" (UID: \"8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc\") " pod="openstack/heat-db-sync-bjggq"
Feb 03 09:04:21 crc kubenswrapper[4998]: I0203 09:04:21.368981 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc-combined-ca-bundle\") pod \"heat-db-sync-bjggq\" (UID: \"8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc\") " pod="openstack/heat-db-sync-bjggq"
Feb 03 09:04:21 crc kubenswrapper[4998]: I0203 09:04:21.369035 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc-config-data\") pod \"heat-db-sync-bjggq\" (UID: \"8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc\") " pod="openstack/heat-db-sync-bjggq"
Feb 03 09:04:21 crc kubenswrapper[4998]: I0203 09:04:21.375895 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc-combined-ca-bundle\") pod \"heat-db-sync-bjggq\" (UID: \"8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc\") " pod="openstack/heat-db-sync-bjggq"
Feb 03 09:04:21 crc kubenswrapper[4998]: I0203 09:04:21.378505 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc-config-data\") pod \"heat-db-sync-bjggq\" (UID: \"8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc\") " pod="openstack/heat-db-sync-bjggq"
Feb 03 09:04:21 crc kubenswrapper[4998]: I0203 09:04:21.389936 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pq6rc\" (UniqueName: \"kubernetes.io/projected/8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc-kube-api-access-pq6rc\") pod \"heat-db-sync-bjggq\" (UID: \"8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc\") " pod="openstack/heat-db-sync-bjggq"
Feb 03 09:04:21 crc kubenswrapper[4998]: I0203 09:04:21.543086 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-bjggq"
Feb 03 09:04:22 crc kubenswrapper[4998]: I0203 09:04:22.282881 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nlfqx" event={"ID":"b54e29c9-4ffb-40de-8cb7-aade40a34c1d","Type":"ContainerStarted","Data":"47b75b6dcba448c5a4b4b4d9e8837542686cab02669ff3623cc264ac975e57cf"}
Feb 03 09:04:22 crc kubenswrapper[4998]: I0203 09:04:22.290790 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2sbfg" event={"ID":"3a44224b-701c-493c-b302-f4ce57fab57f","Type":"ContainerStarted","Data":"36e178ea6d8dec1cc3b0916cd59f71b09d95b60cc9be8f5f4ecc12b44ad8989f"}
Feb 03 09:04:22 crc kubenswrapper[4998]: I0203 09:04:22.338007 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2sbfg" podStartSLOduration=3.644450698 podStartE2EDuration="6.337982315s" podCreationTimestamp="2026-02-03 09:04:16 +0000 UTC" firstStartedPulling="2026-02-03 09:04:18.197288674 +0000 UTC m=+8296.483982480" lastFinishedPulling="2026-02-03 09:04:20.890820281 +0000 UTC m=+8299.177514097" observedRunningTime="2026-02-03 09:04:22.329203185 +0000 UTC m=+8300.615896991" watchObservedRunningTime="2026-02-03 09:04:22.337982315 +0000 UTC m=+8300.624676121"
Feb 03 09:04:23 crc kubenswrapper[4998]: I0203 09:04:23.248001 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-bjggq"]
Feb 03 09:04:23 crc kubenswrapper[4998]: I0203 09:04:23.303490 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-bjggq" event={"ID":"8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc","Type":"ContainerStarted","Data":"78d4766f1be88db3cb8b7659a4af37bb1fc3e60b03ac77f785738e9fc62a574b"}
Feb 03 09:04:24 crc kubenswrapper[4998]: I0203 09:04:24.340497 4998 generic.go:334] "Generic (PLEG): container finished" podID="b54e29c9-4ffb-40de-8cb7-aade40a34c1d" containerID="47b75b6dcba448c5a4b4b4d9e8837542686cab02669ff3623cc264ac975e57cf" exitCode=0
Feb 03 09:04:24 crc kubenswrapper[4998]: I0203 09:04:24.340549 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nlfqx" event={"ID":"b54e29c9-4ffb-40de-8cb7-aade40a34c1d","Type":"ContainerDied","Data":"47b75b6dcba448c5a4b4b4d9e8837542686cab02669ff3623cc264ac975e57cf"}
Feb 03 09:04:25 crc kubenswrapper[4998]: I0203 09:04:25.040025 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5d4bfc795c-4fj69"
Feb 03 09:04:25 crc kubenswrapper[4998]: I0203 09:04:25.040374 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-5d4bfc795c-4fj69"
Feb 03 09:04:26 crc kubenswrapper[4998]: I0203 09:04:26.392203 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nlfqx" event={"ID":"b54e29c9-4ffb-40de-8cb7-aade40a34c1d","Type":"ContainerStarted","Data":"333bea0705398d225bb8ca4ccff594b57081bccaa7992249a29314a0f8d63bbe"}
Feb 03 09:04:26 crc kubenswrapper[4998]: I0203 09:04:26.422551 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-nlfqx" podStartSLOduration=3.5375810210000003 podStartE2EDuration="7.422529362s" podCreationTimestamp="2026-02-03 09:04:19 +0000 UTC" firstStartedPulling="2026-02-03 09:04:21.269472451 +0000 UTC m=+8299.556166257" lastFinishedPulling="2026-02-03 09:04:25.154420792 +0000 UTC m=+8303.441114598" observedRunningTime="2026-02-03 09:04:26.415234795 +0000 UTC m=+8304.701928601" watchObservedRunningTime="2026-02-03 09:04:26.422529362 +0000 UTC m=+8304.709223168"
Feb 03 09:04:26 crc kubenswrapper[4998]: I0203 09:04:26.740509 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2sbfg"
Feb 03 09:04:26 crc kubenswrapper[4998]: I0203 09:04:26.740564 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2sbfg"
Feb 03 09:04:27 crc kubenswrapper[4998]: I0203 09:04:27.798319 4998 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-2sbfg" podUID="3a44224b-701c-493c-b302-f4ce57fab57f" containerName="registry-server" probeResult="failure" output=<
Feb 03 09:04:27 crc kubenswrapper[4998]: timeout: failed to connect service ":50051" within 1s
Feb 03 09:04:27 crc kubenswrapper[4998]: >
Feb 03 09:04:30 crc kubenswrapper[4998]: I0203 09:04:30.187275 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-nlfqx"
Feb 03 09:04:30 crc kubenswrapper[4998]: I0203 09:04:30.187812 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-nlfqx"
Feb 03 09:04:30 crc kubenswrapper[4998]: I0203 09:04:30.432672 4998 scope.go:117] "RemoveContainer" containerID="f0960bf93408fbd80b1af606d0b0b825518c12f926ca6f6b8748be736ddfda77"
Feb 03 09:04:30 crc kubenswrapper[4998]: E0203 09:04:30.432981 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 09:04:31 crc kubenswrapper[4998]: I0203 09:04:31.257445 4998 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-nlfqx" podUID="b54e29c9-4ffb-40de-8cb7-aade40a34c1d" containerName="registry-server" probeResult="failure" output=<
Feb 03 09:04:31 crc kubenswrapper[4998]: timeout: failed to connect service ":50051" within 1s
Feb 03 09:04:31 crc kubenswrapper[4998]: >
Feb 03 09:04:33 crc kubenswrapper[4998]: I0203 09:04:33.530051 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-bjggq" event={"ID":"8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc","Type":"ContainerStarted","Data":"caf2fe11ad7e12b30a1806da5b29690eabe454cf6b5324435c33c84e6e7bfcb3"}
Feb 03 09:04:33 crc kubenswrapper[4998]: I0203 09:04:33.549583 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-db-sync-bjggq" podStartSLOduration=3.449395796 podStartE2EDuration="12.549567395s" podCreationTimestamp="2026-02-03 09:04:21 +0000 UTC" firstStartedPulling="2026-02-03 09:04:23.251681874 +0000 UTC m=+8301.538375680" lastFinishedPulling="2026-02-03 09:04:32.351853473 +0000 UTC m=+8310.638547279" observedRunningTime="2026-02-03 09:04:33.541972679 +0000 UTC m=+8311.828666485" watchObservedRunningTime="2026-02-03 09:04:33.549567395 +0000 UTC m=+8311.836261201"
Feb 03 09:04:35 crc kubenswrapper[4998]: I0203 09:04:35.041858 4998 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-5d4bfc795c-4fj69" podUID="ab381bc8-8294-4c0a-91c1-f9bd491c66a1" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.129:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.129:8080: connect: connection refused"
Feb 03 09:04:35 crc kubenswrapper[4998]: I0203 09:04:35.553074 4998 generic.go:334] "Generic (PLEG): container finished" podID="8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc" containerID="caf2fe11ad7e12b30a1806da5b29690eabe454cf6b5324435c33c84e6e7bfcb3" exitCode=0
Feb 03 09:04:35 crc kubenswrapper[4998]: I0203 09:04:35.553220 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-bjggq" event={"ID":"8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc","Type":"ContainerDied","Data":"caf2fe11ad7e12b30a1806da5b29690eabe454cf6b5324435c33c84e6e7bfcb3"}
Feb 03 09:04:36 crc kubenswrapper[4998]: I0203 09:04:36.803163 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2sbfg"
Feb 03 09:04:36 crc kubenswrapper[4998]: I0203 09:04:36.851727 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-bjggq"
Feb 03 09:04:36 crc kubenswrapper[4998]: I0203 09:04:36.855553 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2sbfg"
Feb 03 09:04:36 crc kubenswrapper[4998]: I0203 09:04:36.942409 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc-combined-ca-bundle\") pod \"8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc\" (UID: \"8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc\") "
Feb 03 09:04:36 crc kubenswrapper[4998]: I0203 09:04:36.942498 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc-config-data\") pod \"8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc\" (UID: \"8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc\") "
Feb 03 09:04:36 crc kubenswrapper[4998]: I0203 09:04:36.942541 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pq6rc\" (UniqueName: \"kubernetes.io/projected/8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc-kube-api-access-pq6rc\") pod \"8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc\" (UID: \"8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc\") "
Feb 03 09:04:36 crc kubenswrapper[4998]: I0203 09:04:36.947619 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc-kube-api-access-pq6rc" (OuterVolumeSpecName: "kube-api-access-pq6rc") pod "8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc" (UID: "8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc"). InnerVolumeSpecName "kube-api-access-pq6rc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 09:04:36 crc kubenswrapper[4998]: I0203 09:04:36.977556 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc" (UID: "8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 09:04:37 crc kubenswrapper[4998]: I0203 09:04:37.009582 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc-config-data" (OuterVolumeSpecName: "config-data") pod "8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc" (UID: "8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 09:04:37 crc kubenswrapper[4998]: I0203 09:04:37.045419 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 03 09:04:37 crc kubenswrapper[4998]: I0203 09:04:37.045478 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc-config-data\") on node \"crc\" DevicePath \"\""
Feb 03 09:04:37 crc kubenswrapper[4998]: I0203 09:04:37.045498 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pq6rc\" (UniqueName: \"kubernetes.io/projected/8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc-kube-api-access-pq6rc\") on node \"crc\" DevicePath \"\""
Feb 03 09:04:37 crc kubenswrapper[4998]: I0203 09:04:37.051693 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2sbfg"]
Feb 03 09:04:37 crc kubenswrapper[4998]: I0203 09:04:37.576330 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-bjggq" event={"ID":"8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc","Type":"ContainerDied","Data":"78d4766f1be88db3cb8b7659a4af37bb1fc3e60b03ac77f785738e9fc62a574b"}
Feb 03 09:04:37 crc kubenswrapper[4998]: I0203 09:04:37.576752 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="78d4766f1be88db3cb8b7659a4af37bb1fc3e60b03ac77f785738e9fc62a574b"
Feb 03 09:04:37 crc kubenswrapper[4998]: I0203 09:04:37.576879 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-bjggq"
Feb 03 09:04:38 crc kubenswrapper[4998]: I0203 09:04:38.583752 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-2sbfg" podUID="3a44224b-701c-493c-b302-f4ce57fab57f" containerName="registry-server" containerID="cri-o://36e178ea6d8dec1cc3b0916cd59f71b09d95b60cc9be8f5f4ecc12b44ad8989f" gracePeriod=2
Feb 03 09:04:38 crc kubenswrapper[4998]: I0203 09:04:38.735450 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-engine-7d96fb7486-gb7dx"]
Feb 03 09:04:38 crc kubenswrapper[4998]: E0203 09:04:38.736182 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc" containerName="heat-db-sync"
Feb 03 09:04:38 crc kubenswrapper[4998]: I0203 09:04:38.736200 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc" containerName="heat-db-sync"
Feb 03 09:04:38 crc kubenswrapper[4998]: I0203 09:04:38.742601 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc" containerName="heat-db-sync"
Feb 03 09:04:38 crc kubenswrapper[4998]: I0203 09:04:38.743436 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-7d96fb7486-gb7dx"
Feb 03 09:04:38 crc kubenswrapper[4998]: I0203 09:04:38.749916 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data"
Feb 03 09:04:38 crc kubenswrapper[4998]: I0203 09:04:38.750049 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-ms6lv"
Feb 03 09:04:38 crc kubenswrapper[4998]: I0203 09:04:38.753232 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-engine-config-data"
Feb 03 09:04:38 crc kubenswrapper[4998]: I0203 09:04:38.781718 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/86d4c222-a376-4173-b257-eed1ca5fbd5c-config-data-custom\") pod \"heat-engine-7d96fb7486-gb7dx\" (UID: \"86d4c222-a376-4173-b257-eed1ca5fbd5c\") " pod="openstack/heat-engine-7d96fb7486-gb7dx"
Feb 03 09:04:38 crc kubenswrapper[4998]: I0203 09:04:38.781803 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86d4c222-a376-4173-b257-eed1ca5fbd5c-config-data\") pod \"heat-engine-7d96fb7486-gb7dx\" (UID: \"86d4c222-a376-4173-b257-eed1ca5fbd5c\") " pod="openstack/heat-engine-7d96fb7486-gb7dx"
Feb 03 09:04:38 crc kubenswrapper[4998]: I0203 09:04:38.781912 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mv8cm\" (UniqueName: \"kubernetes.io/projected/86d4c222-a376-4173-b257-eed1ca5fbd5c-kube-api-access-mv8cm\") pod \"heat-engine-7d96fb7486-gb7dx\" (UID: \"86d4c222-a376-4173-b257-eed1ca5fbd5c\") " pod="openstack/heat-engine-7d96fb7486-gb7dx"
Feb 03 09:04:38 crc kubenswrapper[4998]: I0203 09:04:38.782080 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86d4c222-a376-4173-b257-eed1ca5fbd5c-combined-ca-bundle\") pod \"heat-engine-7d96fb7486-gb7dx\" (UID: \"86d4c222-a376-4173-b257-eed1ca5fbd5c\") " pod="openstack/heat-engine-7d96fb7486-gb7dx"
Feb 03 09:04:38 crc kubenswrapper[4998]: I0203 09:04:38.791370 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-7d96fb7486-gb7dx"]
Feb 03 09:04:38 crc kubenswrapper[4998]: I0203 09:04:38.885093 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86d4c222-a376-4173-b257-eed1ca5fbd5c-combined-ca-bundle\") pod \"heat-engine-7d96fb7486-gb7dx\" (UID: \"86d4c222-a376-4173-b257-eed1ca5fbd5c\") " pod="openstack/heat-engine-7d96fb7486-gb7dx"
Feb 03 09:04:38 crc kubenswrapper[4998]: I0203 09:04:38.885207 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/86d4c222-a376-4173-b257-eed1ca5fbd5c-config-data-custom\") pod \"heat-engine-7d96fb7486-gb7dx\" (UID: \"86d4c222-a376-4173-b257-eed1ca5fbd5c\") " pod="openstack/heat-engine-7d96fb7486-gb7dx"
Feb 03 09:04:38 crc kubenswrapper[4998]: I0203 09:04:38.885234 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86d4c222-a376-4173-b257-eed1ca5fbd5c-config-data\") pod \"heat-engine-7d96fb7486-gb7dx\" (UID: \"86d4c222-a376-4173-b257-eed1ca5fbd5c\") " pod="openstack/heat-engine-7d96fb7486-gb7dx"
Feb 03 09:04:38 crc kubenswrapper[4998]: I0203 09:04:38.885265 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mv8cm\" (UniqueName: \"kubernetes.io/projected/86d4c222-a376-4173-b257-eed1ca5fbd5c-kube-api-access-mv8cm\") pod \"heat-engine-7d96fb7486-gb7dx\" (UID: \"86d4c222-a376-4173-b257-eed1ca5fbd5c\") " pod="openstack/heat-engine-7d96fb7486-gb7dx"
Feb 03 09:04:38 crc kubenswrapper[4998]: I0203 09:04:38.893202 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86d4c222-a376-4173-b257-eed1ca5fbd5c-config-data\") pod \"heat-engine-7d96fb7486-gb7dx\" (UID: \"86d4c222-a376-4173-b257-eed1ca5fbd5c\") " pod="openstack/heat-engine-7d96fb7486-gb7dx"
Feb 03 09:04:38 crc kubenswrapper[4998]: I0203 09:04:38.905976 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/86d4c222-a376-4173-b257-eed1ca5fbd5c-config-data-custom\") pod \"heat-engine-7d96fb7486-gb7dx\" (UID: \"86d4c222-a376-4173-b257-eed1ca5fbd5c\") " pod="openstack/heat-engine-7d96fb7486-gb7dx"
Feb 03 09:04:38 crc kubenswrapper[4998]: I0203 09:04:38.909217 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86d4c222-a376-4173-b257-eed1ca5fbd5c-combined-ca-bundle\") pod \"heat-engine-7d96fb7486-gb7dx\" (UID: \"86d4c222-a376-4173-b257-eed1ca5fbd5c\") " pod="openstack/heat-engine-7d96fb7486-gb7dx"
Feb 03 09:04:38 crc kubenswrapper[4998]: I0203 09:04:38.916903 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mv8cm\" (UniqueName: \"kubernetes.io/projected/86d4c222-a376-4173-b257-eed1ca5fbd5c-kube-api-access-mv8cm\") pod \"heat-engine-7d96fb7486-gb7dx\" (UID: \"86d4c222-a376-4173-b257-eed1ca5fbd5c\") " pod="openstack/heat-engine-7d96fb7486-gb7dx"
Feb 03 09:04:38 crc kubenswrapper[4998]: I0203 09:04:38.938738 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-api-56b7876679-z7x8v"]
Feb 03 09:04:38 crc kubenswrapper[4998]: I0203 09:04:38.960382 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-56b7876679-z7x8v"
Feb 03 09:04:38 crc kubenswrapper[4998]: I0203 09:04:38.970498 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-api-config-data"
Feb 03 09:04:38 crc kubenswrapper[4998]: I0203 09:04:38.987951 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-cfnapi-586cd89d46-clnps"]
Feb 03 09:04:38 crc kubenswrapper[4998]: I0203 09:04:38.989216 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-586cd89d46-clnps"
Feb 03 09:04:38 crc kubenswrapper[4998]: I0203 09:04:38.990199 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltvq5\" (UniqueName: \"kubernetes.io/projected/94c40d17-0169-47e7-b150-10a8a4a06c89-kube-api-access-ltvq5\") pod \"heat-api-56b7876679-z7x8v\" (UID: \"94c40d17-0169-47e7-b150-10a8a4a06c89\") " pod="openstack/heat-api-56b7876679-z7x8v"
Feb 03 09:04:38 crc kubenswrapper[4998]: I0203 09:04:38.990256 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94c40d17-0169-47e7-b150-10a8a4a06c89-combined-ca-bundle\") pod \"heat-api-56b7876679-z7x8v\" (UID: \"94c40d17-0169-47e7-b150-10a8a4a06c89\") " pod="openstack/heat-api-56b7876679-z7x8v"
Feb 03 09:04:38 crc kubenswrapper[4998]: I0203 09:04:38.990308 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94c40d17-0169-47e7-b150-10a8a4a06c89-config-data\") pod \"heat-api-56b7876679-z7x8v\" (UID: \"94c40d17-0169-47e7-b150-10a8a4a06c89\") " pod="openstack/heat-api-56b7876679-z7x8v"
Feb 03 09:04:38 crc kubenswrapper[4998]: I0203 09:04:38.990342 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/94c40d17-0169-47e7-b150-10a8a4a06c89-config-data-custom\") pod \"heat-api-56b7876679-z7x8v\" (UID: \"94c40d17-0169-47e7-b150-10a8a4a06c89\") " pod="openstack/heat-api-56b7876679-z7x8v"
Feb 03 09:04:38 crc kubenswrapper[4998]: I0203 09:04:38.992238 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-56b7876679-z7x8v"]
Feb 03 09:04:38 crc kubenswrapper[4998]: I0203 09:04:38.996966 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-cfnapi-config-data"
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.001941 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-586cd89d46-clnps"]
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.075933 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-7d96fb7486-gb7dx"
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.092165 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60f33157-352c-49ac-ba85-ae425330ba78-combined-ca-bundle\") pod \"heat-cfnapi-586cd89d46-clnps\" (UID: \"60f33157-352c-49ac-ba85-ae425330ba78\") " pod="openstack/heat-cfnapi-586cd89d46-clnps"
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.092226 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltvq5\" (UniqueName: \"kubernetes.io/projected/94c40d17-0169-47e7-b150-10a8a4a06c89-kube-api-access-ltvq5\") pod \"heat-api-56b7876679-z7x8v\" (UID: \"94c40d17-0169-47e7-b150-10a8a4a06c89\") " pod="openstack/heat-api-56b7876679-z7x8v"
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.092293 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94c40d17-0169-47e7-b150-10a8a4a06c89-combined-ca-bundle\") pod \"heat-api-56b7876679-z7x8v\" (UID: \"94c40d17-0169-47e7-b150-10a8a4a06c89\") " pod="openstack/heat-api-56b7876679-z7x8v"
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.092339 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/60f33157-352c-49ac-ba85-ae425330ba78-config-data-custom\") pod \"heat-cfnapi-586cd89d46-clnps\" (UID: \"60f33157-352c-49ac-ba85-ae425330ba78\") " pod="openstack/heat-cfnapi-586cd89d46-clnps"
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.092381 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94c40d17-0169-47e7-b150-10a8a4a06c89-config-data\") pod \"heat-api-56b7876679-z7x8v\" (UID: \"94c40d17-0169-47e7-b150-10a8a4a06c89\") " pod="openstack/heat-api-56b7876679-z7x8v"
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.092414 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/94c40d17-0169-47e7-b150-10a8a4a06c89-config-data-custom\") pod \"heat-api-56b7876679-z7x8v\" (UID: \"94c40d17-0169-47e7-b150-10a8a4a06c89\") " pod="openstack/heat-api-56b7876679-z7x8v"
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.092450 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rntdk\" (UniqueName: \"kubernetes.io/projected/60f33157-352c-49ac-ba85-ae425330ba78-kube-api-access-rntdk\") pod \"heat-cfnapi-586cd89d46-clnps\" (UID: \"60f33157-352c-49ac-ba85-ae425330ba78\") " pod="openstack/heat-cfnapi-586cd89d46-clnps"
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.092503 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60f33157-352c-49ac-ba85-ae425330ba78-config-data\") pod \"heat-cfnapi-586cd89d46-clnps\" (UID: \"60f33157-352c-49ac-ba85-ae425330ba78\") " pod="openstack/heat-cfnapi-586cd89d46-clnps"
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.099678 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/94c40d17-0169-47e7-b150-10a8a4a06c89-config-data-custom\") pod \"heat-api-56b7876679-z7x8v\" (UID: \"94c40d17-0169-47e7-b150-10a8a4a06c89\") " pod="openstack/heat-api-56b7876679-z7x8v"
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.100613 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94c40d17-0169-47e7-b150-10a8a4a06c89-config-data\") pod \"heat-api-56b7876679-z7x8v\" (UID: \"94c40d17-0169-47e7-b150-10a8a4a06c89\") " pod="openstack/heat-api-56b7876679-z7x8v"
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.112528 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94c40d17-0169-47e7-b150-10a8a4a06c89-combined-ca-bundle\") pod \"heat-api-56b7876679-z7x8v\" (UID: \"94c40d17-0169-47e7-b150-10a8a4a06c89\") " pod="openstack/heat-api-56b7876679-z7x8v"
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.114493 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltvq5\" (UniqueName: \"kubernetes.io/projected/94c40d17-0169-47e7-b150-10a8a4a06c89-kube-api-access-ltvq5\") pod \"heat-api-56b7876679-z7x8v\" (UID: \"94c40d17-0169-47e7-b150-10a8a4a06c89\") " pod="openstack/heat-api-56b7876679-z7x8v"
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.194885 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60f33157-352c-49ac-ba85-ae425330ba78-combined-ca-bundle\") pod \"heat-cfnapi-586cd89d46-clnps\" (UID: \"60f33157-352c-49ac-ba85-ae425330ba78\") " pod="openstack/heat-cfnapi-586cd89d46-clnps"
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.195336 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/60f33157-352c-49ac-ba85-ae425330ba78-config-data-custom\") pod \"heat-cfnapi-586cd89d46-clnps\" (UID: \"60f33157-352c-49ac-ba85-ae425330ba78\") " pod="openstack/heat-cfnapi-586cd89d46-clnps"
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.195652 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rntdk\" (UniqueName: \"kubernetes.io/projected/60f33157-352c-49ac-ba85-ae425330ba78-kube-api-access-rntdk\") pod \"heat-cfnapi-586cd89d46-clnps\" (UID: \"60f33157-352c-49ac-ba85-ae425330ba78\") " pod="openstack/heat-cfnapi-586cd89d46-clnps"
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.195715 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60f33157-352c-49ac-ba85-ae425330ba78-config-data\") pod \"heat-cfnapi-586cd89d46-clnps\" (UID: \"60f33157-352c-49ac-ba85-ae425330ba78\") " pod="openstack/heat-cfnapi-586cd89d46-clnps"
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.202195 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60f33157-352c-49ac-ba85-ae425330ba78-config-data\") pod \"heat-cfnapi-586cd89d46-clnps\" (UID: \"60f33157-352c-49ac-ba85-ae425330ba78\") " pod="openstack/heat-cfnapi-586cd89d46-clnps"
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.206600 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60f33157-352c-49ac-ba85-ae425330ba78-combined-ca-bundle\") pod \"heat-cfnapi-586cd89d46-clnps\" (UID: \"60f33157-352c-49ac-ba85-ae425330ba78\") " pod="openstack/heat-cfnapi-586cd89d46-clnps"
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.220719 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/60f33157-352c-49ac-ba85-ae425330ba78-config-data-custom\") pod \"heat-cfnapi-586cd89d46-clnps\" (UID: \"60f33157-352c-49ac-ba85-ae425330ba78\") " pod="openstack/heat-cfnapi-586cd89d46-clnps"
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.233154 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rntdk\" (UniqueName: \"kubernetes.io/projected/60f33157-352c-49ac-ba85-ae425330ba78-kube-api-access-rntdk\") pod \"heat-cfnapi-586cd89d46-clnps\" (UID: \"60f33157-352c-49ac-ba85-ae425330ba78\") " pod="openstack/heat-cfnapi-586cd89d46-clnps"
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.302776 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2sbfg"
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.307235 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-56b7876679-z7x8v"
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.323768 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-586cd89d46-clnps"
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.397748 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a44224b-701c-493c-b302-f4ce57fab57f-catalog-content\") pod \"3a44224b-701c-493c-b302-f4ce57fab57f\" (UID: \"3a44224b-701c-493c-b302-f4ce57fab57f\") "
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.398108 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kkm4g\" (UniqueName: \"kubernetes.io/projected/3a44224b-701c-493c-b302-f4ce57fab57f-kube-api-access-kkm4g\") pod \"3a44224b-701c-493c-b302-f4ce57fab57f\" (UID: \"3a44224b-701c-493c-b302-f4ce57fab57f\") "
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.398152 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a44224b-701c-493c-b302-f4ce57fab57f-utilities\") pod \"3a44224b-701c-493c-b302-f4ce57fab57f\" (UID: \"3a44224b-701c-493c-b302-f4ce57fab57f\") "
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.446998 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a44224b-701c-493c-b302-f4ce57fab57f-utilities" (OuterVolumeSpecName: "utilities") pod "3a44224b-701c-493c-b302-f4ce57fab57f" (UID: "3a44224b-701c-493c-b302-f4ce57fab57f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.453687 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a44224b-701c-493c-b302-f4ce57fab57f-kube-api-access-kkm4g" (OuterVolumeSpecName: "kube-api-access-kkm4g") pod "3a44224b-701c-493c-b302-f4ce57fab57f" (UID: "3a44224b-701c-493c-b302-f4ce57fab57f"). InnerVolumeSpecName "kube-api-access-kkm4g". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.487727 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a44224b-701c-493c-b302-f4ce57fab57f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3a44224b-701c-493c-b302-f4ce57fab57f" (UID: "3a44224b-701c-493c-b302-f4ce57fab57f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.501097 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a44224b-701c-493c-b302-f4ce57fab57f-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.501136 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kkm4g\" (UniqueName: \"kubernetes.io/projected/3a44224b-701c-493c-b302-f4ce57fab57f-kube-api-access-kkm4g\") on node \"crc\" DevicePath \"\""
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.501149 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a44224b-701c-493c-b302-f4ce57fab57f-utilities\") on node \"crc\" DevicePath \"\""
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.648924 4998 generic.go:334] "Generic (PLEG): container finished" podID="3a44224b-701c-493c-b302-f4ce57fab57f" containerID="36e178ea6d8dec1cc3b0916cd59f71b09d95b60cc9be8f5f4ecc12b44ad8989f" exitCode=0
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.648964 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2sbfg" event={"ID":"3a44224b-701c-493c-b302-f4ce57fab57f","Type":"ContainerDied","Data":"36e178ea6d8dec1cc3b0916cd59f71b09d95b60cc9be8f5f4ecc12b44ad8989f"}
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.648990 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2sbfg" event={"ID":"3a44224b-701c-493c-b302-f4ce57fab57f","Type":"ContainerDied","Data":"4b966d4c67097bf90edd21b3555031af8f50e0b5cc2a677963a491713bb52ada"}
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.649008 4998 scope.go:117] "RemoveContainer" containerID="36e178ea6d8dec1cc3b0916cd59f71b09d95b60cc9be8f5f4ecc12b44ad8989f"
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.649418 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2sbfg"
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.696140 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2sbfg"]
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.711228 4998 scope.go:117] "RemoveContainer" containerID="c664bdfd4807b93c13aeab7c7a600d5c73f17d618b933518e11b525481bd5a21"
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.725211 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-2sbfg"]
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.785457 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-7d96fb7486-gb7dx"]
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.799849 4998 scope.go:117] "RemoveContainer" containerID="7dd9c25385683641868754035985c012f18d6e147e46a10f90dac68f3f59fbcb"
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.876580 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-56b7876679-z7x8v"]
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.964987 4998 scope.go:117] "RemoveContainer" containerID="36e178ea6d8dec1cc3b0916cd59f71b09d95b60cc9be8f5f4ecc12b44ad8989f"
Feb 03 09:04:39 crc kubenswrapper[4998]: E0203 09:04:39.967659 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"36e178ea6d8dec1cc3b0916cd59f71b09d95b60cc9be8f5f4ecc12b44ad8989f\": container with ID starting with 36e178ea6d8dec1cc3b0916cd59f71b09d95b60cc9be8f5f4ecc12b44ad8989f not found: ID does not exist" containerID="36e178ea6d8dec1cc3b0916cd59f71b09d95b60cc9be8f5f4ecc12b44ad8989f"
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.967713 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36e178ea6d8dec1cc3b0916cd59f71b09d95b60cc9be8f5f4ecc12b44ad8989f"} err="failed to get container status \"36e178ea6d8dec1cc3b0916cd59f71b09d95b60cc9be8f5f4ecc12b44ad8989f\": rpc error: code = NotFound desc = could not find container \"36e178ea6d8dec1cc3b0916cd59f71b09d95b60cc9be8f5f4ecc12b44ad8989f\": container with ID starting with 36e178ea6d8dec1cc3b0916cd59f71b09d95b60cc9be8f5f4ecc12b44ad8989f not found: ID does not exist"
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.967948 4998 scope.go:117] "RemoveContainer" containerID="c664bdfd4807b93c13aeab7c7a600d5c73f17d618b933518e11b525481bd5a21"
Feb 03 09:04:39 crc kubenswrapper[4998]: E0203 09:04:39.969665 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c664bdfd4807b93c13aeab7c7a600d5c73f17d618b933518e11b525481bd5a21\": container with ID starting with c664bdfd4807b93c13aeab7c7a600d5c73f17d618b933518e11b525481bd5a21 not found: ID does not exist" containerID="c664bdfd4807b93c13aeab7c7a600d5c73f17d618b933518e11b525481bd5a21"
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.969696 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c664bdfd4807b93c13aeab7c7a600d5c73f17d618b933518e11b525481bd5a21"} err="failed to get container status \"c664bdfd4807b93c13aeab7c7a600d5c73f17d618b933518e11b525481bd5a21\": rpc error: code = NotFound desc = could not find container \"c664bdfd4807b93c13aeab7c7a600d5c73f17d618b933518e11b525481bd5a21\": container with ID starting with c664bdfd4807b93c13aeab7c7a600d5c73f17d618b933518e11b525481bd5a21 not found: ID does not exist"
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.969718 4998 scope.go:117] "RemoveContainer" containerID="7dd9c25385683641868754035985c012f18d6e147e46a10f90dac68f3f59fbcb"
Feb 03 09:04:39 crc kubenswrapper[4998]: E0203 09:04:39.970155 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7dd9c25385683641868754035985c012f18d6e147e46a10f90dac68f3f59fbcb\": container with ID starting with 7dd9c25385683641868754035985c012f18d6e147e46a10f90dac68f3f59fbcb not found: ID does not exist" containerID="7dd9c25385683641868754035985c012f18d6e147e46a10f90dac68f3f59fbcb"
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.970187 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7dd9c25385683641868754035985c012f18d6e147e46a10f90dac68f3f59fbcb"} err="failed to get container status \"7dd9c25385683641868754035985c012f18d6e147e46a10f90dac68f3f59fbcb\": rpc error: code = NotFound desc = could not find container \"7dd9c25385683641868754035985c012f18d6e147e46a10f90dac68f3f59fbcb\": container with ID starting with 7dd9c25385683641868754035985c012f18d6e147e46a10f90dac68f3f59fbcb not found: ID does not exist"
Feb 03 09:04:39 crc kubenswrapper[4998]: I0203 09:04:39.977184 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-586cd89d46-clnps"]
Feb 03 09:04:40 crc kubenswrapper[4998]: I0203 09:04:40.242924 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-nlfqx"
Feb 03 09:04:40 crc kubenswrapper[4998]: I0203 09:04:40.315323 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-nlfqx"
Feb 03 09:04:40 crc kubenswrapper[4998]: I0203 09:04:40.445287 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a44224b-701c-493c-b302-f4ce57fab57f" path="/var/lib/kubelet/pods/3a44224b-701c-493c-b302-f4ce57fab57f/volumes"
Feb 03 09:04:40 crc kubenswrapper[4998]: I0203 09:04:40.669404 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-7d96fb7486-gb7dx" event={"ID":"86d4c222-a376-4173-b257-eed1ca5fbd5c","Type":"ContainerStarted","Data":"7ed81d327d765f4f169de0feabd2e584e265cd68f5708f8ebd53833fee1562ca"}
Feb 03 09:04:40 crc kubenswrapper[4998]: I0203 09:04:40.669441 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-7d96fb7486-gb7dx" event={"ID":"86d4c222-a376-4173-b257-eed1ca5fbd5c","Type":"ContainerStarted","Data":"6c2d86e21ed86340839e453f61f34da1b90069c5dd995fa965eccad295b2902d"}
Feb 03 09:04:40 crc kubenswrapper[4998]: I0203 09:04:40.669618 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-engine-7d96fb7486-gb7dx"
Feb 03 09:04:40 crc kubenswrapper[4998]: I0203 09:04:40.673025 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-56b7876679-z7x8v" event={"ID":"94c40d17-0169-47e7-b150-10a8a4a06c89","Type":"ContainerStarted","Data":"22929b80a32c7d94384119e7ec129e1e1a9da1b83152d75e40989fb6a219fc6a"}
Feb 03 09:04:40 crc kubenswrapper[4998]: I0203 09:04:40.676868 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-586cd89d46-clnps" event={"ID":"60f33157-352c-49ac-ba85-ae425330ba78","Type":"ContainerStarted","Data":"ab20e4e7d0a7872eb17dc0b1308b0114036ee888e59a8912e2d08268a634e799"}
Feb 03 09:04:40 crc kubenswrapper[4998]: I0203 09:04:40.696753 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-engine-7d96fb7486-gb7dx" podStartSLOduration=2.696730548 podStartE2EDuration="2.696730548s" podCreationTimestamp="2026-02-03 09:04:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 09:04:40.688094013 +0000 UTC m=+8318.974787849" watchObservedRunningTime="2026-02-03 09:04:40.696730548 +0000 UTC m=+8318.983424374"
Feb 03 09:04:42 crc kubenswrapper[4998]: I0203 09:04:42.490282 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nlfqx"]
Feb 03 09:04:42 crc kubenswrapper[4998]: I0203 09:04:42.493831 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-nlfqx" podUID="b54e29c9-4ffb-40de-8cb7-aade40a34c1d" containerName="registry-server" containerID="cri-o://333bea0705398d225bb8ca4ccff594b57081bccaa7992249a29314a0f8d63bbe" gracePeriod=2
Feb 03 09:04:42 crc kubenswrapper[4998]: I0203 09:04:42.703567 4998 generic.go:334] "Generic (PLEG): container finished" podID="b54e29c9-4ffb-40de-8cb7-aade40a34c1d" containerID="333bea0705398d225bb8ca4ccff594b57081bccaa7992249a29314a0f8d63bbe" exitCode=0
Feb 03 09:04:42 crc kubenswrapper[4998]: I0203 09:04:42.703825 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nlfqx" event={"ID":"b54e29c9-4ffb-40de-8cb7-aade40a34c1d","Type":"ContainerDied","Data":"333bea0705398d225bb8ca4ccff594b57081bccaa7992249a29314a0f8d63bbe"}
Feb 03 09:04:42 crc kubenswrapper[4998]: I0203 09:04:42.705735 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-586cd89d46-clnps" event={"ID":"60f33157-352c-49ac-ba85-ae425330ba78","Type":"ContainerStarted","Data":"9762fc6f0ed11b311afa16a8f7b9ec7652b7ca8543030ab99d93ec5224b27f68"}
Feb 03 09:04:42 crc kubenswrapper[4998]: I0203 09:04:42.708631 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-cfnapi-586cd89d46-clnps"
Feb 03 09:04:42 crc kubenswrapper[4998]: I0203 09:04:42.709345 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-56b7876679-z7x8v" event={"ID":"94c40d17-0169-47e7-b150-10a8a4a06c89","Type":"ContainerStarted","Data":"52c7521570d6fde986d69999a4d7aa77554472a407b934fac56cd5f2307c43f0"}
Feb 03 09:04:42 crc kubenswrapper[4998]: I0203 09:04:42.709463 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-api-56b7876679-z7x8v"
Feb 03 09:04:42 crc kubenswrapper[4998]: I0203 09:04:42.735273 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-cfnapi-586cd89d46-clnps" podStartSLOduration=3.161474382 podStartE2EDuration="4.735253731s" podCreationTimestamp="2026-02-03 09:04:38 +0000 UTC" firstStartedPulling="2026-02-03 09:04:39.969320587 +0000 UTC m=+8318.256014413" lastFinishedPulling="2026-02-03 09:04:41.543099956 +0000 UTC m=+8319.829793762" observedRunningTime="2026-02-03 09:04:42.72674534 +0000 UTC m=+8321.013439146" watchObservedRunningTime="2026-02-03 09:04:42.735253731 +0000 UTC m=+8321.021947537"
Feb 03 09:04:42 crc kubenswrapper[4998]: I0203 09:04:42.824146 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-api-56b7876679-z7x8v" podStartSLOduration=3.163312935 podStartE2EDuration="4.824111504s" podCreationTimestamp="2026-02-03 09:04:38 +0000 UTC" firstStartedPulling="2026-02-03 09:04:39.8880455 +0000 UTC m=+8318.174739306" lastFinishedPulling="2026-02-03 09:04:41.548844069 +0000 UTC m=+8319.835537875" observedRunningTime="2026-02-03 09:04:42.768919027 +0000 UTC m=+8321.055612853" watchObservedRunningTime="2026-02-03 09:04:42.824111504 +0000 UTC m=+8321.110805310"
Feb 03 09:04:43 crc kubenswrapper[4998]: I0203 09:04:43.252015 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nlfqx"
Feb 03 09:04:43 crc kubenswrapper[4998]: I0203 09:04:43.411147 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b54e29c9-4ffb-40de-8cb7-aade40a34c1d-catalog-content\") pod \"b54e29c9-4ffb-40de-8cb7-aade40a34c1d\" (UID: \"b54e29c9-4ffb-40de-8cb7-aade40a34c1d\") "
Feb 03 09:04:43 crc kubenswrapper[4998]: I0203 09:04:43.411187 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qd8bl\" (UniqueName: \"kubernetes.io/projected/b54e29c9-4ffb-40de-8cb7-aade40a34c1d-kube-api-access-qd8bl\") pod \"b54e29c9-4ffb-40de-8cb7-aade40a34c1d\" (UID: \"b54e29c9-4ffb-40de-8cb7-aade40a34c1d\") "
Feb 03 09:04:43 crc kubenswrapper[4998]: I0203 09:04:43.411307 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b54e29c9-4ffb-40de-8cb7-aade40a34c1d-utilities\") pod \"b54e29c9-4ffb-40de-8cb7-aade40a34c1d\" (UID: \"b54e29c9-4ffb-40de-8cb7-aade40a34c1d\") "
Feb 03 09:04:43 crc kubenswrapper[4998]: I0203 09:04:43.412222 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b54e29c9-4ffb-40de-8cb7-aade40a34c1d-utilities" (OuterVolumeSpecName: "utilities") pod "b54e29c9-4ffb-40de-8cb7-aade40a34c1d" (UID: "b54e29c9-4ffb-40de-8cb7-aade40a34c1d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 09:04:43 crc kubenswrapper[4998]: I0203 09:04:43.418067 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b54e29c9-4ffb-40de-8cb7-aade40a34c1d-kube-api-access-qd8bl" (OuterVolumeSpecName: "kube-api-access-qd8bl") pod "b54e29c9-4ffb-40de-8cb7-aade40a34c1d" (UID: "b54e29c9-4ffb-40de-8cb7-aade40a34c1d"). InnerVolumeSpecName "kube-api-access-qd8bl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 09:04:43 crc kubenswrapper[4998]: I0203 09:04:43.428106 4998 scope.go:117] "RemoveContainer" containerID="f0960bf93408fbd80b1af606d0b0b825518c12f926ca6f6b8748be736ddfda77"
Feb 03 09:04:43 crc kubenswrapper[4998]: E0203 09:04:43.428414 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 09:04:43 crc kubenswrapper[4998]: I0203 09:04:43.514106 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b54e29c9-4ffb-40de-8cb7-aade40a34c1d-utilities\") on node \"crc\" DevicePath \"\""
Feb 03 09:04:43 crc kubenswrapper[4998]: I0203 09:04:43.514140 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qd8bl\" (UniqueName: \"kubernetes.io/projected/b54e29c9-4ffb-40de-8cb7-aade40a34c1d-kube-api-access-qd8bl\") on node \"crc\" DevicePath \"\""
Feb 03 09:04:43 crc kubenswrapper[4998]: I0203 09:04:43.577218 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b54e29c9-4ffb-40de-8cb7-aade40a34c1d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b54e29c9-4ffb-40de-8cb7-aade40a34c1d" (UID: "b54e29c9-4ffb-40de-8cb7-aade40a34c1d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 09:04:43 crc kubenswrapper[4998]: I0203 09:04:43.616328 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b54e29c9-4ffb-40de-8cb7-aade40a34c1d-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 03 09:04:43 crc kubenswrapper[4998]: I0203 09:04:43.722605 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nlfqx" event={"ID":"b54e29c9-4ffb-40de-8cb7-aade40a34c1d","Type":"ContainerDied","Data":"3370ff17d331584c812d47364534b3f4376f8716585c80fdd606be1967c7eca1"}
Feb 03 09:04:43 crc kubenswrapper[4998]: I0203 09:04:43.722915 4998 scope.go:117] "RemoveContainer" containerID="333bea0705398d225bb8ca4ccff594b57081bccaa7992249a29314a0f8d63bbe"
Feb 03 09:04:43 crc kubenswrapper[4998]: I0203 09:04:43.723295 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nlfqx"
Feb 03 09:04:43 crc kubenswrapper[4998]: I0203 09:04:43.766858 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nlfqx"]
Feb 03 09:04:43 crc kubenswrapper[4998]: I0203 09:04:43.768641 4998 scope.go:117] "RemoveContainer" containerID="47b75b6dcba448c5a4b4b4d9e8837542686cab02669ff3623cc264ac975e57cf"
Feb 03 09:04:43 crc kubenswrapper[4998]: I0203 09:04:43.777896 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-nlfqx"]
Feb 03 09:04:43 crc kubenswrapper[4998]: I0203 09:04:43.795381 4998 scope.go:117] "RemoveContainer" containerID="b2817c2803aaccae3a0e1b03b011a2359336892b3de0d9a2862b3461ae08f800"
Feb 03 09:04:44 crc kubenswrapper[4998]: I0203 09:04:44.442482 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b54e29c9-4ffb-40de-8cb7-aade40a34c1d" path="/var/lib/kubelet/pods/b54e29c9-4ffb-40de-8cb7-aade40a34c1d/volumes"
Feb 03 09:04:46 crc kubenswrapper[4998]: I0203 09:04:46.932625 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-5d4bfc795c-4fj69"
Feb 03 09:04:48 crc kubenswrapper[4998]: I0203 09:04:48.638004 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-5d4bfc795c-4fj69"
Feb 03 09:04:48 crc kubenswrapper[4998]: I0203 09:04:48.710488 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7cfddfbbcc-mxshd"]
Feb 03 09:04:48 crc kubenswrapper[4998]: I0203 09:04:48.710794 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7cfddfbbcc-mxshd" podUID="653c897b-8672-405d-9e1e-e877f22d452e" containerName="horizon-log" containerID="cri-o://0d2a14409835e97df29bb9a8944b9028284d9694f624cfdf0bc81bddb46be068" gracePeriod=30
Feb 03 09:04:48 crc kubenswrapper[4998]: I0203 09:04:48.711004 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7cfddfbbcc-mxshd" podUID="653c897b-8672-405d-9e1e-e877f22d452e" containerName="horizon" containerID="cri-o://dc45bbf3711fd676156644439365b02c24a3681cfdb8cf65751bd268bf984d6c" gracePeriod=30
Feb 03 09:04:50 crc kubenswrapper[4998]: I0203 09:04:50.701217 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-api-56b7876679-z7x8v"
Feb 03 09:04:50 crc kubenswrapper[4998]: I0203 09:04:50.850368 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-cfnapi-586cd89d46-clnps"
Feb 03 09:04:51 crc kubenswrapper[4998]: E0203 09:04:51.997200 4998 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod653c897b_8672_405d_9e1e_e877f22d452e.slice/crio-conmon-dc45bbf3711fd676156644439365b02c24a3681cfdb8cf65751bd268bf984d6c.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod653c897b_8672_405d_9e1e_e877f22d452e.slice/crio-dc45bbf3711fd676156644439365b02c24a3681cfdb8cf65751bd268bf984d6c.scope\": RecentStats: unable to find data in memory cache]"
Feb 03 09:04:52 crc kubenswrapper[4998]: I0203 09:04:52.399259 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-7cfddfbbcc-mxshd" podUID="653c897b-8672-405d-9e1e-e877f22d452e" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.126:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.126:8080: connect: connection refused"
Feb 03 09:04:52 crc kubenswrapper[4998]: I0203 09:04:52.830166 4998 generic.go:334] "Generic (PLEG): container finished" podID="653c897b-8672-405d-9e1e-e877f22d452e" containerID="dc45bbf3711fd676156644439365b02c24a3681cfdb8cf65751bd268bf984d6c" exitCode=0
Feb 03 09:04:52 crc kubenswrapper[4998]: I0203 09:04:52.830295 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7cfddfbbcc-mxshd" event={"ID":"653c897b-8672-405d-9e1e-e877f22d452e","Type":"ContainerDied","Data":"dc45bbf3711fd676156644439365b02c24a3681cfdb8cf65751bd268bf984d6c"}
Feb 03 09:04:58 crc kubenswrapper[4998]: I0203 09:04:58.428540 4998 scope.go:117] "RemoveContainer" containerID="f0960bf93408fbd80b1af606d0b0b825518c12f926ca6f6b8748be736ddfda77"
Feb 03 09:04:58 crc kubenswrapper[4998]: E0203 09:04:58.429286 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 09:04:59 crc kubenswrapper[4998]: I0203 09:04:59.107918 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-engine-7d96fb7486-gb7dx"
Feb 03 09:05:02 crc kubenswrapper[4998]: I0203 09:05:02.400048 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-7cfddfbbcc-mxshd" podUID="653c897b-8672-405d-9e1e-e877f22d452e" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.126:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.126:8080: connect: connection refused"
Feb 03 09:05:07 crc kubenswrapper[4998]: I0203 09:05:07.900420 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq"]
Feb 03 09:05:07 crc kubenswrapper[4998]: E0203 09:05:07.901441 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a44224b-701c-493c-b302-f4ce57fab57f" containerName="extract-utilities"
Feb 03 09:05:07 crc kubenswrapper[4998]: I0203 09:05:07.901457 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a44224b-701c-493c-b302-f4ce57fab57f" containerName="extract-utilities"
Feb 03 09:05:07 crc kubenswrapper[4998]: E0203 09:05:07.901466 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a44224b-701c-493c-b302-f4ce57fab57f" containerName="extract-content"
Feb 03 09:05:07 crc kubenswrapper[4998]: I0203 09:05:07.901472 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a44224b-701c-493c-b302-f4ce57fab57f" containerName="extract-content"
Feb 03 09:05:07 crc kubenswrapper[4998]: E0203 09:05:07.901486 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b54e29c9-4ffb-40de-8cb7-aade40a34c1d" containerName="extract-content"
Feb 03 09:05:07 crc kubenswrapper[4998]: I0203 09:05:07.901492 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="b54e29c9-4ffb-40de-8cb7-aade40a34c1d" containerName="extract-content"
Feb 03 09:05:07 crc kubenswrapper[4998]: E0203 09:05:07.901512 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a44224b-701c-493c-b302-f4ce57fab57f" containerName="registry-server"
Feb 03 09:05:07 crc kubenswrapper[4998]: I0203 09:05:07.901518 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a44224b-701c-493c-b302-f4ce57fab57f" containerName="registry-server"
Feb 03 09:05:07 crc kubenswrapper[4998]: E0203 09:05:07.901527 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b54e29c9-4ffb-40de-8cb7-aade40a34c1d" containerName="extract-utilities"
Feb 03 09:05:07 crc kubenswrapper[4998]: I0203 09:05:07.901533 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="b54e29c9-4ffb-40de-8cb7-aade40a34c1d" containerName="extract-utilities"
Feb 03 09:05:07 crc kubenswrapper[4998]: E0203 09:05:07.901555 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b54e29c9-4ffb-40de-8cb7-aade40a34c1d" containerName="registry-server"
Feb 03 09:05:07 crc kubenswrapper[4998]: I0203 09:05:07.901560 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="b54e29c9-4ffb-40de-8cb7-aade40a34c1d" containerName="registry-server"
Feb 03 09:05:07 crc kubenswrapper[4998]: I0203 09:05:07.901739 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="b54e29c9-4ffb-40de-8cb7-aade40a34c1d" containerName="registry-server"
Feb 03 09:05:07 crc kubenswrapper[4998]: I0203 09:05:07.901754 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a44224b-701c-493c-b302-f4ce57fab57f" containerName="registry-server"
Feb 03 09:05:07 crc kubenswrapper[4998]: I0203 09:05:07.907262 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq"
Feb 03 09:05:07 crc kubenswrapper[4998]: I0203 09:05:07.909468 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Feb 03 09:05:07 crc kubenswrapper[4998]: I0203 09:05:07.914260 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq"]
Feb 03 09:05:08 crc kubenswrapper[4998]: I0203 09:05:08.045836 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6dbe9085-4b64-4d38-93cb-9ff53a5e4e14-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq\" (UID: \"6dbe9085-4b64-4d38-93cb-9ff53a5e4e14\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq"
Feb 03 09:05:08 crc kubenswrapper[4998]: I0203 09:05:08.046168 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rfhhj\" (UniqueName: \"kubernetes.io/projected/6dbe9085-4b64-4d38-93cb-9ff53a5e4e14-kube-api-access-rfhhj\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq\" (UID: \"6dbe9085-4b64-4d38-93cb-9ff53a5e4e14\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq"
Feb 03 09:05:08 crc kubenswrapper[4998]: I0203 09:05:08.046411 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6dbe9085-4b64-4d38-93cb-9ff53a5e4e14-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq\" (UID: \"6dbe9085-4b64-4d38-93cb-9ff53a5e4e14\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq"
Feb 03 09:05:08 crc kubenswrapper[4998]: I0203 09:05:08.147919 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rfhhj\" (UniqueName: \"kubernetes.io/projected/6dbe9085-4b64-4d38-93cb-9ff53a5e4e14-kube-api-access-rfhhj\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq\" (UID: \"6dbe9085-4b64-4d38-93cb-9ff53a5e4e14\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq"
Feb 03 09:05:08 crc kubenswrapper[4998]: I0203 09:05:08.148008 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6dbe9085-4b64-4d38-93cb-9ff53a5e4e14-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq\" (UID: \"6dbe9085-4b64-4d38-93cb-9ff53a5e4e14\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq"
Feb 03 09:05:08 crc kubenswrapper[4998]: I0203 09:05:08.148053 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6dbe9085-4b64-4d38-93cb-9ff53a5e4e14-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq\" (UID: \"6dbe9085-4b64-4d38-93cb-9ff53a5e4e14\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq"
Feb 03 09:05:08 crc kubenswrapper[4998]: I0203 09:05:08.148662 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6dbe9085-4b64-4d38-93cb-9ff53a5e4e14-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq\" (UID: \"6dbe9085-4b64-4d38-93cb-9ff53a5e4e14\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq"
Feb 03 09:05:08 crc kubenswrapper[4998]: I0203 09:05:08.148703 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6dbe9085-4b64-4d38-93cb-9ff53a5e4e14-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq\" (UID: \"6dbe9085-4b64-4d38-93cb-9ff53a5e4e14\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq"
Feb 03 09:05:08 crc kubenswrapper[4998]: I0203 09:05:08.183568 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rfhhj\" (UniqueName: \"kubernetes.io/projected/6dbe9085-4b64-4d38-93cb-9ff53a5e4e14-kube-api-access-rfhhj\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq\" (UID: \"6dbe9085-4b64-4d38-93cb-9ff53a5e4e14\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq"
Feb 03 09:05:08 crc kubenswrapper[4998]: I0203 09:05:08.227754 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq"
Feb 03 09:05:08 crc kubenswrapper[4998]: I0203 09:05:08.708516 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq"]
Feb 03 09:05:08 crc kubenswrapper[4998]: I0203 09:05:08.996357 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq" event={"ID":"6dbe9085-4b64-4d38-93cb-9ff53a5e4e14","Type":"ContainerStarted","Data":"ae582d9be9f292125488744600cfabd86ee00edcd04b4fccf59745b5cd0d8250"}
Feb 03 09:05:08 crc kubenswrapper[4998]: I0203 09:05:08.996695 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq" event={"ID":"6dbe9085-4b64-4d38-93cb-9ff53a5e4e14","Type":"ContainerStarted","Data":"36a8423f2a87e74e8bfe8ce233bbd862fe78f63a212d83c134c952318a5593c3"}
Feb 03 09:05:10 crc kubenswrapper[4998]: I0203 09:05:10.022847 4998 generic.go:334] "Generic (PLEG): container finished" podID="6dbe9085-4b64-4d38-93cb-9ff53a5e4e14" containerID="ae582d9be9f292125488744600cfabd86ee00edcd04b4fccf59745b5cd0d8250" exitCode=0
Feb 03 09:05:10 crc kubenswrapper[4998]: I0203 09:05:10.022989 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq" event={"ID":"6dbe9085-4b64-4d38-93cb-9ff53a5e4e14","Type":"ContainerDied","Data":"ae582d9be9f292125488744600cfabd86ee00edcd04b4fccf59745b5cd0d8250"}
Feb 03 09:05:11 crc kubenswrapper[4998]: I0203 09:05:11.428331 4998 scope.go:117] "RemoveContainer" containerID="f0960bf93408fbd80b1af606d0b0b825518c12f926ca6f6b8748be736ddfda77"
Feb 03 09:05:11 crc kubenswrapper[4998]: E0203 09:05:11.428973 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 09:05:12 crc kubenswrapper[4998]: I0203 09:05:12.048091 4998 generic.go:334] "Generic (PLEG): container finished" podID="6dbe9085-4b64-4d38-93cb-9ff53a5e4e14" containerID="a3876e7478715581b86f9aaa49b1c79c35e103534a311bc1241484405c2a1217" exitCode=0
Feb 03 09:05:12 crc kubenswrapper[4998]: I0203 09:05:12.048159 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq" event={"ID":"6dbe9085-4b64-4d38-93cb-9ff53a5e4e14","Type":"ContainerDied","Data":"a3876e7478715581b86f9aaa49b1c79c35e103534a311bc1241484405c2a1217"}
Feb 03 09:05:12 crc kubenswrapper[4998]: I0203 09:05:12.398676 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-7cfddfbbcc-mxshd" podUID="653c897b-8672-405d-9e1e-e877f22d452e" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.126:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.126:8080: connect: connection refused"
Feb 03 09:05:12 crc kubenswrapper[4998]: I0203 09:05:12.399051 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7cfddfbbcc-mxshd"
Feb 03 09:05:13 crc
kubenswrapper[4998]: I0203 09:05:13.061177 4998 generic.go:334] "Generic (PLEG): container finished" podID="6dbe9085-4b64-4d38-93cb-9ff53a5e4e14" containerID="222a6174d2294d1d80fe92ab0ccb067d30d55a99061267ce1869be4b099c3178" exitCode=0 Feb 03 09:05:13 crc kubenswrapper[4998]: I0203 09:05:13.061245 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq" event={"ID":"6dbe9085-4b64-4d38-93cb-9ff53a5e4e14","Type":"ContainerDied","Data":"222a6174d2294d1d80fe92ab0ccb067d30d55a99061267ce1869be4b099c3178"} Feb 03 09:05:14 crc kubenswrapper[4998]: I0203 09:05:14.460230 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq" Feb 03 09:05:14 crc kubenswrapper[4998]: I0203 09:05:14.593246 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rfhhj\" (UniqueName: \"kubernetes.io/projected/6dbe9085-4b64-4d38-93cb-9ff53a5e4e14-kube-api-access-rfhhj\") pod \"6dbe9085-4b64-4d38-93cb-9ff53a5e4e14\" (UID: \"6dbe9085-4b64-4d38-93cb-9ff53a5e4e14\") " Feb 03 09:05:14 crc kubenswrapper[4998]: I0203 09:05:14.594174 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6dbe9085-4b64-4d38-93cb-9ff53a5e4e14-util\") pod \"6dbe9085-4b64-4d38-93cb-9ff53a5e4e14\" (UID: \"6dbe9085-4b64-4d38-93cb-9ff53a5e4e14\") " Feb 03 09:05:14 crc kubenswrapper[4998]: I0203 09:05:14.594330 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6dbe9085-4b64-4d38-93cb-9ff53a5e4e14-bundle\") pod \"6dbe9085-4b64-4d38-93cb-9ff53a5e4e14\" (UID: \"6dbe9085-4b64-4d38-93cb-9ff53a5e4e14\") " Feb 03 09:05:14 crc kubenswrapper[4998]: I0203 09:05:14.596322 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6dbe9085-4b64-4d38-93cb-9ff53a5e4e14-bundle" (OuterVolumeSpecName: "bundle") pod "6dbe9085-4b64-4d38-93cb-9ff53a5e4e14" (UID: "6dbe9085-4b64-4d38-93cb-9ff53a5e4e14"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 09:05:14 crc kubenswrapper[4998]: I0203 09:05:14.600319 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6dbe9085-4b64-4d38-93cb-9ff53a5e4e14-kube-api-access-rfhhj" (OuterVolumeSpecName: "kube-api-access-rfhhj") pod "6dbe9085-4b64-4d38-93cb-9ff53a5e4e14" (UID: "6dbe9085-4b64-4d38-93cb-9ff53a5e4e14"). InnerVolumeSpecName "kube-api-access-rfhhj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:05:14 crc kubenswrapper[4998]: I0203 09:05:14.604876 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6dbe9085-4b64-4d38-93cb-9ff53a5e4e14-util" (OuterVolumeSpecName: "util") pod "6dbe9085-4b64-4d38-93cb-9ff53a5e4e14" (UID: "6dbe9085-4b64-4d38-93cb-9ff53a5e4e14"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 09:05:14 crc kubenswrapper[4998]: I0203 09:05:14.697396 4998 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6dbe9085-4b64-4d38-93cb-9ff53a5e4e14-util\") on node \"crc\" DevicePath \"\"" Feb 03 09:05:14 crc kubenswrapper[4998]: I0203 09:05:14.697628 4998 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6dbe9085-4b64-4d38-93cb-9ff53a5e4e14-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 09:05:14 crc kubenswrapper[4998]: I0203 09:05:14.697732 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rfhhj\" (UniqueName: \"kubernetes.io/projected/6dbe9085-4b64-4d38-93cb-9ff53a5e4e14-kube-api-access-rfhhj\") on node \"crc\" DevicePath \"\"" Feb 03 09:05:15 crc kubenswrapper[4998]: I0203 09:05:15.084851 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq" event={"ID":"6dbe9085-4b64-4d38-93cb-9ff53a5e4e14","Type":"ContainerDied","Data":"36a8423f2a87e74e8bfe8ce233bbd862fe78f63a212d83c134c952318a5593c3"} Feb 03 09:05:15 crc kubenswrapper[4998]: I0203 09:05:15.084922 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="36a8423f2a87e74e8bfe8ce233bbd862fe78f63a212d83c134c952318a5593c3" Feb 03 09:05:15 crc kubenswrapper[4998]: I0203 09:05:15.084922 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq" Feb 03 09:05:19 crc kubenswrapper[4998]: I0203 09:05:19.133978 4998 generic.go:334] "Generic (PLEG): container finished" podID="653c897b-8672-405d-9e1e-e877f22d452e" containerID="0d2a14409835e97df29bb9a8944b9028284d9694f624cfdf0bc81bddb46be068" exitCode=137 Feb 03 09:05:19 crc kubenswrapper[4998]: I0203 09:05:19.134030 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7cfddfbbcc-mxshd" Feb 03 09:05:19 crc kubenswrapper[4998]: I0203 09:05:19.134087 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7cfddfbbcc-mxshd" event={"ID":"653c897b-8672-405d-9e1e-e877f22d452e","Type":"ContainerDied","Data":"0d2a14409835e97df29bb9a8944b9028284d9694f624cfdf0bc81bddb46be068"} Feb 03 09:05:19 crc kubenswrapper[4998]: I0203 09:05:19.134629 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7cfddfbbcc-mxshd" event={"ID":"653c897b-8672-405d-9e1e-e877f22d452e","Type":"ContainerDied","Data":"627ba7b30f3be4a2360a9f881daeb3691d35cb13dcecd6c3e5e773fc2894da6e"} Feb 03 09:05:19 crc kubenswrapper[4998]: I0203 09:05:19.134659 4998 scope.go:117] "RemoveContainer" containerID="dc45bbf3711fd676156644439365b02c24a3681cfdb8cf65751bd268bf984d6c" Feb 03 09:05:19 crc kubenswrapper[4998]: I0203 09:05:19.224247 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/653c897b-8672-405d-9e1e-e877f22d452e-logs\") pod \"653c897b-8672-405d-9e1e-e877f22d452e\" (UID: \"653c897b-8672-405d-9e1e-e877f22d452e\") " Feb 03 09:05:19 crc kubenswrapper[4998]: I0203 09:05:19.224331 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h5tcx\" (UniqueName: \"kubernetes.io/projected/653c897b-8672-405d-9e1e-e877f22d452e-kube-api-access-h5tcx\") pod \"653c897b-8672-405d-9e1e-e877f22d452e\" (UID: \"653c897b-8672-405d-9e1e-e877f22d452e\") " Feb 03 09:05:19 crc kubenswrapper[4998]: I0203 09:05:19.224417 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/653c897b-8672-405d-9e1e-e877f22d452e-config-data\") pod \"653c897b-8672-405d-9e1e-e877f22d452e\" (UID: \"653c897b-8672-405d-9e1e-e877f22d452e\") " Feb 03 09:05:19 crc kubenswrapper[4998]: I0203 09:05:19.224499 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/653c897b-8672-405d-9e1e-e877f22d452e-scripts\") pod \"653c897b-8672-405d-9e1e-e877f22d452e\" (UID: \"653c897b-8672-405d-9e1e-e877f22d452e\") " Feb 03 09:05:19 crc kubenswrapper[4998]: I0203 09:05:19.224521 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/653c897b-8672-405d-9e1e-e877f22d452e-horizon-secret-key\") pod \"653c897b-8672-405d-9e1e-e877f22d452e\" (UID: \"653c897b-8672-405d-9e1e-e877f22d452e\") " Feb 03 09:05:19 crc kubenswrapper[4998]: I0203 09:05:19.224725 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/653c897b-8672-405d-9e1e-e877f22d452e-logs" (OuterVolumeSpecName: "logs") pod "653c897b-8672-405d-9e1e-e877f22d452e" (UID: "653c897b-8672-405d-9e1e-e877f22d452e"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 09:05:19 crc kubenswrapper[4998]: I0203 09:05:19.225105 4998 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/653c897b-8672-405d-9e1e-e877f22d452e-logs\") on node \"crc\" DevicePath \"\"" Feb 03 09:05:19 crc kubenswrapper[4998]: I0203 09:05:19.231479 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/653c897b-8672-405d-9e1e-e877f22d452e-kube-api-access-h5tcx" (OuterVolumeSpecName: "kube-api-access-h5tcx") pod "653c897b-8672-405d-9e1e-e877f22d452e" (UID: "653c897b-8672-405d-9e1e-e877f22d452e"). InnerVolumeSpecName "kube-api-access-h5tcx". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:05:19 crc kubenswrapper[4998]: I0203 09:05:19.245030 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/653c897b-8672-405d-9e1e-e877f22d452e-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "653c897b-8672-405d-9e1e-e877f22d452e" (UID: "653c897b-8672-405d-9e1e-e877f22d452e"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:05:19 crc kubenswrapper[4998]: I0203 09:05:19.255432 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/653c897b-8672-405d-9e1e-e877f22d452e-scripts" (OuterVolumeSpecName: "scripts") pod "653c897b-8672-405d-9e1e-e877f22d452e" (UID: "653c897b-8672-405d-9e1e-e877f22d452e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 09:05:19 crc kubenswrapper[4998]: I0203 09:05:19.256049 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/653c897b-8672-405d-9e1e-e877f22d452e-config-data" (OuterVolumeSpecName: "config-data") pod "653c897b-8672-405d-9e1e-e877f22d452e" (UID: "653c897b-8672-405d-9e1e-e877f22d452e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 09:05:19 crc kubenswrapper[4998]: I0203 09:05:19.311547 4998 scope.go:117] "RemoveContainer" containerID="0d2a14409835e97df29bb9a8944b9028284d9694f624cfdf0bc81bddb46be068" Feb 03 09:05:19 crc kubenswrapper[4998]: I0203 09:05:19.326524 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h5tcx\" (UniqueName: \"kubernetes.io/projected/653c897b-8672-405d-9e1e-e877f22d452e-kube-api-access-h5tcx\") on node \"crc\" DevicePath \"\"" Feb 03 09:05:19 crc kubenswrapper[4998]: I0203 09:05:19.326561 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/653c897b-8672-405d-9e1e-e877f22d452e-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 09:05:19 crc kubenswrapper[4998]: I0203 09:05:19.326573 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/653c897b-8672-405d-9e1e-e877f22d452e-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 09:05:19 crc kubenswrapper[4998]: I0203 09:05:19.326584 4998 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/653c897b-8672-405d-9e1e-e877f22d452e-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Feb 03 09:05:19 crc kubenswrapper[4998]: I0203 09:05:19.341471 4998 scope.go:117] "RemoveContainer" containerID="dc45bbf3711fd676156644439365b02c24a3681cfdb8cf65751bd268bf984d6c" Feb 03 09:05:19 crc kubenswrapper[4998]: E0203 09:05:19.341943 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc45bbf3711fd676156644439365b02c24a3681cfdb8cf65751bd268bf984d6c\": container with ID starting with dc45bbf3711fd676156644439365b02c24a3681cfdb8cf65751bd268bf984d6c not found: ID does not exist" containerID="dc45bbf3711fd676156644439365b02c24a3681cfdb8cf65751bd268bf984d6c" Feb 03 09:05:19 crc kubenswrapper[4998]: I0203 09:05:19.341974 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc45bbf3711fd676156644439365b02c24a3681cfdb8cf65751bd268bf984d6c"} err="failed to get container status \"dc45bbf3711fd676156644439365b02c24a3681cfdb8cf65751bd268bf984d6c\": rpc error: code = NotFound desc = could not find container \"dc45bbf3711fd676156644439365b02c24a3681cfdb8cf65751bd268bf984d6c\": container with ID starting with dc45bbf3711fd676156644439365b02c24a3681cfdb8cf65751bd268bf984d6c not found: ID does not exist" Feb 03 09:05:19 crc kubenswrapper[4998]: I0203 09:05:19.341993 4998 scope.go:117] "RemoveContainer" containerID="0d2a14409835e97df29bb9a8944b9028284d9694f624cfdf0bc81bddb46be068" Feb 03 09:05:19 crc kubenswrapper[4998]: E0203 09:05:19.342320 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d2a14409835e97df29bb9a8944b9028284d9694f624cfdf0bc81bddb46be068\": container with ID starting with 0d2a14409835e97df29bb9a8944b9028284d9694f624cfdf0bc81bddb46be068 not found: ID does not exist" containerID="0d2a14409835e97df29bb9a8944b9028284d9694f624cfdf0bc81bddb46be068" Feb 03 09:05:19 crc kubenswrapper[4998]: I0203 09:05:19.342340 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d2a14409835e97df29bb9a8944b9028284d9694f624cfdf0bc81bddb46be068"} err="failed to get container status \"0d2a14409835e97df29bb9a8944b9028284d9694f624cfdf0bc81bddb46be068\": rpc error: code = NotFound 
desc = could not find container \"0d2a14409835e97df29bb9a8944b9028284d9694f624cfdf0bc81bddb46be068\": container with ID starting with 0d2a14409835e97df29bb9a8944b9028284d9694f624cfdf0bc81bddb46be068 not found: ID does not exist" Feb 03 09:05:20 crc kubenswrapper[4998]: I0203 09:05:20.145071 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7cfddfbbcc-mxshd" Feb 03 09:05:20 crc kubenswrapper[4998]: I0203 09:05:20.190022 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7cfddfbbcc-mxshd"] Feb 03 09:05:20 crc kubenswrapper[4998]: I0203 09:05:20.203795 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7cfddfbbcc-mxshd"] Feb 03 09:05:20 crc kubenswrapper[4998]: I0203 09:05:20.440918 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="653c897b-8672-405d-9e1e-e877f22d452e" path="/var/lib/kubelet/pods/653c897b-8672-405d-9e1e-e877f22d452e/volumes" Feb 03 09:05:23 crc kubenswrapper[4998]: I0203 09:05:23.427477 4998 scope.go:117] "RemoveContainer" containerID="f0960bf93408fbd80b1af606d0b0b825518c12f926ca6f6b8748be736ddfda77" Feb 03 09:05:23 crc kubenswrapper[4998]: E0203 09:05:23.428102 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.508725 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-f7cmc"] Feb 03 09:05:26 crc kubenswrapper[4998]: E0203 09:05:26.509521 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="653c897b-8672-405d-9e1e-e877f22d452e" containerName="horizon-log" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.509535 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="653c897b-8672-405d-9e1e-e877f22d452e" containerName="horizon-log" Feb 03 09:05:26 crc kubenswrapper[4998]: E0203 09:05:26.509559 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6dbe9085-4b64-4d38-93cb-9ff53a5e4e14" containerName="util" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.509565 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="6dbe9085-4b64-4d38-93cb-9ff53a5e4e14" containerName="util" Feb 03 09:05:26 crc kubenswrapper[4998]: E0203 09:05:26.509583 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="653c897b-8672-405d-9e1e-e877f22d452e" containerName="horizon" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.509590 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="653c897b-8672-405d-9e1e-e877f22d452e" containerName="horizon" Feb 03 09:05:26 crc kubenswrapper[4998]: E0203 09:05:26.509616 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6dbe9085-4b64-4d38-93cb-9ff53a5e4e14" containerName="extract" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.509623 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="6dbe9085-4b64-4d38-93cb-9ff53a5e4e14" containerName="extract" Feb 03 09:05:26 crc kubenswrapper[4998]: E0203 09:05:26.509631 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6dbe9085-4b64-4d38-93cb-9ff53a5e4e14" 
containerName="pull" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.509637 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="6dbe9085-4b64-4d38-93cb-9ff53a5e4e14" containerName="pull" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.509820 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="6dbe9085-4b64-4d38-93cb-9ff53a5e4e14" containerName="extract" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.509833 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="653c897b-8672-405d-9e1e-e877f22d452e" containerName="horizon-log" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.509854 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="653c897b-8672-405d-9e1e-e877f22d452e" containerName="horizon" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.510536 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-f7cmc" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.520889 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.521023 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-c2phz" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.521109 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.536835 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-f7cmc"] Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.577840 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s9ccb\" (UniqueName: \"kubernetes.io/projected/4796b22a-2d45-4db2-ad47-e5e010c0fd02-kube-api-access-s9ccb\") pod \"obo-prometheus-operator-68bc856cb9-f7cmc\" (UID: \"4796b22a-2d45-4db2-ad47-e5e010c0fd02\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-f7cmc" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.646156 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6d8776b8d7-l2nq7"] Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.648607 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d8776b8d7-l2nq7" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.650869 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-d8rsf" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.651428 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.662569 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6d8776b8d7-bwk8z"] Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.663948 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d8776b8d7-bwk8z" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.675971 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6d8776b8d7-l2nq7"] Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.679490 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s9ccb\" (UniqueName: \"kubernetes.io/projected/4796b22a-2d45-4db2-ad47-e5e010c0fd02-kube-api-access-s9ccb\") pod \"obo-prometheus-operator-68bc856cb9-f7cmc\" (UID: \"4796b22a-2d45-4db2-ad47-e5e010c0fd02\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-f7cmc" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.687253 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6d8776b8d7-bwk8z"] Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.748153 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s9ccb\" (UniqueName: \"kubernetes.io/projected/4796b22a-2d45-4db2-ad47-e5e010c0fd02-kube-api-access-s9ccb\") pod \"obo-prometheus-operator-68bc856cb9-f7cmc\" (UID: \"4796b22a-2d45-4db2-ad47-e5e010c0fd02\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-f7cmc" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.781452 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/476d94bb-903b-4592-af4f-57cc905396aa-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6d8776b8d7-l2nq7\" (UID: \"476d94bb-903b-4592-af4f-57cc905396aa\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d8776b8d7-l2nq7" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.781510 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a336920f-f4e8-4073-b392-bd5b2a4dffa3-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6d8776b8d7-bwk8z\" (UID: \"a336920f-f4e8-4073-b392-bd5b2a4dffa3\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d8776b8d7-bwk8z" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.781536 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/476d94bb-903b-4592-af4f-57cc905396aa-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6d8776b8d7-l2nq7\" (UID: \"476d94bb-903b-4592-af4f-57cc905396aa\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d8776b8d7-l2nq7" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.781576 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a336920f-f4e8-4073-b392-bd5b2a4dffa3-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6d8776b8d7-bwk8z\" (UID: \"a336920f-f4e8-4073-b392-bd5b2a4dffa3\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d8776b8d7-bwk8z" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.848249 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-dhcsw"] Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.849644 4998 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-59bdc8b94-dhcsw" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.851249 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.853349 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-pm6l5" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.875319 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-f7cmc" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.883170 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-dhcsw"] Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.890879 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/476d94bb-903b-4592-af4f-57cc905396aa-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6d8776b8d7-l2nq7\" (UID: \"476d94bb-903b-4592-af4f-57cc905396aa\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d8776b8d7-l2nq7" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.890946 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a336920f-f4e8-4073-b392-bd5b2a4dffa3-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6d8776b8d7-bwk8z\" (UID: \"a336920f-f4e8-4073-b392-bd5b2a4dffa3\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d8776b8d7-bwk8z" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.890978 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/476d94bb-903b-4592-af4f-57cc905396aa-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6d8776b8d7-l2nq7\" (UID: \"476d94bb-903b-4592-af4f-57cc905396aa\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d8776b8d7-l2nq7" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.891054 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a336920f-f4e8-4073-b392-bd5b2a4dffa3-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6d8776b8d7-bwk8z\" (UID: \"a336920f-f4e8-4073-b392-bd5b2a4dffa3\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d8776b8d7-bwk8z" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.896422 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a336920f-f4e8-4073-b392-bd5b2a4dffa3-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6d8776b8d7-bwk8z\" (UID: \"a336920f-f4e8-4073-b392-bd5b2a4dffa3\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d8776b8d7-bwk8z" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.907237 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/476d94bb-903b-4592-af4f-57cc905396aa-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6d8776b8d7-l2nq7\" (UID: \"476d94bb-903b-4592-af4f-57cc905396aa\") " 
pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d8776b8d7-l2nq7" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.930329 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/476d94bb-903b-4592-af4f-57cc905396aa-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6d8776b8d7-l2nq7\" (UID: \"476d94bb-903b-4592-af4f-57cc905396aa\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d8776b8d7-l2nq7" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.931438 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a336920f-f4e8-4073-b392-bd5b2a4dffa3-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6d8776b8d7-bwk8z\" (UID: \"a336920f-f4e8-4073-b392-bd5b2a4dffa3\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d8776b8d7-bwk8z" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.985668 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d8776b8d7-l2nq7" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.997261 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/96c6743e-239d-4bbc-adf2-4012a6af6282-observability-operator-tls\") pod \"observability-operator-59bdc8b94-dhcsw\" (UID: \"96c6743e-239d-4bbc-adf2-4012a6af6282\") " pod="openshift-operators/observability-operator-59bdc8b94-dhcsw" Feb 03 09:05:26 crc kubenswrapper[4998]: I0203 09:05:26.997331 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v8v89\" (UniqueName: \"kubernetes.io/projected/96c6743e-239d-4bbc-adf2-4012a6af6282-kube-api-access-v8v89\") pod \"observability-operator-59bdc8b94-dhcsw\" (UID: \"96c6743e-239d-4bbc-adf2-4012a6af6282\") " pod="openshift-operators/observability-operator-59bdc8b94-dhcsw" Feb 03 09:05:27 crc kubenswrapper[4998]: I0203 09:05:27.006619 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d8776b8d7-bwk8z" Feb 03 09:05:27 crc kubenswrapper[4998]: I0203 09:05:27.056551 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-e639-account-create-update-btlh6"] Feb 03 09:05:27 crc kubenswrapper[4998]: I0203 09:05:27.074492 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-g9cxn"] Feb 03 09:05:27 crc kubenswrapper[4998]: I0203 09:05:27.090379 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-e639-account-create-update-btlh6"] Feb 03 09:05:27 crc kubenswrapper[4998]: I0203 09:05:27.099251 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/96c6743e-239d-4bbc-adf2-4012a6af6282-observability-operator-tls\") pod \"observability-operator-59bdc8b94-dhcsw\" (UID: \"96c6743e-239d-4bbc-adf2-4012a6af6282\") " pod="openshift-operators/observability-operator-59bdc8b94-dhcsw" Feb 03 09:05:27 crc kubenswrapper[4998]: I0203 09:05:27.099320 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v8v89\" (UniqueName: \"kubernetes.io/projected/96c6743e-239d-4bbc-adf2-4012a6af6282-kube-api-access-v8v89\") pod \"observability-operator-59bdc8b94-dhcsw\" (UID: \"96c6743e-239d-4bbc-adf2-4012a6af6282\") " pod="openshift-operators/observability-operator-59bdc8b94-dhcsw" Feb 03 09:05:27 crc kubenswrapper[4998]: I0203 09:05:27.107031 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-g9cxn"] Feb 03 09:05:27 crc kubenswrapper[4998]: I0203 09:05:27.110565 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/96c6743e-239d-4bbc-adf2-4012a6af6282-observability-operator-tls\") pod \"observability-operator-59bdc8b94-dhcsw\" (UID: \"96c6743e-239d-4bbc-adf2-4012a6af6282\") " pod="openshift-operators/observability-operator-59bdc8b94-dhcsw" Feb 03 09:05:27 crc kubenswrapper[4998]: I0203 09:05:27.116586 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-t2wss"] Feb 03 09:05:27 crc kubenswrapper[4998]: I0203 09:05:27.118033 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5bf474d74f-t2wss" Feb 03 09:05:27 crc kubenswrapper[4998]: I0203 09:05:27.127140 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-t2wss"] Feb 03 09:05:27 crc kubenswrapper[4998]: I0203 09:05:27.127182 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-84lp5" Feb 03 09:05:27 crc kubenswrapper[4998]: I0203 09:05:27.134481 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v8v89\" (UniqueName: \"kubernetes.io/projected/96c6743e-239d-4bbc-adf2-4012a6af6282-kube-api-access-v8v89\") pod \"observability-operator-59bdc8b94-dhcsw\" (UID: \"96c6743e-239d-4bbc-adf2-4012a6af6282\") " pod="openshift-operators/observability-operator-59bdc8b94-dhcsw" Feb 03 09:05:27 crc kubenswrapper[4998]: I0203 09:05:27.170534 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-59bdc8b94-dhcsw" Feb 03 09:05:27 crc kubenswrapper[4998]: I0203 09:05:27.202738 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/9f65ecc3-6c59-4f35-b5b4-927c48f1f89b-openshift-service-ca\") pod \"perses-operator-5bf474d74f-t2wss\" (UID: \"9f65ecc3-6c59-4f35-b5b4-927c48f1f89b\") " pod="openshift-operators/perses-operator-5bf474d74f-t2wss" Feb 03 09:05:27 crc kubenswrapper[4998]: I0203 09:05:27.203399 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wwrw\" (UniqueName: \"kubernetes.io/projected/9f65ecc3-6c59-4f35-b5b4-927c48f1f89b-kube-api-access-6wwrw\") pod \"perses-operator-5bf474d74f-t2wss\" (UID: \"9f65ecc3-6c59-4f35-b5b4-927c48f1f89b\") " pod="openshift-operators/perses-operator-5bf474d74f-t2wss" Feb 03 09:05:27 crc kubenswrapper[4998]: I0203 09:05:27.305952 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/9f65ecc3-6c59-4f35-b5b4-927c48f1f89b-openshift-service-ca\") pod \"perses-operator-5bf474d74f-t2wss\" (UID: \"9f65ecc3-6c59-4f35-b5b4-927c48f1f89b\") " pod="openshift-operators/perses-operator-5bf474d74f-t2wss" Feb 03 09:05:27 crc kubenswrapper[4998]: I0203 09:05:27.306167 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wwrw\" (UniqueName: \"kubernetes.io/projected/9f65ecc3-6c59-4f35-b5b4-927c48f1f89b-kube-api-access-6wwrw\") pod \"perses-operator-5bf474d74f-t2wss\" (UID: \"9f65ecc3-6c59-4f35-b5b4-927c48f1f89b\") " pod="openshift-operators/perses-operator-5bf474d74f-t2wss" Feb 03 09:05:27 crc kubenswrapper[4998]: I0203 09:05:27.307415 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/9f65ecc3-6c59-4f35-b5b4-927c48f1f89b-openshift-service-ca\") pod \"perses-operator-5bf474d74f-t2wss\" (UID: \"9f65ecc3-6c59-4f35-b5b4-927c48f1f89b\") " pod="openshift-operators/perses-operator-5bf474d74f-t2wss" Feb 03 09:05:27 crc kubenswrapper[4998]: I0203 09:05:27.326673 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6wwrw\" (UniqueName: \"kubernetes.io/projected/9f65ecc3-6c59-4f35-b5b4-927c48f1f89b-kube-api-access-6wwrw\") pod \"perses-operator-5bf474d74f-t2wss\" (UID: \"9f65ecc3-6c59-4f35-b5b4-927c48f1f89b\") " pod="openshift-operators/perses-operator-5bf474d74f-t2wss" Feb 03 09:05:27 crc kubenswrapper[4998]: I0203 09:05:27.450512 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5bf474d74f-t2wss" Feb 03 09:05:27 crc kubenswrapper[4998]: I0203 09:05:27.705448 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6d8776b8d7-bwk8z"] Feb 03 09:05:27 crc kubenswrapper[4998]: W0203 09:05:27.708965 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda336920f_f4e8_4073_b392_bd5b2a4dffa3.slice/crio-e03daf1e32e41bbd6743bb2192530f17e5a991dc5e4f850bae2f35bcb1b0dc40 WatchSource:0}: Error finding container e03daf1e32e41bbd6743bb2192530f17e5a991dc5e4f850bae2f35bcb1b0dc40: Status 404 returned error can't find the container with id e03daf1e32e41bbd6743bb2192530f17e5a991dc5e4f850bae2f35bcb1b0dc40 Feb 03 09:05:27 crc kubenswrapper[4998]: W0203 09:05:27.713261 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod476d94bb_903b_4592_af4f_57cc905396aa.slice/crio-cbfe36f91682544c635c825220c1594a029c947e7255b87ae20f30b18178017f WatchSource:0}: Error finding container cbfe36f91682544c635c825220c1594a029c947e7255b87ae20f30b18178017f: Status 404 returned error can't find the container with id cbfe36f91682544c635c825220c1594a029c947e7255b87ae20f30b18178017f Feb 03 09:05:27 crc kubenswrapper[4998]: I0203 09:05:27.717776 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-f7cmc"] Feb 03 09:05:27 crc kubenswrapper[4998]: I0203 09:05:27.734438 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6d8776b8d7-l2nq7"] Feb 03 09:05:27 crc kubenswrapper[4998]: I0203 09:05:27.876520 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-t2wss"] Feb 03 09:05:27 crc kubenswrapper[4998]: I0203 09:05:27.954245 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-dhcsw"] Feb 03 09:05:27 crc kubenswrapper[4998]: W0203 09:05:27.954576 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod96c6743e_239d_4bbc_adf2_4012a6af6282.slice/crio-104f87cf1e5c7450d4ad6953776b3cfaab0b2e2326e8b79ed37c8a991b854b7d WatchSource:0}: Error finding container 104f87cf1e5c7450d4ad6953776b3cfaab0b2e2326e8b79ed37c8a991b854b7d: Status 404 returned error can't find the container with id 104f87cf1e5c7450d4ad6953776b3cfaab0b2e2326e8b79ed37c8a991b854b7d Feb 03 09:05:28 crc kubenswrapper[4998]: I0203 09:05:28.248217 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d8776b8d7-l2nq7" event={"ID":"476d94bb-903b-4592-af4f-57cc905396aa","Type":"ContainerStarted","Data":"cbfe36f91682544c635c825220c1594a029c947e7255b87ae20f30b18178017f"} Feb 03 09:05:28 crc kubenswrapper[4998]: I0203 09:05:28.249844 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5bf474d74f-t2wss" event={"ID":"9f65ecc3-6c59-4f35-b5b4-927c48f1f89b","Type":"ContainerStarted","Data":"df2259abc131e94d1dfd31cc6ce274750a0db5a5a99bbae0b15c7c93c47489e9"} Feb 03 09:05:28 crc kubenswrapper[4998]: I0203 09:05:28.283078 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-f7cmc" 
event={"ID":"4796b22a-2d45-4db2-ad47-e5e010c0fd02","Type":"ContainerStarted","Data":"77c5eec18ab6c8684fe630077c671dbb25b52b552cbbfb03539b2b8fe33391fa"} Feb 03 09:05:28 crc kubenswrapper[4998]: I0203 09:05:28.297605 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-59bdc8b94-dhcsw" event={"ID":"96c6743e-239d-4bbc-adf2-4012a6af6282","Type":"ContainerStarted","Data":"104f87cf1e5c7450d4ad6953776b3cfaab0b2e2326e8b79ed37c8a991b854b7d"} Feb 03 09:05:28 crc kubenswrapper[4998]: I0203 09:05:28.311954 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d8776b8d7-bwk8z" event={"ID":"a336920f-f4e8-4073-b392-bd5b2a4dffa3","Type":"ContainerStarted","Data":"e03daf1e32e41bbd6743bb2192530f17e5a991dc5e4f850bae2f35bcb1b0dc40"} Feb 03 09:05:28 crc kubenswrapper[4998]: I0203 09:05:28.439972 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06811dbe-b72e-435d-b0a3-474209808036" path="/var/lib/kubelet/pods/06811dbe-b72e-435d-b0a3-474209808036/volumes" Feb 03 09:05:28 crc kubenswrapper[4998]: I0203 09:05:28.440825 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79bf0736-7674-434e-9bd1-25f3950556d6" path="/var/lib/kubelet/pods/79bf0736-7674-434e-9bd1-25f3950556d6/volumes" Feb 03 09:05:35 crc kubenswrapper[4998]: I0203 09:05:35.427806 4998 scope.go:117] "RemoveContainer" containerID="f0960bf93408fbd80b1af606d0b0b825518c12f926ca6f6b8748be736ddfda77" Feb 03 09:05:35 crc kubenswrapper[4998]: E0203 09:05:35.428525 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 09:05:39 crc kubenswrapper[4998]: I0203 09:05:39.062069 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-j6skg"] Feb 03 09:05:39 crc kubenswrapper[4998]: I0203 09:05:39.091170 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-j6skg"] Feb 03 09:05:40 crc kubenswrapper[4998]: I0203 09:05:40.438667 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a12fec4-4f0b-415b-8dbe-2a81626c82d6" path="/var/lib/kubelet/pods/7a12fec4-4f0b-415b-8dbe-2a81626c82d6/volumes" Feb 03 09:05:45 crc kubenswrapper[4998]: E0203 09:05:45.079826 4998 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:2ecf763b02048d2cf4c17967a7b2cacc7afd6af0e963a39579d876f8f4170e3c" Feb 03 09:05:45 crc kubenswrapper[4998]: E0203 09:05:45.080428 4998 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:2ecf763b02048d2cf4c17967a7b2cacc7afd6af0e963a39579d876f8f4170e3c,Command:[],Args:[--namespace=$(NAMESPACE) --images=perses=$(RELATED_IMAGE_PERSES) --images=alertmanager=$(RELATED_IMAGE_ALERTMANAGER) --images=prometheus=$(RELATED_IMAGE_PROMETHEUS) --images=thanos=$(RELATED_IMAGE_THANOS) 
--images=ui-dashboards=$(RELATED_IMAGE_CONSOLE_DASHBOARDS_PLUGIN) --images=ui-distributed-tracing=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN) --images=ui-distributed-tracing-pf5=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF5) --images=ui-distributed-tracing-pf4=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF4) --images=ui-logging=$(RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN) --images=ui-logging-pf4=$(RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN_PF4) --images=ui-troubleshooting-panel=$(RELATED_IMAGE_CONSOLE_TROUBLESHOOTING_PANEL_PLUGIN) --images=ui-monitoring=$(RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN) --images=ui-monitoring-pf5=$(RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN_PF5) --images=korrel8r=$(RELATED_IMAGE_KORREL8R) --images=health-analyzer=$(RELATED_IMAGE_CLUSTER_HEALTH_ANALYZER) --openshift.enabled=true],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:RELATED_IMAGE_ALERTMANAGER,Value:registry.redhat.io/cluster-observability-operator/alertmanager-rhel9@sha256:dc62889b883f597de91b5389cc52c84c607247d49a807693be2f688e4703dfc3,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PROMETHEUS,Value:registry.redhat.io/cluster-observability-operator/prometheus-rhel9@sha256:1b555e21bba7c609111ace4380382a696d9aceeb6e9816bf9023b8f689b6c741,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_THANOS,Value:registry.redhat.io/cluster-observability-operator/thanos-rhel9@sha256:a223bab813b82d698992490bbb60927f6288a83ba52d539836c250e1471f6d34,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PERSES,Value:registry.redhat.io/cluster-observability-operator/perses-rhel9@sha256:e797cdb47beef40b04da7b6d645bca3dc32e6247003c45b56b38efd9e13bf01c,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DASHBOARDS_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/dashboards-console-plugin-rhel9@sha256:093d2731ac848ed5fd57356b155a19d3bf7b8db96d95b09c5d0095e143f7254f,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-rhel9@sha256:7d662a120305e2528acc7e9142b770b5b6a7f4932ddfcadfa4ac953935124895,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF5,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-pf5-rhel9@sha256:75465aabb0aa427a5c531a8fcde463f6d119afbcc618ebcbf6b7ee9bc8aad160,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF4,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-pf4-rhel9@sha256:dc18c8d6a4a9a0a574a57cc5082c8a9b26023bd6d69b9732892d584c1dfe5070,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/logging-console-plugin-rhel9@sha256:369729978cecdc13c99ef3d179f8eb8a450a4a0cb70b63c27a55a15d1710ba27,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN_PF4,Value:registry.redhat.io/cluster-observability-operator/logging-console-plugin-pf4-rhel9@sha256:d8c7a61d147f62b204d5c5f16864386025393453c9a81ea327bbd25d7765d611,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_TROUBLESHOOTING_PANEL_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/troubleshooting-panel-console-plugin-rhel9@sha256:b4a6eb1cc118a4334b424614959d8b7f361ddd779b3a72690ca49b0a3f26d9b8,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_
CONSOLE_MONITORING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/monitoring-console-plugin-rhel9@sha256:21d4fff670893ba4b7fbc528cd49f8b71c8281cede9ef84f0697065bb6a7fc50,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN_PF5,Value:registry.redhat.io/cluster-observability-operator/monitoring-console-plugin-pf5-rhel9@sha256:12d9dbe297a1c3b9df671f21156992082bc483887d851fafe76e5d17321ff474,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KORREL8R,Value:registry.redhat.io/cluster-observability-operator/korrel8r-rhel9@sha256:e65c37f04f6d76a0cbfe05edb3cddf6a8f14f859ee35cf3aebea8fcb991d2c19,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CLUSTER_HEALTH_ANALYZER,Value:registry.redhat.io/cluster-observability-operator/cluster-health-analyzer-rhel9@sha256:48e4e178c6eeaa9d5dd77a591c185a311b4b4a5caadb7199d48463123e31dc9e,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.1,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{400 -3} {} 400m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:observability-operator-tls,ReadOnly:true,MountPath:/etc/tls/private,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-v8v89,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000350000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod observability-operator-59bdc8b94-dhcsw_openshift-operators(96c6743e-239d-4bbc-adf2-4012a6af6282): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 03 09:05:45 crc kubenswrapper[4998]: E0203 09:05:45.081635 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/observability-operator-59bdc8b94-dhcsw" podUID="96c6743e-239d-4bbc-adf2-4012a6af6282" Feb 03 09:05:45 crc kubenswrapper[4998]: E0203 09:05:45.551819 4998 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:2ecf763b02048d2cf4c17967a7b2cacc7afd6af0e963a39579d876f8f4170e3c\\\"\"" pod="openshift-operators/observability-operator-59bdc8b94-dhcsw" podUID="96c6743e-239d-4bbc-adf2-4012a6af6282" Feb 03 09:05:46 crc kubenswrapper[4998]: I0203 09:05:46.562189 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d8776b8d7-bwk8z" event={"ID":"a336920f-f4e8-4073-b392-bd5b2a4dffa3","Type":"ContainerStarted","Data":"a533f97bdacac1f40072ebe8afbb33e813ac5fda51e448a38f1baede38b641b0"} Feb 03 09:05:46 crc kubenswrapper[4998]: I0203 09:05:46.565237 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d8776b8d7-l2nq7" event={"ID":"476d94bb-903b-4592-af4f-57cc905396aa","Type":"ContainerStarted","Data":"0e22cf794ecbda1252a391ccc3f6cd201fe5ee6f87be91d71ba8589c81d92798"} Feb 03 09:05:46 crc kubenswrapper[4998]: I0203 09:05:46.573014 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5bf474d74f-t2wss" event={"ID":"9f65ecc3-6c59-4f35-b5b4-927c48f1f89b","Type":"ContainerStarted","Data":"206bfd6023cf2760e6b30f6afc9554ce84d3b5c3a24640d2f0509df4a83e4926"} Feb 03 09:05:46 crc kubenswrapper[4998]: I0203 09:05:46.574009 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5bf474d74f-t2wss" Feb 03 09:05:46 crc kubenswrapper[4998]: I0203 09:05:46.586127 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-f7cmc" event={"ID":"4796b22a-2d45-4db2-ad47-e5e010c0fd02","Type":"ContainerStarted","Data":"c8f20ad2376ebd518b09424642f39d7fe1b98bee16bbeb345698e856f2db0ac7"} Feb 03 09:05:46 crc kubenswrapper[4998]: I0203 09:05:46.609589 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d8776b8d7-bwk8z" podStartSLOduration=3.044662639 podStartE2EDuration="20.609570406s" podCreationTimestamp="2026-02-03 09:05:26 +0000 UTC" firstStartedPulling="2026-02-03 09:05:27.710739979 +0000 UTC m=+8365.997433785" lastFinishedPulling="2026-02-03 09:05:45.275647726 +0000 UTC m=+8383.562341552" observedRunningTime="2026-02-03 09:05:46.604317846 +0000 UTC m=+8384.891011672" watchObservedRunningTime="2026-02-03 09:05:46.609570406 +0000 UTC m=+8384.896264212" Feb 03 09:05:46 crc kubenswrapper[4998]: I0203 09:05:46.644507 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d8776b8d7-l2nq7" podStartSLOduration=3.085720935 podStartE2EDuration="20.644481827s" podCreationTimestamp="2026-02-03 09:05:26 +0000 UTC" firstStartedPulling="2026-02-03 09:05:27.715874255 +0000 UTC m=+8366.002568061" lastFinishedPulling="2026-02-03 09:05:45.274635137 +0000 UTC m=+8383.561328953" observedRunningTime="2026-02-03 09:05:46.629311346 +0000 UTC m=+8384.916005232" watchObservedRunningTime="2026-02-03 09:05:46.644481827 +0000 UTC m=+8384.931175633" Feb 03 09:05:46 crc kubenswrapper[4998]: I0203 09:05:46.666804 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5bf474d74f-t2wss" podStartSLOduration=2.254976189 podStartE2EDuration="19.666769359s" 
podCreationTimestamp="2026-02-03 09:05:27 +0000 UTC" firstStartedPulling="2026-02-03 09:05:27.905253171 +0000 UTC m=+8366.191946977" lastFinishedPulling="2026-02-03 09:05:45.317046351 +0000 UTC m=+8383.603740147" observedRunningTime="2026-02-03 09:05:46.664528066 +0000 UTC m=+8384.951221872" watchObservedRunningTime="2026-02-03 09:05:46.666769359 +0000 UTC m=+8384.953463165" Feb 03 09:05:46 crc kubenswrapper[4998]: I0203 09:05:46.726078 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-f7cmc" podStartSLOduration=3.171188011 podStartE2EDuration="20.726057913s" podCreationTimestamp="2026-02-03 09:05:26 +0000 UTC" firstStartedPulling="2026-02-03 09:05:27.719760575 +0000 UTC m=+8366.006454381" lastFinishedPulling="2026-02-03 09:05:45.274630477 +0000 UTC m=+8383.561324283" observedRunningTime="2026-02-03 09:05:46.721190614 +0000 UTC m=+8385.007884440" watchObservedRunningTime="2026-02-03 09:05:46.726057913 +0000 UTC m=+8385.012751719" Feb 03 09:05:47 crc kubenswrapper[4998]: I0203 09:05:47.427256 4998 scope.go:117] "RemoveContainer" containerID="f0960bf93408fbd80b1af606d0b0b825518c12f926ca6f6b8748be736ddfda77" Feb 03 09:05:47 crc kubenswrapper[4998]: E0203 09:05:47.427557 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 09:05:57 crc kubenswrapper[4998]: I0203 09:05:57.453284 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5bf474d74f-t2wss" Feb 03 09:05:58 crc kubenswrapper[4998]: I0203 09:05:58.727163 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-59bdc8b94-dhcsw" event={"ID":"96c6743e-239d-4bbc-adf2-4012a6af6282","Type":"ContainerStarted","Data":"53649cb47fad3089d029a97ebe334e258de375c0dd5d74f13a5a8a342177ce8c"} Feb 03 09:05:58 crc kubenswrapper[4998]: I0203 09:05:58.728700 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-59bdc8b94-dhcsw" Feb 03 09:05:58 crc kubenswrapper[4998]: I0203 09:05:58.766536 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-59bdc8b94-dhcsw" Feb 03 09:05:58 crc kubenswrapper[4998]: I0203 09:05:58.769632 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-59bdc8b94-dhcsw" podStartSLOduration=3.358382974 podStartE2EDuration="32.769616962s" podCreationTimestamp="2026-02-03 09:05:26 +0000 UTC" firstStartedPulling="2026-02-03 09:05:27.960451688 +0000 UTC m=+8366.247145494" lastFinishedPulling="2026-02-03 09:05:57.371685676 +0000 UTC m=+8395.658379482" observedRunningTime="2026-02-03 09:05:58.765732462 +0000 UTC m=+8397.052426288" watchObservedRunningTime="2026-02-03 09:05:58.769616962 +0000 UTC m=+8397.056310768" Feb 03 09:06:01 crc kubenswrapper[4998]: I0203 09:06:01.428020 4998 scope.go:117] "RemoveContainer" containerID="f0960bf93408fbd80b1af606d0b0b825518c12f926ca6f6b8748be736ddfda77" Feb 03 09:06:01 crc kubenswrapper[4998]: E0203 09:06:01.429021 4998 pod_workers.go:1301] "Error 
syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 09:06:01 crc kubenswrapper[4998]: I0203 09:06:01.430207 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Feb 03 09:06:01 crc kubenswrapper[4998]: I0203 09:06:01.430437 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="098a8d80-a86e-4f18-811b-dec1c91614d8" containerName="openstackclient" containerID="cri-o://fb292e710049b7e29363c687f3180632419bef33ce334d7da9e9bb6582fe69b9" gracePeriod=2 Feb 03 09:06:01 crc kubenswrapper[4998]: I0203 09:06:01.447977 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Feb 03 09:06:01 crc kubenswrapper[4998]: I0203 09:06:01.540241 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Feb 03 09:06:01 crc kubenswrapper[4998]: E0203 09:06:01.542205 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="098a8d80-a86e-4f18-811b-dec1c91614d8" containerName="openstackclient" Feb 03 09:06:01 crc kubenswrapper[4998]: I0203 09:06:01.542245 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="098a8d80-a86e-4f18-811b-dec1c91614d8" containerName="openstackclient" Feb 03 09:06:01 crc kubenswrapper[4998]: I0203 09:06:01.543523 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="098a8d80-a86e-4f18-811b-dec1c91614d8" containerName="openstackclient" Feb 03 09:06:01 crc kubenswrapper[4998]: I0203 09:06:01.549246 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Feb 03 09:06:01 crc kubenswrapper[4998]: I0203 09:06:01.555499 4998 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="098a8d80-a86e-4f18-811b-dec1c91614d8" podUID="43f887ad-f9d1-4f23-b9be-21ad6bb1cd26" Feb 03 09:06:01 crc kubenswrapper[4998]: I0203 09:06:01.573536 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Feb 03 09:06:01 crc kubenswrapper[4998]: I0203 09:06:01.610374 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/43f887ad-f9d1-4f23-b9be-21ad6bb1cd26-openstack-config-secret\") pod \"openstackclient\" (UID: \"43f887ad-f9d1-4f23-b9be-21ad6bb1cd26\") " pod="openstack/openstackclient" Feb 03 09:06:01 crc kubenswrapper[4998]: I0203 09:06:01.610441 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/43f887ad-f9d1-4f23-b9be-21ad6bb1cd26-openstack-config\") pod \"openstackclient\" (UID: \"43f887ad-f9d1-4f23-b9be-21ad6bb1cd26\") " pod="openstack/openstackclient" Feb 03 09:06:01 crc kubenswrapper[4998]: I0203 09:06:01.610497 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7f4k\" (UniqueName: \"kubernetes.io/projected/43f887ad-f9d1-4f23-b9be-21ad6bb1cd26-kube-api-access-w7f4k\") pod \"openstackclient\" (UID: \"43f887ad-f9d1-4f23-b9be-21ad6bb1cd26\") " pod="openstack/openstackclient" Feb 03 09:06:01 crc kubenswrapper[4998]: I0203 09:06:01.698265 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Feb 03 09:06:01 crc kubenswrapper[4998]: I0203 09:06:01.699663 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 03 09:06:01 crc kubenswrapper[4998]: I0203 09:06:01.702152 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-wb59p" Feb 03 09:06:01 crc kubenswrapper[4998]: I0203 09:06:01.712071 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7f4k\" (UniqueName: \"kubernetes.io/projected/43f887ad-f9d1-4f23-b9be-21ad6bb1cd26-kube-api-access-w7f4k\") pod \"openstackclient\" (UID: \"43f887ad-f9d1-4f23-b9be-21ad6bb1cd26\") " pod="openstack/openstackclient" Feb 03 09:06:01 crc kubenswrapper[4998]: I0203 09:06:01.712264 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/43f887ad-f9d1-4f23-b9be-21ad6bb1cd26-openstack-config-secret\") pod \"openstackclient\" (UID: \"43f887ad-f9d1-4f23-b9be-21ad6bb1cd26\") " pod="openstack/openstackclient" Feb 03 09:06:01 crc kubenswrapper[4998]: I0203 09:06:01.712301 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/43f887ad-f9d1-4f23-b9be-21ad6bb1cd26-openstack-config\") pod \"openstackclient\" (UID: \"43f887ad-f9d1-4f23-b9be-21ad6bb1cd26\") " pod="openstack/openstackclient" Feb 03 09:06:01 crc kubenswrapper[4998]: I0203 09:06:01.713078 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/43f887ad-f9d1-4f23-b9be-21ad6bb1cd26-openstack-config\") pod \"openstackclient\" (UID: \"43f887ad-f9d1-4f23-b9be-21ad6bb1cd26\") " pod="openstack/openstackclient" Feb 03 09:06:01 crc kubenswrapper[4998]: I0203 09:06:01.717202 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 03 09:06:01 crc kubenswrapper[4998]: I0203 09:06:01.737083 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7f4k\" (UniqueName: \"kubernetes.io/projected/43f887ad-f9d1-4f23-b9be-21ad6bb1cd26-kube-api-access-w7f4k\") pod \"openstackclient\" (UID: \"43f887ad-f9d1-4f23-b9be-21ad6bb1cd26\") " pod="openstack/openstackclient" Feb 03 09:06:01 crc kubenswrapper[4998]: I0203 09:06:01.742381 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/43f887ad-f9d1-4f23-b9be-21ad6bb1cd26-openstack-config-secret\") pod \"openstackclient\" (UID: \"43f887ad-f9d1-4f23-b9be-21ad6bb1cd26\") " pod="openstack/openstackclient" Feb 03 09:06:01 crc kubenswrapper[4998]: I0203 09:06:01.815270 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v4rxh\" (UniqueName: \"kubernetes.io/projected/7afff0dd-f948-4551-b5ab-54c33db00a60-kube-api-access-v4rxh\") pod \"kube-state-metrics-0\" (UID: \"7afff0dd-f948-4551-b5ab-54c33db00a60\") " pod="openstack/kube-state-metrics-0" Feb 03 09:06:01 crc kubenswrapper[4998]: I0203 09:06:01.884919 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Feb 03 09:06:01 crc kubenswrapper[4998]: I0203 09:06:01.917405 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v4rxh\" (UniqueName: \"kubernetes.io/projected/7afff0dd-f948-4551-b5ab-54c33db00a60-kube-api-access-v4rxh\") pod \"kube-state-metrics-0\" (UID: \"7afff0dd-f948-4551-b5ab-54c33db00a60\") " pod="openstack/kube-state-metrics-0" Feb 03 09:06:01 crc kubenswrapper[4998]: I0203 09:06:01.959770 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v4rxh\" (UniqueName: \"kubernetes.io/projected/7afff0dd-f948-4551-b5ab-54c33db00a60-kube-api-access-v4rxh\") pod \"kube-state-metrics-0\" (UID: \"7afff0dd-f948-4551-b5ab-54c33db00a60\") " pod="openstack/kube-state-metrics-0" Feb 03 09:06:02 crc kubenswrapper[4998]: I0203 09:06:02.019418 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 03 09:06:02 crc kubenswrapper[4998]: I0203 09:06:02.715917 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/alertmanager-metric-storage-0"] Feb 03 09:06:02 crc kubenswrapper[4998]: I0203 09:06:02.718268 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0" Feb 03 09:06:02 crc kubenswrapper[4998]: I0203 09:06:02.755826 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-alertmanager-dockercfg-8bj9k" Feb 03 09:06:02 crc kubenswrapper[4998]: I0203 09:06:02.756038 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-tls-assets-0" Feb 03 09:06:02 crc kubenswrapper[4998]: I0203 09:06:02.756175 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-cluster-tls-config" Feb 03 09:06:02 crc kubenswrapper[4998]: I0203 09:06:02.757064 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-generated" Feb 03 09:06:02 crc kubenswrapper[4998]: I0203 09:06:02.761116 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-web-config" Feb 03 09:06:02 crc kubenswrapper[4998]: I0203 09:06:02.785683 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"] Feb 03 09:06:02 crc kubenswrapper[4998]: I0203 09:06:02.862104 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/a4055a58-20e7-4b1b-82da-876889ea8c68-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"a4055a58-20e7-4b1b-82da-876889ea8c68\") " pod="openstack/alertmanager-metric-storage-0" Feb 03 09:06:02 crc kubenswrapper[4998]: I0203 09:06:02.862469 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/a4055a58-20e7-4b1b-82da-876889ea8c68-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"a4055a58-20e7-4b1b-82da-876889ea8c68\") " pod="openstack/alertmanager-metric-storage-0" Feb 03 09:06:02 crc kubenswrapper[4998]: I0203 09:06:02.862492 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/a4055a58-20e7-4b1b-82da-876889ea8c68-tls-assets\") pod \"alertmanager-metric-storage-0\" 
(UID: \"a4055a58-20e7-4b1b-82da-876889ea8c68\") " pod="openstack/alertmanager-metric-storage-0" Feb 03 09:06:02 crc kubenswrapper[4998]: I0203 09:06:02.862508 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/a4055a58-20e7-4b1b-82da-876889ea8c68-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"a4055a58-20e7-4b1b-82da-876889ea8c68\") " pod="openstack/alertmanager-metric-storage-0" Feb 03 09:06:02 crc kubenswrapper[4998]: I0203 09:06:02.862529 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/a4055a58-20e7-4b1b-82da-876889ea8c68-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"a4055a58-20e7-4b1b-82da-876889ea8c68\") " pod="openstack/alertmanager-metric-storage-0" Feb 03 09:06:02 crc kubenswrapper[4998]: I0203 09:06:02.862567 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mt9ch\" (UniqueName: \"kubernetes.io/projected/a4055a58-20e7-4b1b-82da-876889ea8c68-kube-api-access-mt9ch\") pod \"alertmanager-metric-storage-0\" (UID: \"a4055a58-20e7-4b1b-82da-876889ea8c68\") " pod="openstack/alertmanager-metric-storage-0" Feb 03 09:06:02 crc kubenswrapper[4998]: I0203 09:06:02.862587 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/a4055a58-20e7-4b1b-82da-876889ea8c68-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"a4055a58-20e7-4b1b-82da-876889ea8c68\") " pod="openstack/alertmanager-metric-storage-0" Feb 03 09:06:02 crc kubenswrapper[4998]: I0203 09:06:02.925531 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Feb 03 09:06:02 crc kubenswrapper[4998]: I0203 09:06:02.939433 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 03 09:06:02 crc kubenswrapper[4998]: W0203 09:06:02.961224 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7afff0dd_f948_4551_b5ab_54c33db00a60.slice/crio-9d279767803a2c251bb782ed38655c408714970985287908368adcd73dfd44ab WatchSource:0}: Error finding container 9d279767803a2c251bb782ed38655c408714970985287908368adcd73dfd44ab: Status 404 returned error can't find the container with id 9d279767803a2c251bb782ed38655c408714970985287908368adcd73dfd44ab Feb 03 09:06:02 crc kubenswrapper[4998]: I0203 09:06:02.965487 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/a4055a58-20e7-4b1b-82da-876889ea8c68-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"a4055a58-20e7-4b1b-82da-876889ea8c68\") " pod="openstack/alertmanager-metric-storage-0" Feb 03 09:06:02 crc kubenswrapper[4998]: I0203 09:06:02.965541 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/a4055a58-20e7-4b1b-82da-876889ea8c68-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"a4055a58-20e7-4b1b-82da-876889ea8c68\") " pod="openstack/alertmanager-metric-storage-0" Feb 03 09:06:02 crc kubenswrapper[4998]: I0203 09:06:02.965567 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/a4055a58-20e7-4b1b-82da-876889ea8c68-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"a4055a58-20e7-4b1b-82da-876889ea8c68\") " pod="openstack/alertmanager-metric-storage-0" Feb 03 09:06:02 crc kubenswrapper[4998]: I0203 09:06:02.965596 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/a4055a58-20e7-4b1b-82da-876889ea8c68-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"a4055a58-20e7-4b1b-82da-876889ea8c68\") " pod="openstack/alertmanager-metric-storage-0" Feb 03 09:06:02 crc kubenswrapper[4998]: I0203 09:06:02.965665 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mt9ch\" (UniqueName: \"kubernetes.io/projected/a4055a58-20e7-4b1b-82da-876889ea8c68-kube-api-access-mt9ch\") pod \"alertmanager-metric-storage-0\" (UID: \"a4055a58-20e7-4b1b-82da-876889ea8c68\") " pod="openstack/alertmanager-metric-storage-0" Feb 03 09:06:02 crc kubenswrapper[4998]: I0203 09:06:02.965697 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/a4055a58-20e7-4b1b-82da-876889ea8c68-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"a4055a58-20e7-4b1b-82da-876889ea8c68\") " pod="openstack/alertmanager-metric-storage-0" Feb 03 09:06:02 crc kubenswrapper[4998]: I0203 09:06:02.965886 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/a4055a58-20e7-4b1b-82da-876889ea8c68-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"a4055a58-20e7-4b1b-82da-876889ea8c68\") " pod="openstack/alertmanager-metric-storage-0" Feb 03 09:06:02 crc kubenswrapper[4998]: I0203 09:06:02.966406 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/a4055a58-20e7-4b1b-82da-876889ea8c68-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"a4055a58-20e7-4b1b-82da-876889ea8c68\") " pod="openstack/alertmanager-metric-storage-0" Feb 03 09:06:02 crc kubenswrapper[4998]: I0203 09:06:02.971212 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/a4055a58-20e7-4b1b-82da-876889ea8c68-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"a4055a58-20e7-4b1b-82da-876889ea8c68\") " pod="openstack/alertmanager-metric-storage-0" Feb 03 09:06:02 crc kubenswrapper[4998]: I0203 09:06:02.976129 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/a4055a58-20e7-4b1b-82da-876889ea8c68-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"a4055a58-20e7-4b1b-82da-876889ea8c68\") " pod="openstack/alertmanager-metric-storage-0" Feb 03 09:06:02 crc kubenswrapper[4998]: I0203 09:06:02.981563 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/a4055a58-20e7-4b1b-82da-876889ea8c68-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"a4055a58-20e7-4b1b-82da-876889ea8c68\") " pod="openstack/alertmanager-metric-storage-0" Feb 03 09:06:02 crc kubenswrapper[4998]: I0203 09:06:02.982005 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" 
(UniqueName: \"kubernetes.io/projected/a4055a58-20e7-4b1b-82da-876889ea8c68-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"a4055a58-20e7-4b1b-82da-876889ea8c68\") " pod="openstack/alertmanager-metric-storage-0" Feb 03 09:06:02 crc kubenswrapper[4998]: I0203 09:06:02.982504 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/a4055a58-20e7-4b1b-82da-876889ea8c68-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"a4055a58-20e7-4b1b-82da-876889ea8c68\") " pod="openstack/alertmanager-metric-storage-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.002191 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mt9ch\" (UniqueName: \"kubernetes.io/projected/a4055a58-20e7-4b1b-82da-876889ea8c68-kube-api-access-mt9ch\") pod \"alertmanager-metric-storage-0\" (UID: \"a4055a58-20e7-4b1b-82da-876889ea8c68\") " pod="openstack/alertmanager-metric-storage-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.075416 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-qsjnb"] Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.088015 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-e543-account-create-update-g8nc2"] Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.097294 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-qsjnb"] Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.108578 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.112137 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-e543-account-create-update-g8nc2"] Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.195841 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.198421 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.204949 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-1" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.204993 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-q9vl9" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.205045 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.205132 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.205178 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.204963 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.205236 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.205299 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-2" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.215944 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.270634 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac\") " pod="openstack/prometheus-metric-storage-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.270992 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac\") " pod="openstack/prometheus-metric-storage-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.271136 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac\") " pod="openstack/prometheus-metric-storage-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.271176 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac\") " pod="openstack/prometheus-metric-storage-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.271203 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-7c6fs\" (UniqueName: \"kubernetes.io/projected/dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac-kube-api-access-7c6fs\") pod \"prometheus-metric-storage-0\" (UID: \"dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac\") " pod="openstack/prometheus-metric-storage-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.271242 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac\") " pod="openstack/prometheus-metric-storage-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.271284 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac-config\") pod \"prometheus-metric-storage-0\" (UID: \"dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac\") " pod="openstack/prometheus-metric-storage-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.271315 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac\") " pod="openstack/prometheus-metric-storage-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.271364 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-b7dd0bcc-4983-42ff-abdf-c5b65f3c6f75\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b7dd0bcc-4983-42ff-abdf-c5b65f3c6f75\") pod \"prometheus-metric-storage-0\" (UID: \"dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac\") " pod="openstack/prometheus-metric-storage-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.271433 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac\") " pod="openstack/prometheus-metric-storage-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.373975 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac-config\") pod \"prometheus-metric-storage-0\" (UID: \"dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac\") " pod="openstack/prometheus-metric-storage-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.374043 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac\") " pod="openstack/prometheus-metric-storage-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.374112 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-b7dd0bcc-4983-42ff-abdf-c5b65f3c6f75\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b7dd0bcc-4983-42ff-abdf-c5b65f3c6f75\") pod \"prometheus-metric-storage-0\" (UID: \"dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac\") " 
pod="openstack/prometheus-metric-storage-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.374191 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac\") " pod="openstack/prometheus-metric-storage-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.374274 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac\") " pod="openstack/prometheus-metric-storage-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.374305 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac\") " pod="openstack/prometheus-metric-storage-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.374403 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac\") " pod="openstack/prometheus-metric-storage-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.374447 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac\") " pod="openstack/prometheus-metric-storage-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.374478 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7c6fs\" (UniqueName: \"kubernetes.io/projected/dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac-kube-api-access-7c6fs\") pod \"prometheus-metric-storage-0\" (UID: \"dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac\") " pod="openstack/prometheus-metric-storage-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.374517 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac\") " pod="openstack/prometheus-metric-storage-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.378796 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac\") " pod="openstack/prometheus-metric-storage-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.387654 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: 
\"kubernetes.io/configmap/dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac\") " pod="openstack/prometheus-metric-storage-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.389748 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac\") " pod="openstack/prometheus-metric-storage-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.391175 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac\") " pod="openstack/prometheus-metric-storage-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.397567 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac-config\") pod \"prometheus-metric-storage-0\" (UID: \"dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac\") " pod="openstack/prometheus-metric-storage-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.398353 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac\") " pod="openstack/prometheus-metric-storage-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.410507 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac\") " pod="openstack/prometheus-metric-storage-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.415217 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7c6fs\" (UniqueName: \"kubernetes.io/projected/dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac-kube-api-access-7c6fs\") pod \"prometheus-metric-storage-0\" (UID: \"dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac\") " pod="openstack/prometheus-metric-storage-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.419189 4998 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.419227 4998 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-b7dd0bcc-4983-42ff-abdf-c5b65f3c6f75\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b7dd0bcc-4983-42ff-abdf-c5b65f3c6f75\") pod \"prometheus-metric-storage-0\" (UID: \"dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/0adf1b70aa2e3c78ad0afd43b2d7fd6244d37d9096e666487e1ffb48f5b96eaa/globalmount\"" pod="openstack/prometheus-metric-storage-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.421386 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac\") " pod="openstack/prometheus-metric-storage-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.538915 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-b7dd0bcc-4983-42ff-abdf-c5b65f3c6f75\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b7dd0bcc-4983-42ff-abdf-c5b65f3c6f75\") pod \"prometheus-metric-storage-0\" (UID: \"dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac\") " pod="openstack/prometheus-metric-storage-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.725762 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"] Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.830974 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.847880 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"7afff0dd-f948-4551-b5ab-54c33db00a60","Type":"ContainerStarted","Data":"460d5b72df47fdcf2be66d20a196e9bc3dc628c549e4333d1f0ea16cc0966d2b"} Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.847940 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"7afff0dd-f948-4551-b5ab-54c33db00a60","Type":"ContainerStarted","Data":"9d279767803a2c251bb782ed38655c408714970985287908368adcd73dfd44ab"} Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.847992 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.849636 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"a4055a58-20e7-4b1b-82da-876889ea8c68","Type":"ContainerStarted","Data":"889bbf58baceaa02a7b215444a18356a4dd04b032e650f45838e968132bdc8a4"} Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.853915 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"43f887ad-f9d1-4f23-b9be-21ad6bb1cd26","Type":"ContainerStarted","Data":"3d269c65c4ed03cf20c41ae9a13546eb683da7e079d5dc23113bca07503aa93a"} Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.853991 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"43f887ad-f9d1-4f23-b9be-21ad6bb1cd26","Type":"ContainerStarted","Data":"cf44051613fec273e63e3142ad49594cec198c2a76b7a81777e9e20863a0269b"} Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.855553 4998 generic.go:334] "Generic (PLEG): container 
finished" podID="098a8d80-a86e-4f18-811b-dec1c91614d8" containerID="fb292e710049b7e29363c687f3180632419bef33ce334d7da9e9bb6582fe69b9" exitCode=137 Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.855615 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="78bb58710871d13cf9e277cf5c2716bf479f277a7353a90314479dee62f586bc" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.930020 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.9300017929999997 podStartE2EDuration="2.930001793s" podCreationTimestamp="2026-02-03 09:06:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 09:06:03.891887731 +0000 UTC m=+8402.178581547" watchObservedRunningTime="2026-02-03 09:06:03.930001793 +0000 UTC m=+8402.216695589" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.937618 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.450008887 podStartE2EDuration="2.937598899s" podCreationTimestamp="2026-02-03 09:06:01 +0000 UTC" firstStartedPulling="2026-02-03 09:06:02.964435631 +0000 UTC m=+8401.251129427" lastFinishedPulling="2026-02-03 09:06:03.452025633 +0000 UTC m=+8401.738719439" observedRunningTime="2026-02-03 09:06:03.872682546 +0000 UTC m=+8402.159376362" watchObservedRunningTime="2026-02-03 09:06:03.937598899 +0000 UTC m=+8402.224292705" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.981694 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Feb 03 09:06:03 crc kubenswrapper[4998]: I0203 09:06:03.990536 4998 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="098a8d80-a86e-4f18-811b-dec1c91614d8" podUID="43f887ad-f9d1-4f23-b9be-21ad6bb1cd26" Feb 03 09:06:04 crc kubenswrapper[4998]: I0203 09:06:04.091926 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/098a8d80-a86e-4f18-811b-dec1c91614d8-openstack-config\") pod \"098a8d80-a86e-4f18-811b-dec1c91614d8\" (UID: \"098a8d80-a86e-4f18-811b-dec1c91614d8\") " Feb 03 09:06:04 crc kubenswrapper[4998]: I0203 09:06:04.092119 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/098a8d80-a86e-4f18-811b-dec1c91614d8-openstack-config-secret\") pod \"098a8d80-a86e-4f18-811b-dec1c91614d8\" (UID: \"098a8d80-a86e-4f18-811b-dec1c91614d8\") " Feb 03 09:06:04 crc kubenswrapper[4998]: I0203 09:06:04.092202 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rhxxx\" (UniqueName: \"kubernetes.io/projected/098a8d80-a86e-4f18-811b-dec1c91614d8-kube-api-access-rhxxx\") pod \"098a8d80-a86e-4f18-811b-dec1c91614d8\" (UID: \"098a8d80-a86e-4f18-811b-dec1c91614d8\") " Feb 03 09:06:04 crc kubenswrapper[4998]: I0203 09:06:04.101206 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/098a8d80-a86e-4f18-811b-dec1c91614d8-kube-api-access-rhxxx" (OuterVolumeSpecName: "kube-api-access-rhxxx") pod "098a8d80-a86e-4f18-811b-dec1c91614d8" (UID: "098a8d80-a86e-4f18-811b-dec1c91614d8"). InnerVolumeSpecName "kube-api-access-rhxxx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:06:04 crc kubenswrapper[4998]: I0203 09:06:04.140139 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/098a8d80-a86e-4f18-811b-dec1c91614d8-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "098a8d80-a86e-4f18-811b-dec1c91614d8" (UID: "098a8d80-a86e-4f18-811b-dec1c91614d8"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 09:06:04 crc kubenswrapper[4998]: I0203 09:06:04.163616 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/098a8d80-a86e-4f18-811b-dec1c91614d8-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "098a8d80-a86e-4f18-811b-dec1c91614d8" (UID: "098a8d80-a86e-4f18-811b-dec1c91614d8"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:06:04 crc kubenswrapper[4998]: I0203 09:06:04.194982 4998 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/098a8d80-a86e-4f18-811b-dec1c91614d8-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Feb 03 09:06:04 crc kubenswrapper[4998]: I0203 09:06:04.195015 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rhxxx\" (UniqueName: \"kubernetes.io/projected/098a8d80-a86e-4f18-811b-dec1c91614d8-kube-api-access-rhxxx\") on node \"crc\" DevicePath \"\"" Feb 03 09:06:04 crc kubenswrapper[4998]: I0203 09:06:04.195025 4998 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/098a8d80-a86e-4f18-811b-dec1c91614d8-openstack-config\") on node \"crc\" DevicePath \"\"" Feb 03 09:06:04 crc kubenswrapper[4998]: W0203 09:06:04.439001 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddc3e90e8_8eb3_46e1_8fc9_3c3158c873ac.slice/crio-63e4c373732be5b397c7d89f2b53ef5ef5096a4f146278ad5e9a0a78b1f33748 WatchSource:0}: Error finding container 63e4c373732be5b397c7d89f2b53ef5ef5096a4f146278ad5e9a0a78b1f33748: Status 404 returned error can't find the container with id 63e4c373732be5b397c7d89f2b53ef5ef5096a4f146278ad5e9a0a78b1f33748 Feb 03 09:06:04 crc kubenswrapper[4998]: I0203 09:06:04.446444 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="098a8d80-a86e-4f18-811b-dec1c91614d8" path="/var/lib/kubelet/pods/098a8d80-a86e-4f18-811b-dec1c91614d8/volumes" Feb 03 09:06:04 crc kubenswrapper[4998]: I0203 09:06:04.447039 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f86d75f0-cebd-4d4c-84b6-7350ff9ecd52" path="/var/lib/kubelet/pods/f86d75f0-cebd-4d4c-84b6-7350ff9ecd52/volumes" Feb 03 09:06:04 crc kubenswrapper[4998]: I0203 09:06:04.447672 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fae72d4e-5c7f-4a3a-b59f-bc6e42338e97" path="/var/lib/kubelet/pods/fae72d4e-5c7f-4a3a-b59f-bc6e42338e97/volumes" Feb 03 09:06:04 crc kubenswrapper[4998]: I0203 09:06:04.449247 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Feb 03 09:06:04 crc kubenswrapper[4998]: I0203 09:06:04.864797 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Feb 03 09:06:04 crc kubenswrapper[4998]: I0203 09:06:04.864791 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac","Type":"ContainerStarted","Data":"63e4c373732be5b397c7d89f2b53ef5ef5096a4f146278ad5e9a0a78b1f33748"} Feb 03 09:06:04 crc kubenswrapper[4998]: I0203 09:06:04.871078 4998 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="098a8d80-a86e-4f18-811b-dec1c91614d8" podUID="43f887ad-f9d1-4f23-b9be-21ad6bb1cd26" Feb 03 09:06:09 crc kubenswrapper[4998]: I0203 09:06:09.215013 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-cs7gc"] Feb 03 09:06:09 crc kubenswrapper[4998]: I0203 09:06:09.224466 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cs7gc" Feb 03 09:06:09 crc kubenswrapper[4998]: I0203 09:06:09.238325 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cs7gc"] Feb 03 09:06:09 crc kubenswrapper[4998]: I0203 09:06:09.309979 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5zvm\" (UniqueName: \"kubernetes.io/projected/7d0c7927-4f13-40bb-b344-3353322d4964-kube-api-access-k5zvm\") pod \"community-operators-cs7gc\" (UID: \"7d0c7927-4f13-40bb-b344-3353322d4964\") " pod="openshift-marketplace/community-operators-cs7gc" Feb 03 09:06:09 crc kubenswrapper[4998]: I0203 09:06:09.310044 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d0c7927-4f13-40bb-b344-3353322d4964-catalog-content\") pod \"community-operators-cs7gc\" (UID: \"7d0c7927-4f13-40bb-b344-3353322d4964\") " pod="openshift-marketplace/community-operators-cs7gc" Feb 03 09:06:09 crc kubenswrapper[4998]: I0203 09:06:09.310062 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d0c7927-4f13-40bb-b344-3353322d4964-utilities\") pod \"community-operators-cs7gc\" (UID: \"7d0c7927-4f13-40bb-b344-3353322d4964\") " pod="openshift-marketplace/community-operators-cs7gc" Feb 03 09:06:09 crc kubenswrapper[4998]: I0203 09:06:09.411962 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5zvm\" (UniqueName: \"kubernetes.io/projected/7d0c7927-4f13-40bb-b344-3353322d4964-kube-api-access-k5zvm\") pod \"community-operators-cs7gc\" (UID: \"7d0c7927-4f13-40bb-b344-3353322d4964\") " pod="openshift-marketplace/community-operators-cs7gc" Feb 03 09:06:09 crc kubenswrapper[4998]: I0203 09:06:09.412045 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d0c7927-4f13-40bb-b344-3353322d4964-catalog-content\") pod \"community-operators-cs7gc\" (UID: \"7d0c7927-4f13-40bb-b344-3353322d4964\") " pod="openshift-marketplace/community-operators-cs7gc" Feb 03 09:06:09 crc kubenswrapper[4998]: I0203 09:06:09.412068 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d0c7927-4f13-40bb-b344-3353322d4964-utilities\") pod \"community-operators-cs7gc\" (UID: 
\"7d0c7927-4f13-40bb-b344-3353322d4964\") " pod="openshift-marketplace/community-operators-cs7gc" Feb 03 09:06:09 crc kubenswrapper[4998]: I0203 09:06:09.412630 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d0c7927-4f13-40bb-b344-3353322d4964-utilities\") pod \"community-operators-cs7gc\" (UID: \"7d0c7927-4f13-40bb-b344-3353322d4964\") " pod="openshift-marketplace/community-operators-cs7gc" Feb 03 09:06:09 crc kubenswrapper[4998]: I0203 09:06:09.412700 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d0c7927-4f13-40bb-b344-3353322d4964-catalog-content\") pod \"community-operators-cs7gc\" (UID: \"7d0c7927-4f13-40bb-b344-3353322d4964\") " pod="openshift-marketplace/community-operators-cs7gc" Feb 03 09:06:09 crc kubenswrapper[4998]: I0203 09:06:09.470471 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5zvm\" (UniqueName: \"kubernetes.io/projected/7d0c7927-4f13-40bb-b344-3353322d4964-kube-api-access-k5zvm\") pod \"community-operators-cs7gc\" (UID: \"7d0c7927-4f13-40bb-b344-3353322d4964\") " pod="openshift-marketplace/community-operators-cs7gc" Feb 03 09:06:09 crc kubenswrapper[4998]: I0203 09:06:09.558431 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cs7gc" Feb 03 09:06:10 crc kubenswrapper[4998]: I0203 09:06:10.030683 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-gz7j2"] Feb 03 09:06:10 crc kubenswrapper[4998]: I0203 09:06:10.041228 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-gz7j2"] Feb 03 09:06:10 crc kubenswrapper[4998]: I0203 09:06:10.167606 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cs7gc"] Feb 03 09:06:10 crc kubenswrapper[4998]: I0203 09:06:10.438536 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f414436-f176-40ab-b8e6-6115625b5c66" path="/var/lib/kubelet/pods/3f414436-f176-40ab-b8e6-6115625b5c66/volumes" Feb 03 09:06:10 crc kubenswrapper[4998]: I0203 09:06:10.495663 4998 scope.go:117] "RemoveContainer" containerID="a614b89c08dbceab2098023883c419fd3232ff80bb1465a266e95fa0f586a82c" Feb 03 09:06:10 crc kubenswrapper[4998]: I0203 09:06:10.701773 4998 scope.go:117] "RemoveContainer" containerID="308c95c905f9b61d093bef44cbdabd7f755e47b6921740866a7ece4f7927ad4a" Feb 03 09:06:10 crc kubenswrapper[4998]: I0203 09:06:10.836297 4998 scope.go:117] "RemoveContainer" containerID="e0eb951f59cc81cd84f7352acf66f491cedc1d18f5b15f1f270fcf806e59eca3" Feb 03 09:06:10 crc kubenswrapper[4998]: I0203 09:06:10.927061 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cs7gc" event={"ID":"7d0c7927-4f13-40bb-b344-3353322d4964","Type":"ContainerStarted","Data":"836ddf7d1b73d6d0b0037f9b04dcd94adfc34753fa2679557a452b3b88b1bba0"} Feb 03 09:06:11 crc kubenswrapper[4998]: I0203 09:06:11.189297 4998 scope.go:117] "RemoveContainer" containerID="0d6dfd70db670ab6c62e2ec19f878b4c4f6a001506ecebe77e45c6c39723fe5f" Feb 03 09:06:11 crc kubenswrapper[4998]: I0203 09:06:11.384319 4998 scope.go:117] "RemoveContainer" containerID="94d9071e95c57827561a5252c694b0ab5ed8f25f7224d4fa200cbf12551945aa" Feb 03 09:06:11 crc kubenswrapper[4998]: I0203 09:06:11.990137 4998 scope.go:117] "RemoveContainer" 
containerID="c91c19cfc1b81f291034be303b73960734521d310e296607382f07a72d87478a" Feb 03 09:06:12 crc kubenswrapper[4998]: I0203 09:06:12.023915 4998 scope.go:117] "RemoveContainer" containerID="f159a68d249149e532b4e23ed202fd8b3e27eeba75935c2c911ae310e321216f" Feb 03 09:06:12 crc kubenswrapper[4998]: I0203 09:06:12.029189 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Feb 03 09:06:12 crc kubenswrapper[4998]: I0203 09:06:12.094056 4998 scope.go:117] "RemoveContainer" containerID="19be361f68d5fa33337eec7b703ddcdbfcc0d5801591421c78a19618b29382fb" Feb 03 09:06:12 crc kubenswrapper[4998]: I0203 09:06:12.181800 4998 scope.go:117] "RemoveContainer" containerID="fb292e710049b7e29363c687f3180632419bef33ce334d7da9e9bb6582fe69b9" Feb 03 09:06:12 crc kubenswrapper[4998]: I0203 09:06:12.200648 4998 scope.go:117] "RemoveContainer" containerID="e8de2d4f278eedd2fd9e28c87db243b68a732d3feadfc8fa81335572005d6d58" Feb 03 09:06:12 crc kubenswrapper[4998]: I0203 09:06:12.231299 4998 scope.go:117] "RemoveContainer" containerID="8f9639a18e4113fbeb742ebf36081e3f4173a8bab979f62fb26e09c6141b158c" Feb 03 09:06:12 crc kubenswrapper[4998]: I0203 09:06:12.952188 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"a4055a58-20e7-4b1b-82da-876889ea8c68","Type":"ContainerStarted","Data":"b6b1015759f705554cf946a095048f9b52771d010202c32698ef49b05a987606"} Feb 03 09:06:12 crc kubenswrapper[4998]: I0203 09:06:12.953803 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac","Type":"ContainerStarted","Data":"9162d2f3216c9bd8188dbed65ce99b0770064573734296119cb8d51273eb6f78"} Feb 03 09:06:12 crc kubenswrapper[4998]: I0203 09:06:12.955665 4998 generic.go:334] "Generic (PLEG): container finished" podID="7d0c7927-4f13-40bb-b344-3353322d4964" containerID="cac65aa19a7dbb31e49fab161d9a790ef7c5ca39ec54511569f9f585b04841c4" exitCode=0 Feb 03 09:06:12 crc kubenswrapper[4998]: I0203 09:06:12.955717 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cs7gc" event={"ID":"7d0c7927-4f13-40bb-b344-3353322d4964","Type":"ContainerDied","Data":"cac65aa19a7dbb31e49fab161d9a790ef7c5ca39ec54511569f9f585b04841c4"} Feb 03 09:06:13 crc kubenswrapper[4998]: I0203 09:06:13.428118 4998 scope.go:117] "RemoveContainer" containerID="f0960bf93408fbd80b1af606d0b0b825518c12f926ca6f6b8748be736ddfda77" Feb 03 09:06:13 crc kubenswrapper[4998]: I0203 09:06:13.971808 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerStarted","Data":"64bf25fffb6e530d03bdddc74c5d431fe1cf2996b5e7ae51aa19f535134dc4f8"} Feb 03 09:06:19 crc kubenswrapper[4998]: I0203 09:06:19.040212 4998 generic.go:334] "Generic (PLEG): container finished" podID="a4055a58-20e7-4b1b-82da-876889ea8c68" containerID="b6b1015759f705554cf946a095048f9b52771d010202c32698ef49b05a987606" exitCode=0 Feb 03 09:06:19 crc kubenswrapper[4998]: I0203 09:06:19.040288 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"a4055a58-20e7-4b1b-82da-876889ea8c68","Type":"ContainerDied","Data":"b6b1015759f705554cf946a095048f9b52771d010202c32698ef49b05a987606"} Feb 03 09:06:19 crc kubenswrapper[4998]: I0203 09:06:19.044523 4998 provider.go:102] Refreshing cache for 
provider: *credentialprovider.defaultDockerConfigProvider Feb 03 09:06:19 crc kubenswrapper[4998]: I0203 09:06:19.048296 4998 generic.go:334] "Generic (PLEG): container finished" podID="7d0c7927-4f13-40bb-b344-3353322d4964" containerID="abe80a0a90b725abfe6f6dd3ff0be8d2f5cf7666bd4029c4e74b992b954dab6e" exitCode=0 Feb 03 09:06:19 crc kubenswrapper[4998]: I0203 09:06:19.048337 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cs7gc" event={"ID":"7d0c7927-4f13-40bb-b344-3353322d4964","Type":"ContainerDied","Data":"abe80a0a90b725abfe6f6dd3ff0be8d2f5cf7666bd4029c4e74b992b954dab6e"} Feb 03 09:06:20 crc kubenswrapper[4998]: I0203 09:06:20.059697 4998 generic.go:334] "Generic (PLEG): container finished" podID="dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac" containerID="9162d2f3216c9bd8188dbed65ce99b0770064573734296119cb8d51273eb6f78" exitCode=0 Feb 03 09:06:20 crc kubenswrapper[4998]: I0203 09:06:20.059793 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac","Type":"ContainerDied","Data":"9162d2f3216c9bd8188dbed65ce99b0770064573734296119cb8d51273eb6f78"} Feb 03 09:06:21 crc kubenswrapper[4998]: I0203 09:06:21.082557 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cs7gc" event={"ID":"7d0c7927-4f13-40bb-b344-3353322d4964","Type":"ContainerStarted","Data":"ff12a4ffed784f8e8dece7fdb6b23e86cead4584bb2db38e8a2374289395e8b0"} Feb 03 09:06:21 crc kubenswrapper[4998]: I0203 09:06:21.103582 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-cs7gc" podStartSLOduration=5.551719696 podStartE2EDuration="12.103558329s" podCreationTimestamp="2026-02-03 09:06:09 +0000 UTC" firstStartedPulling="2026-02-03 09:06:12.957368564 +0000 UTC m=+8411.244062370" lastFinishedPulling="2026-02-03 09:06:19.509207197 +0000 UTC m=+8417.795901003" observedRunningTime="2026-02-03 09:06:21.099610707 +0000 UTC m=+8419.386304533" watchObservedRunningTime="2026-02-03 09:06:21.103558329 +0000 UTC m=+8419.390252145" Feb 03 09:06:22 crc kubenswrapper[4998]: I0203 09:06:22.095043 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"a4055a58-20e7-4b1b-82da-876889ea8c68","Type":"ContainerStarted","Data":"9ee3b23c023ed9af9933567af17a6613e82eed36f145b0530920a5ddc3b2901a"} Feb 03 09:06:26 crc kubenswrapper[4998]: I0203 09:06:26.139536 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"a4055a58-20e7-4b1b-82da-876889ea8c68","Type":"ContainerStarted","Data":"8a142f8510cadd595b83cab0d18917dc84c105798a4dbfba19d2d1424d445d41"} Feb 03 09:06:27 crc kubenswrapper[4998]: I0203 09:06:27.154629 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac","Type":"ContainerStarted","Data":"bfa9735ee4611e64ffa0502a8c194990dc597f89d243a4fd240f8d6d0003b193"} Feb 03 09:06:27 crc kubenswrapper[4998]: I0203 09:06:27.154954 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/alertmanager-metric-storage-0" Feb 03 09:06:27 crc kubenswrapper[4998]: I0203 09:06:27.158544 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/alertmanager-metric-storage-0" Feb 03 09:06:27 crc kubenswrapper[4998]: I0203 09:06:27.194928 4998 
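
Note: the pod_startup_latency_tracker entry above reports two durations, and the arithmetic is checkable from the line itself: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration is that same interval minus the image-pull window (lastFinishedPulling minus firstStartedPulling): 12.103558329s - 6.551838633s = 5.551719696s, consistent with the pod-startup SLO excluding image pulling. A worked check in Go using the timestamps copied from the line:

package main

import (
	"fmt"
	"time"
)

const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

func mustParse(s string) time.Time {
	t, err := time.Parse(layout, s)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	// Timestamps from the community-operators-cs7gc latency entry above.
	created := mustParse("2026-02-03 09:06:09 +0000 UTC")
	firstPull := mustParse("2026-02-03 09:06:12.957368564 +0000 UTC")
	lastPull := mustParse("2026-02-03 09:06:19.509207197 +0000 UTC")
	running := mustParse("2026-02-03 09:06:21.103558329 +0000 UTC")

	e2e := running.Sub(created)          // podStartE2EDuration = 12.103558329s
	slo := e2e - lastPull.Sub(firstPull) // podStartSLOduration = 5.551719696s
	fmt.Println(e2e, slo)
}
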
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/alertmanager-metric-storage-0" podStartSLOduration=7.50980063 podStartE2EDuration="25.194909119s" podCreationTimestamp="2026-02-03 09:06:02 +0000 UTC" firstStartedPulling="2026-02-03 09:06:03.752125133 +0000 UTC m=+8402.038818939" lastFinishedPulling="2026-02-03 09:06:21.437233622 +0000 UTC m=+8419.723927428" observedRunningTime="2026-02-03 09:06:27.18790199 +0000 UTC m=+8425.474595816" watchObservedRunningTime="2026-02-03 09:06:27.194909119 +0000 UTC m=+8425.481602925" Feb 03 09:06:29 crc kubenswrapper[4998]: I0203 09:06:29.559563 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-cs7gc" Feb 03 09:06:29 crc kubenswrapper[4998]: I0203 09:06:29.560110 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-cs7gc" Feb 03 09:06:29 crc kubenswrapper[4998]: I0203 09:06:29.609328 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-cs7gc" Feb 03 09:06:30 crc kubenswrapper[4998]: I0203 09:06:30.243306 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-cs7gc" Feb 03 09:06:30 crc kubenswrapper[4998]: I0203 09:06:30.314593 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cs7gc"] Feb 03 09:06:30 crc kubenswrapper[4998]: I0203 09:06:30.351325 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2rc5f"] Feb 03 09:06:30 crc kubenswrapper[4998]: I0203 09:06:30.351594 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-2rc5f" podUID="5f855b43-d82a-4ce2-8471-f3117bf7fd52" containerName="registry-server" containerID="cri-o://8ccc154fb3dc102db62d85ba00101dcd83b805c4d87df9f0b9a540d70767683a" gracePeriod=2 Feb 03 09:06:31 crc kubenswrapper[4998]: I0203 09:06:31.196922 4998 generic.go:334] "Generic (PLEG): container finished" podID="5f855b43-d82a-4ce2-8471-f3117bf7fd52" containerID="8ccc154fb3dc102db62d85ba00101dcd83b805c4d87df9f0b9a540d70767683a" exitCode=0 Feb 03 09:06:31 crc kubenswrapper[4998]: I0203 09:06:31.197009 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2rc5f" event={"ID":"5f855b43-d82a-4ce2-8471-f3117bf7fd52","Type":"ContainerDied","Data":"8ccc154fb3dc102db62d85ba00101dcd83b805c4d87df9f0b9a540d70767683a"} Feb 03 09:06:31 crc kubenswrapper[4998]: I0203 09:06:31.396028 4998 util.go:48] "No ready sandbox for pod can be found. 
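
Note: "Killing container with a grace period" with gracePeriod=2 means the runtime delivers SIGTERM to the container's init process and escalates to SIGKILL if it has not exited within two seconds. A stdlib sketch of that TERM-then-KILL shape against an ordinary Unix process; this is an analogy, not the kubelet's actual CRI path:

package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

// killWithGrace sends SIGTERM, waits up to gracePeriod for exit,
// then escalates to SIGKILL.
func killWithGrace(cmd *exec.Cmd, gracePeriod time.Duration) {
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()

	_ = cmd.Process.Signal(syscall.SIGTERM)
	select {
	case <-done:
		fmt.Println("exited within grace period")
	case <-time.After(gracePeriod):
		_ = cmd.Process.Kill() // SIGKILL
		<-done
		fmt.Println("killed after grace period")
	}
}

func main() {
	cmd := exec.Command("sleep", "60")
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	killWithGrace(cmd, 2*time.Second) // gracePeriod=2, as in the log
}
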
Need to start a new one" pod="openshift-marketplace/community-operators-2rc5f" Feb 03 09:06:31 crc kubenswrapper[4998]: I0203 09:06:31.498037 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f855b43-d82a-4ce2-8471-f3117bf7fd52-utilities\") pod \"5f855b43-d82a-4ce2-8471-f3117bf7fd52\" (UID: \"5f855b43-d82a-4ce2-8471-f3117bf7fd52\") " Feb 03 09:06:31 crc kubenswrapper[4998]: I0203 09:06:31.498389 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f855b43-d82a-4ce2-8471-f3117bf7fd52-catalog-content\") pod \"5f855b43-d82a-4ce2-8471-f3117bf7fd52\" (UID: \"5f855b43-d82a-4ce2-8471-f3117bf7fd52\") " Feb 03 09:06:31 crc kubenswrapper[4998]: I0203 09:06:31.498420 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-26zv7\" (UniqueName: \"kubernetes.io/projected/5f855b43-d82a-4ce2-8471-f3117bf7fd52-kube-api-access-26zv7\") pod \"5f855b43-d82a-4ce2-8471-f3117bf7fd52\" (UID: \"5f855b43-d82a-4ce2-8471-f3117bf7fd52\") " Feb 03 09:06:31 crc kubenswrapper[4998]: I0203 09:06:31.499744 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f855b43-d82a-4ce2-8471-f3117bf7fd52-utilities" (OuterVolumeSpecName: "utilities") pod "5f855b43-d82a-4ce2-8471-f3117bf7fd52" (UID: "5f855b43-d82a-4ce2-8471-f3117bf7fd52"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 09:06:31 crc kubenswrapper[4998]: I0203 09:06:31.514297 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f855b43-d82a-4ce2-8471-f3117bf7fd52-kube-api-access-26zv7" (OuterVolumeSpecName: "kube-api-access-26zv7") pod "5f855b43-d82a-4ce2-8471-f3117bf7fd52" (UID: "5f855b43-d82a-4ce2-8471-f3117bf7fd52"). InnerVolumeSpecName "kube-api-access-26zv7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:06:31 crc kubenswrapper[4998]: I0203 09:06:31.572872 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f855b43-d82a-4ce2-8471-f3117bf7fd52-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5f855b43-d82a-4ce2-8471-f3117bf7fd52" (UID: "5f855b43-d82a-4ce2-8471-f3117bf7fd52"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 09:06:31 crc kubenswrapper[4998]: I0203 09:06:31.600675 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f855b43-d82a-4ce2-8471-f3117bf7fd52-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 09:06:31 crc kubenswrapper[4998]: I0203 09:06:31.600714 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-26zv7\" (UniqueName: \"kubernetes.io/projected/5f855b43-d82a-4ce2-8471-f3117bf7fd52-kube-api-access-26zv7\") on node \"crc\" DevicePath \"\"" Feb 03 09:06:31 crc kubenswrapper[4998]: I0203 09:06:31.600725 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f855b43-d82a-4ce2-8471-f3117bf7fd52-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 09:06:32 crc kubenswrapper[4998]: I0203 09:06:32.208827 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2rc5f" Feb 03 09:06:32 crc kubenswrapper[4998]: I0203 09:06:32.208828 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2rc5f" event={"ID":"5f855b43-d82a-4ce2-8471-f3117bf7fd52","Type":"ContainerDied","Data":"68bb625744288f4708835208bf695ad127944cdc9a738247f978dbd87e761b08"} Feb 03 09:06:32 crc kubenswrapper[4998]: I0203 09:06:32.208902 4998 scope.go:117] "RemoveContainer" containerID="8ccc154fb3dc102db62d85ba00101dcd83b805c4d87df9f0b9a540d70767683a" Feb 03 09:06:32 crc kubenswrapper[4998]: I0203 09:06:32.260697 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2rc5f"] Feb 03 09:06:32 crc kubenswrapper[4998]: I0203 09:06:32.270520 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-2rc5f"] Feb 03 09:06:32 crc kubenswrapper[4998]: I0203 09:06:32.358842 4998 scope.go:117] "RemoveContainer" containerID="54034c3a23543b29928ccb8e658c633a86eb29ed67d6a4f431f2995d78e535f6" Feb 03 09:06:32 crc kubenswrapper[4998]: I0203 09:06:32.460487 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f855b43-d82a-4ce2-8471-f3117bf7fd52" path="/var/lib/kubelet/pods/5f855b43-d82a-4ce2-8471-f3117bf7fd52/volumes" Feb 03 09:06:32 crc kubenswrapper[4998]: I0203 09:06:32.480076 4998 scope.go:117] "RemoveContainer" containerID="bca87ad367f80113f0bbad6e3f6aec7ff29ea8c88084ad5c4ed3e4855914fee2" Feb 03 09:06:33 crc kubenswrapper[4998]: I0203 09:06:33.223619 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac","Type":"ContainerStarted","Data":"6e0cbb23703f4a2fa5bd76c44bd27f30f3ef2f90a14af50ab33eac91debbfd33"} Feb 03 09:06:37 crc kubenswrapper[4998]: I0203 09:06:37.270735 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac","Type":"ContainerStarted","Data":"15a5bcf4c4fd093973103fa81bdb75804ea998a27254e788bc46f81aee74bde8"} Feb 03 09:06:37 crc kubenswrapper[4998]: I0203 09:06:37.312433 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=3.422660589 podStartE2EDuration="35.312417721s" podCreationTimestamp="2026-02-03 09:06:02 +0000 UTC" firstStartedPulling="2026-02-03 09:06:04.445346613 +0000 UTC m=+8402.732040419" lastFinishedPulling="2026-02-03 09:06:36.335103745 +0000 UTC m=+8434.621797551" observedRunningTime="2026-02-03 09:06:37.303362513 +0000 UTC m=+8435.590056329" watchObservedRunningTime="2026-02-03 09:06:37.312417721 +0000 UTC m=+8435.599111527" Feb 03 09:06:38 crc kubenswrapper[4998]: I0203 09:06:38.831444 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Feb 03 09:06:40 crc kubenswrapper[4998]: I0203 09:06:40.822669 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 03 09:06:40 crc kubenswrapper[4998]: E0203 09:06:40.824022 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f855b43-d82a-4ce2-8471-f3117bf7fd52" containerName="registry-server" Feb 03 09:06:40 crc kubenswrapper[4998]: I0203 09:06:40.824041 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f855b43-d82a-4ce2-8471-f3117bf7fd52" containerName="registry-server" Feb 03 09:06:40 crc kubenswrapper[4998]: E0203 
09:06:40.824056 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f855b43-d82a-4ce2-8471-f3117bf7fd52" containerName="extract-utilities" Feb 03 09:06:40 crc kubenswrapper[4998]: I0203 09:06:40.824062 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f855b43-d82a-4ce2-8471-f3117bf7fd52" containerName="extract-utilities" Feb 03 09:06:40 crc kubenswrapper[4998]: E0203 09:06:40.824104 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f855b43-d82a-4ce2-8471-f3117bf7fd52" containerName="extract-content" Feb 03 09:06:40 crc kubenswrapper[4998]: I0203 09:06:40.824112 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f855b43-d82a-4ce2-8471-f3117bf7fd52" containerName="extract-content" Feb 03 09:06:40 crc kubenswrapper[4998]: I0203 09:06:40.824306 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f855b43-d82a-4ce2-8471-f3117bf7fd52" containerName="registry-server" Feb 03 09:06:40 crc kubenswrapper[4998]: I0203 09:06:40.826310 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 03 09:06:40 crc kubenswrapper[4998]: I0203 09:06:40.828529 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 03 09:06:40 crc kubenswrapper[4998]: I0203 09:06:40.828816 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 03 09:06:40 crc kubenswrapper[4998]: I0203 09:06:40.834815 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 03 09:06:40 crc kubenswrapper[4998]: I0203 09:06:40.912899 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tjxlz\" (UniqueName: \"kubernetes.io/projected/596da915-f039-417b-8700-55f18846872e-kube-api-access-tjxlz\") pod \"ceilometer-0\" (UID: \"596da915-f039-417b-8700-55f18846872e\") " pod="openstack/ceilometer-0" Feb 03 09:06:40 crc kubenswrapper[4998]: I0203 09:06:40.913211 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/596da915-f039-417b-8700-55f18846872e-scripts\") pod \"ceilometer-0\" (UID: \"596da915-f039-417b-8700-55f18846872e\") " pod="openstack/ceilometer-0" Feb 03 09:06:40 crc kubenswrapper[4998]: I0203 09:06:40.913350 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/596da915-f039-417b-8700-55f18846872e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"596da915-f039-417b-8700-55f18846872e\") " pod="openstack/ceilometer-0" Feb 03 09:06:40 crc kubenswrapper[4998]: I0203 09:06:40.913467 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/596da915-f039-417b-8700-55f18846872e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"596da915-f039-417b-8700-55f18846872e\") " pod="openstack/ceilometer-0" Feb 03 09:06:40 crc kubenswrapper[4998]: I0203 09:06:40.913673 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/596da915-f039-417b-8700-55f18846872e-run-httpd\") pod \"ceilometer-0\" (UID: \"596da915-f039-417b-8700-55f18846872e\") " pod="openstack/ceilometer-0" Feb 03 09:06:40 crc kubenswrapper[4998]: I0203 09:06:40.913828 4998 
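
Note: the cpu_manager/memory_manager pairs above ("RemoveStaleState: removing container" followed by "Deleted CPUSet assignment") fire when admitting a new pod surfaces per-container resource assignments left behind by a deleted pod, here the old community-operators-2rc5f containers. A toy sweep over such a state map, with the UID and container names copied from above:

package main

import "fmt"

// assignments mimics the resource-manager state: podUID -> container -> cpuset.
type assignments map[string]map[string]string

// removeStaleState drops entries for pods no longer in the active set.
func removeStaleState(state assignments, active map[string]bool) {
	for podUID, containers := range state {
		if active[podUID] {
			continue
		}
		for name := range containers {
			fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n", podUID, name)
			delete(containers, name)
			fmt.Printf("Deleted CPUSet assignment podUID=%q containerName=%q\n", podUID, name)
		}
		delete(state, podUID)
	}
}

func main() {
	state := assignments{
		"5f855b43-d82a-4ce2-8471-f3117bf7fd52": {
			"registry-server":   "0-3",
			"extract-utilities": "0-3",
			"extract-content":   "0-3",
		},
	}
	removeStaleState(state, map[string]bool{}) // the pod has been removed
}
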
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/596da915-f039-417b-8700-55f18846872e-config-data\") pod \"ceilometer-0\" (UID: \"596da915-f039-417b-8700-55f18846872e\") " pod="openstack/ceilometer-0" Feb 03 09:06:40 crc kubenswrapper[4998]: I0203 09:06:40.914036 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/596da915-f039-417b-8700-55f18846872e-log-httpd\") pod \"ceilometer-0\" (UID: \"596da915-f039-417b-8700-55f18846872e\") " pod="openstack/ceilometer-0" Feb 03 09:06:41 crc kubenswrapper[4998]: I0203 09:06:41.015834 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/596da915-f039-417b-8700-55f18846872e-log-httpd\") pod \"ceilometer-0\" (UID: \"596da915-f039-417b-8700-55f18846872e\") " pod="openstack/ceilometer-0" Feb 03 09:06:41 crc kubenswrapper[4998]: I0203 09:06:41.015884 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tjxlz\" (UniqueName: \"kubernetes.io/projected/596da915-f039-417b-8700-55f18846872e-kube-api-access-tjxlz\") pod \"ceilometer-0\" (UID: \"596da915-f039-417b-8700-55f18846872e\") " pod="openstack/ceilometer-0" Feb 03 09:06:41 crc kubenswrapper[4998]: I0203 09:06:41.015906 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/596da915-f039-417b-8700-55f18846872e-scripts\") pod \"ceilometer-0\" (UID: \"596da915-f039-417b-8700-55f18846872e\") " pod="openstack/ceilometer-0" Feb 03 09:06:41 crc kubenswrapper[4998]: I0203 09:06:41.015937 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/596da915-f039-417b-8700-55f18846872e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"596da915-f039-417b-8700-55f18846872e\") " pod="openstack/ceilometer-0" Feb 03 09:06:41 crc kubenswrapper[4998]: I0203 09:06:41.015956 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/596da915-f039-417b-8700-55f18846872e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"596da915-f039-417b-8700-55f18846872e\") " pod="openstack/ceilometer-0" Feb 03 09:06:41 crc kubenswrapper[4998]: I0203 09:06:41.016036 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/596da915-f039-417b-8700-55f18846872e-run-httpd\") pod \"ceilometer-0\" (UID: \"596da915-f039-417b-8700-55f18846872e\") " pod="openstack/ceilometer-0" Feb 03 09:06:41 crc kubenswrapper[4998]: I0203 09:06:41.016065 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/596da915-f039-417b-8700-55f18846872e-config-data\") pod \"ceilometer-0\" (UID: \"596da915-f039-417b-8700-55f18846872e\") " pod="openstack/ceilometer-0" Feb 03 09:06:41 crc kubenswrapper[4998]: I0203 09:06:41.016428 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/596da915-f039-417b-8700-55f18846872e-log-httpd\") pod \"ceilometer-0\" (UID: \"596da915-f039-417b-8700-55f18846872e\") " pod="openstack/ceilometer-0" Feb 03 09:06:41 crc kubenswrapper[4998]: I0203 09:06:41.017116 4998 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/596da915-f039-417b-8700-55f18846872e-run-httpd\") pod \"ceilometer-0\" (UID: \"596da915-f039-417b-8700-55f18846872e\") " pod="openstack/ceilometer-0" Feb 03 09:06:41 crc kubenswrapper[4998]: I0203 09:06:41.024582 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/596da915-f039-417b-8700-55f18846872e-scripts\") pod \"ceilometer-0\" (UID: \"596da915-f039-417b-8700-55f18846872e\") " pod="openstack/ceilometer-0" Feb 03 09:06:41 crc kubenswrapper[4998]: I0203 09:06:41.024997 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/596da915-f039-417b-8700-55f18846872e-config-data\") pod \"ceilometer-0\" (UID: \"596da915-f039-417b-8700-55f18846872e\") " pod="openstack/ceilometer-0" Feb 03 09:06:41 crc kubenswrapper[4998]: I0203 09:06:41.026255 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/596da915-f039-417b-8700-55f18846872e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"596da915-f039-417b-8700-55f18846872e\") " pod="openstack/ceilometer-0" Feb 03 09:06:41 crc kubenswrapper[4998]: I0203 09:06:41.031102 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/596da915-f039-417b-8700-55f18846872e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"596da915-f039-417b-8700-55f18846872e\") " pod="openstack/ceilometer-0" Feb 03 09:06:41 crc kubenswrapper[4998]: I0203 09:06:41.035621 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tjxlz\" (UniqueName: \"kubernetes.io/projected/596da915-f039-417b-8700-55f18846872e-kube-api-access-tjxlz\") pod \"ceilometer-0\" (UID: \"596da915-f039-417b-8700-55f18846872e\") " pod="openstack/ceilometer-0" Feb 03 09:06:41 crc kubenswrapper[4998]: I0203 09:06:41.198718 4998 util.go:30] "No sandbox for pod can be found. 
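
Note: each ceilometer-0 volume above walks the same three-step pipeline: VerifyControllerAttachedVolume, then MountVolume started, then MountVolume.SetUp succeeded, with the plugin (empty-dir, secret, projected) named inside the UniqueName. A compressed sequential sketch of that pipeline; the real operation executor runs these operations asynchronously:

package main

import "fmt"

type volume struct{ name, plugin string }

func mountAll(pod string, vols []volume) {
	for _, v := range vols {
		fmt.Printf("VerifyControllerAttachedVolume started for volume %q pod=%q\n", v.name, pod)
	}
	for _, v := range vols {
		fmt.Printf("MountVolume started for volume %q pod=%q\n", v.name, pod)
		// The plugin's SetUp (empty-dir, secret, projected, ...) runs here.
		fmt.Printf("MountVolume.SetUp succeeded for volume %q (plugin %s) pod=%q\n", v.name, v.plugin, pod)
	}
}

func main() {
	mountAll("openstack/ceilometer-0", []volume{
		{"log-httpd", "kubernetes.io/empty-dir"},
		{"run-httpd", "kubernetes.io/empty-dir"},
		{"scripts", "kubernetes.io/secret"},
		{"config-data", "kubernetes.io/secret"},
		{"sg-core-conf-yaml", "kubernetes.io/secret"},
		{"combined-ca-bundle", "kubernetes.io/secret"},
		{"kube-api-access-tjxlz", "kubernetes.io/projected"},
	})
}
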
Need to start a new one" pod="openstack/ceilometer-0" Feb 03 09:06:41 crc kubenswrapper[4998]: I0203 09:06:41.652419 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 03 09:06:42 crc kubenswrapper[4998]: I0203 09:06:42.349345 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"596da915-f039-417b-8700-55f18846872e","Type":"ContainerStarted","Data":"a6308402886347991fab1058427e45c86736ae74f165ee1b5dbb523ef6e54cc6"} Feb 03 09:06:45 crc kubenswrapper[4998]: I0203 09:06:45.385937 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"596da915-f039-417b-8700-55f18846872e","Type":"ContainerStarted","Data":"8f573a7afae0349379da92c8c038de38f8a3a2ec02d5ac518dfd835d2e7f716b"} Feb 03 09:06:47 crc kubenswrapper[4998]: I0203 09:06:47.422699 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"596da915-f039-417b-8700-55f18846872e","Type":"ContainerStarted","Data":"952e6de60f0a115a26b676f93dfc2fdbcace0bdf5e76a1a1d5c153b7bee59872"} Feb 03 09:06:48 crc kubenswrapper[4998]: I0203 09:06:48.440909 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"596da915-f039-417b-8700-55f18846872e","Type":"ContainerStarted","Data":"29d4c983c22c963a324a47da6c7105282e9b2027172f3bf518db6d47c491a141"} Feb 03 09:06:48 crc kubenswrapper[4998]: I0203 09:06:48.831890 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Feb 03 09:06:48 crc kubenswrapper[4998]: I0203 09:06:48.835245 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Feb 03 09:06:49 crc kubenswrapper[4998]: I0203 09:06:49.468424 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Feb 03 09:06:50 crc kubenswrapper[4998]: I0203 09:06:50.476716 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"596da915-f039-417b-8700-55f18846872e","Type":"ContainerStarted","Data":"ec2951d89c95452bd0997abfd31d8f3b728d2392980a7c49cd64a6718e571ddd"} Feb 03 09:06:50 crc kubenswrapper[4998]: I0203 09:06:50.504518 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.602913374 podStartE2EDuration="10.504486245s" podCreationTimestamp="2026-02-03 09:06:40 +0000 UTC" firstStartedPulling="2026-02-03 09:06:41.653399938 +0000 UTC m=+8439.940093764" lastFinishedPulling="2026-02-03 09:06:49.554972819 +0000 UTC m=+8447.841666635" observedRunningTime="2026-02-03 09:06:50.496435427 +0000 UTC m=+8448.783129253" watchObservedRunningTime="2026-02-03 09:06:50.504486245 +0000 UTC m=+8448.791180061" Feb 03 09:06:51 crc kubenswrapper[4998]: I0203 09:06:51.485182 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 03 09:06:56 crc kubenswrapper[4998]: I0203 09:06:56.864188 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-create-cgx48"] Feb 03 09:06:56 crc kubenswrapper[4998]: I0203 09:06:56.866012 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-create-cgx48" Feb 03 09:06:56 crc kubenswrapper[4998]: I0203 09:06:56.879150 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-cgx48"] Feb 03 09:06:57 crc kubenswrapper[4998]: I0203 09:06:57.008385 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvxsz\" (UniqueName: \"kubernetes.io/projected/61eefdf4-447e-4503-b1c7-b36d866c1aec-kube-api-access-xvxsz\") pod \"aodh-db-create-cgx48\" (UID: \"61eefdf4-447e-4503-b1c7-b36d866c1aec\") " pod="openstack/aodh-db-create-cgx48" Feb 03 09:06:57 crc kubenswrapper[4998]: I0203 09:06:57.008706 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61eefdf4-447e-4503-b1c7-b36d866c1aec-operator-scripts\") pod \"aodh-db-create-cgx48\" (UID: \"61eefdf4-447e-4503-b1c7-b36d866c1aec\") " pod="openstack/aodh-db-create-cgx48" Feb 03 09:06:57 crc kubenswrapper[4998]: I0203 09:06:57.076989 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-c2af-account-create-update-5b6b9"] Feb 03 09:06:57 crc kubenswrapper[4998]: I0203 09:06:57.078600 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-c2af-account-create-update-5b6b9" Feb 03 09:06:57 crc kubenswrapper[4998]: I0203 09:06:57.081105 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-db-secret" Feb 03 09:06:57 crc kubenswrapper[4998]: I0203 09:06:57.111062 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvxsz\" (UniqueName: \"kubernetes.io/projected/61eefdf4-447e-4503-b1c7-b36d866c1aec-kube-api-access-xvxsz\") pod \"aodh-db-create-cgx48\" (UID: \"61eefdf4-447e-4503-b1c7-b36d866c1aec\") " pod="openstack/aodh-db-create-cgx48" Feb 03 09:06:57 crc kubenswrapper[4998]: I0203 09:06:57.111349 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61eefdf4-447e-4503-b1c7-b36d866c1aec-operator-scripts\") pod \"aodh-db-create-cgx48\" (UID: \"61eefdf4-447e-4503-b1c7-b36d866c1aec\") " pod="openstack/aodh-db-create-cgx48" Feb 03 09:06:57 crc kubenswrapper[4998]: I0203 09:06:57.112259 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61eefdf4-447e-4503-b1c7-b36d866c1aec-operator-scripts\") pod \"aodh-db-create-cgx48\" (UID: \"61eefdf4-447e-4503-b1c7-b36d866c1aec\") " pod="openstack/aodh-db-create-cgx48" Feb 03 09:06:57 crc kubenswrapper[4998]: I0203 09:06:57.129703 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvxsz\" (UniqueName: \"kubernetes.io/projected/61eefdf4-447e-4503-b1c7-b36d866c1aec-kube-api-access-xvxsz\") pod \"aodh-db-create-cgx48\" (UID: \"61eefdf4-447e-4503-b1c7-b36d866c1aec\") " pod="openstack/aodh-db-create-cgx48" Feb 03 09:06:57 crc kubenswrapper[4998]: I0203 09:06:57.169867 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-c2af-account-create-update-5b6b9"] Feb 03 09:06:57 crc kubenswrapper[4998]: I0203 09:06:57.194666 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-create-cgx48" Feb 03 09:06:57 crc kubenswrapper[4998]: I0203 09:06:57.213635 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aecb9cbd-a669-4ea1-969e-424637c3b33d-operator-scripts\") pod \"aodh-c2af-account-create-update-5b6b9\" (UID: \"aecb9cbd-a669-4ea1-969e-424637c3b33d\") " pod="openstack/aodh-c2af-account-create-update-5b6b9" Feb 03 09:06:57 crc kubenswrapper[4998]: I0203 09:06:57.213707 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bq7nc\" (UniqueName: \"kubernetes.io/projected/aecb9cbd-a669-4ea1-969e-424637c3b33d-kube-api-access-bq7nc\") pod \"aodh-c2af-account-create-update-5b6b9\" (UID: \"aecb9cbd-a669-4ea1-969e-424637c3b33d\") " pod="openstack/aodh-c2af-account-create-update-5b6b9" Feb 03 09:06:57 crc kubenswrapper[4998]: I0203 09:06:57.318736 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aecb9cbd-a669-4ea1-969e-424637c3b33d-operator-scripts\") pod \"aodh-c2af-account-create-update-5b6b9\" (UID: \"aecb9cbd-a669-4ea1-969e-424637c3b33d\") " pod="openstack/aodh-c2af-account-create-update-5b6b9" Feb 03 09:06:57 crc kubenswrapper[4998]: I0203 09:06:57.319072 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bq7nc\" (UniqueName: \"kubernetes.io/projected/aecb9cbd-a669-4ea1-969e-424637c3b33d-kube-api-access-bq7nc\") pod \"aodh-c2af-account-create-update-5b6b9\" (UID: \"aecb9cbd-a669-4ea1-969e-424637c3b33d\") " pod="openstack/aodh-c2af-account-create-update-5b6b9" Feb 03 09:06:57 crc kubenswrapper[4998]: I0203 09:06:57.319769 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aecb9cbd-a669-4ea1-969e-424637c3b33d-operator-scripts\") pod \"aodh-c2af-account-create-update-5b6b9\" (UID: \"aecb9cbd-a669-4ea1-969e-424637c3b33d\") " pod="openstack/aodh-c2af-account-create-update-5b6b9" Feb 03 09:06:57 crc kubenswrapper[4998]: I0203 09:06:57.343530 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bq7nc\" (UniqueName: \"kubernetes.io/projected/aecb9cbd-a669-4ea1-969e-424637c3b33d-kube-api-access-bq7nc\") pod \"aodh-c2af-account-create-update-5b6b9\" (UID: \"aecb9cbd-a669-4ea1-969e-424637c3b33d\") " pod="openstack/aodh-c2af-account-create-update-5b6b9" Feb 03 09:06:57 crc kubenswrapper[4998]: I0203 09:06:57.500390 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-c2af-account-create-update-5b6b9" Feb 03 09:06:57 crc kubenswrapper[4998]: I0203 09:06:57.723075 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-cgx48"] Feb 03 09:06:57 crc kubenswrapper[4998]: W0203 09:06:57.951874 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaecb9cbd_a669_4ea1_969e_424637c3b33d.slice/crio-732d94898499ca04eef755efe0d9e9c47dece415811c7b2898d423f697a166ab WatchSource:0}: Error finding container 732d94898499ca04eef755efe0d9e9c47dece415811c7b2898d423f697a166ab: Status 404 returned error can't find the container with id 732d94898499ca04eef755efe0d9e9c47dece415811c7b2898d423f697a166ab Feb 03 09:06:57 crc kubenswrapper[4998]: I0203 09:06:57.953472 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-c2af-account-create-update-5b6b9"] Feb 03 09:06:58 crc kubenswrapper[4998]: I0203 09:06:58.566858 4998 generic.go:334] "Generic (PLEG): container finished" podID="61eefdf4-447e-4503-b1c7-b36d866c1aec" containerID="e1a128cad548385ba72422f0fc1e5d5a3dc32c9886536cecba0a874cd65a5b11" exitCode=0 Feb 03 09:06:58 crc kubenswrapper[4998]: I0203 09:06:58.566954 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-cgx48" event={"ID":"61eefdf4-447e-4503-b1c7-b36d866c1aec","Type":"ContainerDied","Data":"e1a128cad548385ba72422f0fc1e5d5a3dc32c9886536cecba0a874cd65a5b11"} Feb 03 09:06:58 crc kubenswrapper[4998]: I0203 09:06:58.567274 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-cgx48" event={"ID":"61eefdf4-447e-4503-b1c7-b36d866c1aec","Type":"ContainerStarted","Data":"191be4ae9f36dbb04f6c7a186852a58a1c742ace40b2e8225039550cb5b1216d"} Feb 03 09:06:58 crc kubenswrapper[4998]: I0203 09:06:58.569150 4998 generic.go:334] "Generic (PLEG): container finished" podID="aecb9cbd-a669-4ea1-969e-424637c3b33d" containerID="305fc5bd303af446d4cb6a4fd9ddc6a34f4f51d82f1c3395fb989d0833ed9c1c" exitCode=0 Feb 03 09:06:58 crc kubenswrapper[4998]: I0203 09:06:58.569209 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-c2af-account-create-update-5b6b9" event={"ID":"aecb9cbd-a669-4ea1-969e-424637c3b33d","Type":"ContainerDied","Data":"305fc5bd303af446d4cb6a4fd9ddc6a34f4f51d82f1c3395fb989d0833ed9c1c"} Feb 03 09:06:58 crc kubenswrapper[4998]: I0203 09:06:58.569253 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-c2af-account-create-update-5b6b9" event={"ID":"aecb9cbd-a669-4ea1-969e-424637c3b33d","Type":"ContainerStarted","Data":"732d94898499ca04eef755efe0d9e9c47dece415811c7b2898d423f697a166ab"} Feb 03 09:07:00 crc kubenswrapper[4998]: I0203 09:07:00.058578 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-c2af-account-create-update-5b6b9" Feb 03 09:07:00 crc kubenswrapper[4998]: I0203 09:07:00.065902 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-create-cgx48" Feb 03 09:07:00 crc kubenswrapper[4998]: I0203 09:07:00.179738 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aecb9cbd-a669-4ea1-969e-424637c3b33d-operator-scripts\") pod \"aecb9cbd-a669-4ea1-969e-424637c3b33d\" (UID: \"aecb9cbd-a669-4ea1-969e-424637c3b33d\") " Feb 03 09:07:00 crc kubenswrapper[4998]: I0203 09:07:00.179984 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bq7nc\" (UniqueName: \"kubernetes.io/projected/aecb9cbd-a669-4ea1-969e-424637c3b33d-kube-api-access-bq7nc\") pod \"aecb9cbd-a669-4ea1-969e-424637c3b33d\" (UID: \"aecb9cbd-a669-4ea1-969e-424637c3b33d\") " Feb 03 09:07:00 crc kubenswrapper[4998]: I0203 09:07:00.180058 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61eefdf4-447e-4503-b1c7-b36d866c1aec-operator-scripts\") pod \"61eefdf4-447e-4503-b1c7-b36d866c1aec\" (UID: \"61eefdf4-447e-4503-b1c7-b36d866c1aec\") " Feb 03 09:07:00 crc kubenswrapper[4998]: I0203 09:07:00.180136 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xvxsz\" (UniqueName: \"kubernetes.io/projected/61eefdf4-447e-4503-b1c7-b36d866c1aec-kube-api-access-xvxsz\") pod \"61eefdf4-447e-4503-b1c7-b36d866c1aec\" (UID: \"61eefdf4-447e-4503-b1c7-b36d866c1aec\") " Feb 03 09:07:00 crc kubenswrapper[4998]: I0203 09:07:00.180581 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/61eefdf4-447e-4503-b1c7-b36d866c1aec-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "61eefdf4-447e-4503-b1c7-b36d866c1aec" (UID: "61eefdf4-447e-4503-b1c7-b36d866c1aec"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 09:07:00 crc kubenswrapper[4998]: I0203 09:07:00.181252 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aecb9cbd-a669-4ea1-969e-424637c3b33d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "aecb9cbd-a669-4ea1-969e-424637c3b33d" (UID: "aecb9cbd-a669-4ea1-969e-424637c3b33d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 09:07:00 crc kubenswrapper[4998]: I0203 09:07:00.187574 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aecb9cbd-a669-4ea1-969e-424637c3b33d-kube-api-access-bq7nc" (OuterVolumeSpecName: "kube-api-access-bq7nc") pod "aecb9cbd-a669-4ea1-969e-424637c3b33d" (UID: "aecb9cbd-a669-4ea1-969e-424637c3b33d"). InnerVolumeSpecName "kube-api-access-bq7nc". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:07:00 crc kubenswrapper[4998]: I0203 09:07:00.187907 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61eefdf4-447e-4503-b1c7-b36d866c1aec-kube-api-access-xvxsz" (OuterVolumeSpecName: "kube-api-access-xvxsz") pod "61eefdf4-447e-4503-b1c7-b36d866c1aec" (UID: "61eefdf4-447e-4503-b1c7-b36d866c1aec"). InnerVolumeSpecName "kube-api-access-xvxsz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:07:00 crc kubenswrapper[4998]: I0203 09:07:00.282246 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bq7nc\" (UniqueName: \"kubernetes.io/projected/aecb9cbd-a669-4ea1-969e-424637c3b33d-kube-api-access-bq7nc\") on node \"crc\" DevicePath \"\"" Feb 03 09:07:00 crc kubenswrapper[4998]: I0203 09:07:00.282295 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61eefdf4-447e-4503-b1c7-b36d866c1aec-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 09:07:00 crc kubenswrapper[4998]: I0203 09:07:00.282307 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xvxsz\" (UniqueName: \"kubernetes.io/projected/61eefdf4-447e-4503-b1c7-b36d866c1aec-kube-api-access-xvxsz\") on node \"crc\" DevicePath \"\"" Feb 03 09:07:00 crc kubenswrapper[4998]: I0203 09:07:00.282316 4998 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aecb9cbd-a669-4ea1-969e-424637c3b33d-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 09:07:00 crc kubenswrapper[4998]: I0203 09:07:00.591157 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-c2af-account-create-update-5b6b9" Feb 03 09:07:00 crc kubenswrapper[4998]: I0203 09:07:00.591167 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-c2af-account-create-update-5b6b9" event={"ID":"aecb9cbd-a669-4ea1-969e-424637c3b33d","Type":"ContainerDied","Data":"732d94898499ca04eef755efe0d9e9c47dece415811c7b2898d423f697a166ab"} Feb 03 09:07:00 crc kubenswrapper[4998]: I0203 09:07:00.591241 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="732d94898499ca04eef755efe0d9e9c47dece415811c7b2898d423f697a166ab" Feb 03 09:07:00 crc kubenswrapper[4998]: I0203 09:07:00.592657 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-cgx48" event={"ID":"61eefdf4-447e-4503-b1c7-b36d866c1aec","Type":"ContainerDied","Data":"191be4ae9f36dbb04f6c7a186852a58a1c742ace40b2e8225039550cb5b1216d"} Feb 03 09:07:00 crc kubenswrapper[4998]: I0203 09:07:00.592692 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="191be4ae9f36dbb04f6c7a186852a58a1c742ace40b2e8225039550cb5b1216d" Feb 03 09:07:00 crc kubenswrapper[4998]: I0203 09:07:00.592709 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-create-cgx48" Feb 03 09:07:02 crc kubenswrapper[4998]: I0203 09:07:02.317292 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-sync-gghpz"] Feb 03 09:07:02 crc kubenswrapper[4998]: E0203 09:07:02.318191 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aecb9cbd-a669-4ea1-969e-424637c3b33d" containerName="mariadb-account-create-update" Feb 03 09:07:02 crc kubenswrapper[4998]: I0203 09:07:02.318211 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="aecb9cbd-a669-4ea1-969e-424637c3b33d" containerName="mariadb-account-create-update" Feb 03 09:07:02 crc kubenswrapper[4998]: E0203 09:07:02.318255 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61eefdf4-447e-4503-b1c7-b36d866c1aec" containerName="mariadb-database-create" Feb 03 09:07:02 crc kubenswrapper[4998]: I0203 09:07:02.318266 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="61eefdf4-447e-4503-b1c7-b36d866c1aec" containerName="mariadb-database-create" Feb 03 09:07:02 crc kubenswrapper[4998]: I0203 09:07:02.318502 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="61eefdf4-447e-4503-b1c7-b36d866c1aec" containerName="mariadb-database-create" Feb 03 09:07:02 crc kubenswrapper[4998]: I0203 09:07:02.318524 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="aecb9cbd-a669-4ea1-969e-424637c3b33d" containerName="mariadb-account-create-update" Feb 03 09:07:02 crc kubenswrapper[4998]: I0203 09:07:02.319565 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-gghpz" Feb 03 09:07:02 crc kubenswrapper[4998]: I0203 09:07:02.321910 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Feb 03 09:07:02 crc kubenswrapper[4998]: I0203 09:07:02.322199 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Feb 03 09:07:02 crc kubenswrapper[4998]: I0203 09:07:02.322361 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-snst4" Feb 03 09:07:02 crc kubenswrapper[4998]: I0203 09:07:02.326583 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Feb 03 09:07:02 crc kubenswrapper[4998]: I0203 09:07:02.345686 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-gghpz"] Feb 03 09:07:02 crc kubenswrapper[4998]: I0203 09:07:02.425379 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/af5e8da4-94ba-438f-b45d-8052aff4265f-scripts\") pod \"aodh-db-sync-gghpz\" (UID: \"af5e8da4-94ba-438f-b45d-8052aff4265f\") " pod="openstack/aodh-db-sync-gghpz" Feb 03 09:07:02 crc kubenswrapper[4998]: I0203 09:07:02.425738 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af5e8da4-94ba-438f-b45d-8052aff4265f-config-data\") pod \"aodh-db-sync-gghpz\" (UID: \"af5e8da4-94ba-438f-b45d-8052aff4265f\") " pod="openstack/aodh-db-sync-gghpz" Feb 03 09:07:02 crc kubenswrapper[4998]: I0203 09:07:02.425772 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ntvtb\" (UniqueName: \"kubernetes.io/projected/af5e8da4-94ba-438f-b45d-8052aff4265f-kube-api-access-ntvtb\") pod \"aodh-db-sync-gghpz\" (UID: 
\"af5e8da4-94ba-438f-b45d-8052aff4265f\") " pod="openstack/aodh-db-sync-gghpz" Feb 03 09:07:02 crc kubenswrapper[4998]: I0203 09:07:02.425831 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af5e8da4-94ba-438f-b45d-8052aff4265f-combined-ca-bundle\") pod \"aodh-db-sync-gghpz\" (UID: \"af5e8da4-94ba-438f-b45d-8052aff4265f\") " pod="openstack/aodh-db-sync-gghpz" Feb 03 09:07:02 crc kubenswrapper[4998]: I0203 09:07:02.528081 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/af5e8da4-94ba-438f-b45d-8052aff4265f-scripts\") pod \"aodh-db-sync-gghpz\" (UID: \"af5e8da4-94ba-438f-b45d-8052aff4265f\") " pod="openstack/aodh-db-sync-gghpz" Feb 03 09:07:02 crc kubenswrapper[4998]: I0203 09:07:02.528169 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af5e8da4-94ba-438f-b45d-8052aff4265f-config-data\") pod \"aodh-db-sync-gghpz\" (UID: \"af5e8da4-94ba-438f-b45d-8052aff4265f\") " pod="openstack/aodh-db-sync-gghpz" Feb 03 09:07:02 crc kubenswrapper[4998]: I0203 09:07:02.528205 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ntvtb\" (UniqueName: \"kubernetes.io/projected/af5e8da4-94ba-438f-b45d-8052aff4265f-kube-api-access-ntvtb\") pod \"aodh-db-sync-gghpz\" (UID: \"af5e8da4-94ba-438f-b45d-8052aff4265f\") " pod="openstack/aodh-db-sync-gghpz" Feb 03 09:07:02 crc kubenswrapper[4998]: I0203 09:07:02.528254 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af5e8da4-94ba-438f-b45d-8052aff4265f-combined-ca-bundle\") pod \"aodh-db-sync-gghpz\" (UID: \"af5e8da4-94ba-438f-b45d-8052aff4265f\") " pod="openstack/aodh-db-sync-gghpz" Feb 03 09:07:02 crc kubenswrapper[4998]: I0203 09:07:02.530518 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Feb 03 09:07:02 crc kubenswrapper[4998]: I0203 09:07:02.531502 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Feb 03 09:07:02 crc kubenswrapper[4998]: I0203 09:07:02.533131 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af5e8da4-94ba-438f-b45d-8052aff4265f-combined-ca-bundle\") pod \"aodh-db-sync-gghpz\" (UID: \"af5e8da4-94ba-438f-b45d-8052aff4265f\") " pod="openstack/aodh-db-sync-gghpz" Feb 03 09:07:02 crc kubenswrapper[4998]: I0203 09:07:02.544859 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af5e8da4-94ba-438f-b45d-8052aff4265f-config-data\") pod \"aodh-db-sync-gghpz\" (UID: \"af5e8da4-94ba-438f-b45d-8052aff4265f\") " pod="openstack/aodh-db-sync-gghpz" Feb 03 09:07:02 crc kubenswrapper[4998]: I0203 09:07:02.545215 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/af5e8da4-94ba-438f-b45d-8052aff4265f-scripts\") pod \"aodh-db-sync-gghpz\" (UID: \"af5e8da4-94ba-438f-b45d-8052aff4265f\") " pod="openstack/aodh-db-sync-gghpz" Feb 03 09:07:02 crc kubenswrapper[4998]: I0203 09:07:02.553229 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ntvtb\" (UniqueName: 
\"kubernetes.io/projected/af5e8da4-94ba-438f-b45d-8052aff4265f-kube-api-access-ntvtb\") pod \"aodh-db-sync-gghpz\" (UID: \"af5e8da4-94ba-438f-b45d-8052aff4265f\") " pod="openstack/aodh-db-sync-gghpz" Feb 03 09:07:02 crc kubenswrapper[4998]: I0203 09:07:02.643276 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-snst4" Feb 03 09:07:02 crc kubenswrapper[4998]: I0203 09:07:02.651745 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-gghpz" Feb 03 09:07:03 crc kubenswrapper[4998]: W0203 09:07:03.149196 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaf5e8da4_94ba_438f_b45d_8052aff4265f.slice/crio-54ae9148817287c08668fd149785042a49c779d2763423addfecc23862fab258 WatchSource:0}: Error finding container 54ae9148817287c08668fd149785042a49c779d2763423addfecc23862fab258: Status 404 returned error can't find the container with id 54ae9148817287c08668fd149785042a49c779d2763423addfecc23862fab258 Feb 03 09:07:03 crc kubenswrapper[4998]: I0203 09:07:03.157129 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-gghpz"] Feb 03 09:07:03 crc kubenswrapper[4998]: I0203 09:07:03.629479 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-gghpz" event={"ID":"af5e8da4-94ba-438f-b45d-8052aff4265f","Type":"ContainerStarted","Data":"54ae9148817287c08668fd149785042a49c779d2763423addfecc23862fab258"} Feb 03 09:07:07 crc kubenswrapper[4998]: I0203 09:07:07.984332 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Feb 03 09:07:08 crc kubenswrapper[4998]: I0203 09:07:08.739926 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-gghpz" event={"ID":"af5e8da4-94ba-438f-b45d-8052aff4265f","Type":"ContainerStarted","Data":"00478f3fdb678a0915b8dc7c98090feab63d5bbd325facb4ddd67955e212997c"} Feb 03 09:07:08 crc kubenswrapper[4998]: I0203 09:07:08.771166 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-db-sync-gghpz" podStartSLOduration=1.941887394 podStartE2EDuration="6.771139715s" podCreationTimestamp="2026-02-03 09:07:02 +0000 UTC" firstStartedPulling="2026-02-03 09:07:03.151560247 +0000 UTC m=+8461.438254043" lastFinishedPulling="2026-02-03 09:07:07.980812568 +0000 UTC m=+8466.267506364" observedRunningTime="2026-02-03 09:07:08.764041944 +0000 UTC m=+8467.050735830" watchObservedRunningTime="2026-02-03 09:07:08.771139715 +0000 UTC m=+8467.057833531" Feb 03 09:07:11 crc kubenswrapper[4998]: I0203 09:07:11.205270 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Feb 03 09:07:11 crc kubenswrapper[4998]: I0203 09:07:11.772663 4998 generic.go:334] "Generic (PLEG): container finished" podID="af5e8da4-94ba-438f-b45d-8052aff4265f" containerID="00478f3fdb678a0915b8dc7c98090feab63d5bbd325facb4ddd67955e212997c" exitCode=0 Feb 03 09:07:11 crc kubenswrapper[4998]: I0203 09:07:11.772702 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-gghpz" event={"ID":"af5e8da4-94ba-438f-b45d-8052aff4265f","Type":"ContainerDied","Data":"00478f3fdb678a0915b8dc7c98090feab63d5bbd325facb4ddd67955e212997c"} Feb 03 09:07:12 crc kubenswrapper[4998]: I0203 09:07:12.038161 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-g6h6c"] Feb 03 09:07:12 crc kubenswrapper[4998]: 
I0203 09:07:12.051065 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-g6h6c"] Feb 03 09:07:12 crc kubenswrapper[4998]: I0203 09:07:12.444604 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3989a09a-1771-44a0-b2e4-32cf6683215b" path="/var/lib/kubelet/pods/3989a09a-1771-44a0-b2e4-32cf6683215b/volumes" Feb 03 09:07:12 crc kubenswrapper[4998]: I0203 09:07:12.476195 4998 scope.go:117] "RemoveContainer" containerID="9fe4ee232680b2064b5d5b5af07787a530bff7fe8a504f89061cfe01348795cd" Feb 03 09:07:13 crc kubenswrapper[4998]: I0203 09:07:13.048975 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-71df-account-create-update-cgzrb"] Feb 03 09:07:13 crc kubenswrapper[4998]: I0203 09:07:13.059893 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-71df-account-create-update-cgzrb"] Feb 03 09:07:13 crc kubenswrapper[4998]: I0203 09:07:13.189817 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-gghpz" Feb 03 09:07:13 crc kubenswrapper[4998]: I0203 09:07:13.370573 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ntvtb\" (UniqueName: \"kubernetes.io/projected/af5e8da4-94ba-438f-b45d-8052aff4265f-kube-api-access-ntvtb\") pod \"af5e8da4-94ba-438f-b45d-8052aff4265f\" (UID: \"af5e8da4-94ba-438f-b45d-8052aff4265f\") " Feb 03 09:07:13 crc kubenswrapper[4998]: I0203 09:07:13.370622 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af5e8da4-94ba-438f-b45d-8052aff4265f-config-data\") pod \"af5e8da4-94ba-438f-b45d-8052aff4265f\" (UID: \"af5e8da4-94ba-438f-b45d-8052aff4265f\") " Feb 03 09:07:13 crc kubenswrapper[4998]: I0203 09:07:13.370660 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/af5e8da4-94ba-438f-b45d-8052aff4265f-scripts\") pod \"af5e8da4-94ba-438f-b45d-8052aff4265f\" (UID: \"af5e8da4-94ba-438f-b45d-8052aff4265f\") " Feb 03 09:07:13 crc kubenswrapper[4998]: I0203 09:07:13.370795 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af5e8da4-94ba-438f-b45d-8052aff4265f-combined-ca-bundle\") pod \"af5e8da4-94ba-438f-b45d-8052aff4265f\" (UID: \"af5e8da4-94ba-438f-b45d-8052aff4265f\") " Feb 03 09:07:13 crc kubenswrapper[4998]: I0203 09:07:13.376993 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af5e8da4-94ba-438f-b45d-8052aff4265f-kube-api-access-ntvtb" (OuterVolumeSpecName: "kube-api-access-ntvtb") pod "af5e8da4-94ba-438f-b45d-8052aff4265f" (UID: "af5e8da4-94ba-438f-b45d-8052aff4265f"). InnerVolumeSpecName "kube-api-access-ntvtb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:07:13 crc kubenswrapper[4998]: I0203 09:07:13.379951 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af5e8da4-94ba-438f-b45d-8052aff4265f-scripts" (OuterVolumeSpecName: "scripts") pod "af5e8da4-94ba-438f-b45d-8052aff4265f" (UID: "af5e8da4-94ba-438f-b45d-8052aff4265f"). InnerVolumeSpecName "scripts". 
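
Note: "Cleaned up orphaned pod volumes dir" is housekeeping from kubelet_volumes.go: after a pod is REMOVEd and its volumes torn down, the now-empty /var/lib/kubelet/pods/<uid>/volumes directory is deleted. A conservative stdlib sketch of such a sweep, using the path layout shown in the log and relying on os.Remove refusing non-empty directories as a safety guard; the real kubelet performs additional mount checks:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// cleanupOrphanedPodDirs removes the volumes dir of every pod directory
// under root whose UID is no longer in the active set.
func cleanupOrphanedPodDirs(root string, active map[string]bool) error {
	entries, err := os.ReadDir(root)
	if err != nil {
		return err
	}
	for _, e := range entries {
		if !e.IsDir() || active[e.Name()] {
			continue
		}
		volDir := filepath.Join(root, e.Name(), "volumes")
		// os.Remove only succeeds on an empty directory, so a volume
		// that is somehow still mounted (non-empty) is left alone.
		if err := os.Remove(volDir); err == nil {
			fmt.Printf("Cleaned up orphaned pod volumes dir podUID=%q path=%q\n", e.Name(), volDir)
		}
	}
	return nil
}

func main() {
	_ = cleanupOrphanedPodDirs("/var/lib/kubelet/pods", map[string]bool{})
}
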
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:07:13 crc kubenswrapper[4998]: I0203 09:07:13.406683 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af5e8da4-94ba-438f-b45d-8052aff4265f-config-data" (OuterVolumeSpecName: "config-data") pod "af5e8da4-94ba-438f-b45d-8052aff4265f" (UID: "af5e8da4-94ba-438f-b45d-8052aff4265f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:07:13 crc kubenswrapper[4998]: I0203 09:07:13.410884 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af5e8da4-94ba-438f-b45d-8052aff4265f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "af5e8da4-94ba-438f-b45d-8052aff4265f" (UID: "af5e8da4-94ba-438f-b45d-8052aff4265f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:07:13 crc kubenswrapper[4998]: I0203 09:07:13.473342 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ntvtb\" (UniqueName: \"kubernetes.io/projected/af5e8da4-94ba-438f-b45d-8052aff4265f-kube-api-access-ntvtb\") on node \"crc\" DevicePath \"\"" Feb 03 09:07:13 crc kubenswrapper[4998]: I0203 09:07:13.473391 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af5e8da4-94ba-438f-b45d-8052aff4265f-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 09:07:13 crc kubenswrapper[4998]: I0203 09:07:13.473403 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/af5e8da4-94ba-438f-b45d-8052aff4265f-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 09:07:13 crc kubenswrapper[4998]: I0203 09:07:13.473412 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af5e8da4-94ba-438f-b45d-8052aff4265f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 09:07:13 crc kubenswrapper[4998]: I0203 09:07:13.796013 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-gghpz" event={"ID":"af5e8da4-94ba-438f-b45d-8052aff4265f","Type":"ContainerDied","Data":"54ae9148817287c08668fd149785042a49c779d2763423addfecc23862fab258"} Feb 03 09:07:13 crc kubenswrapper[4998]: I0203 09:07:13.796064 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="54ae9148817287c08668fd149785042a49c779d2763423addfecc23862fab258" Feb 03 09:07:13 crc kubenswrapper[4998]: I0203 09:07:13.796142 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-gghpz" Feb 03 09:07:14 crc kubenswrapper[4998]: I0203 09:07:14.441138 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88e180ca-ddca-470a-9777-152fd462fc8b" path="/var/lib/kubelet/pods/88e180ca-ddca-470a-9777-152fd462fc8b/volumes" Feb 03 09:07:17 crc kubenswrapper[4998]: I0203 09:07:17.519084 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-0"] Feb 03 09:07:17 crc kubenswrapper[4998]: E0203 09:07:17.519636 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af5e8da4-94ba-438f-b45d-8052aff4265f" containerName="aodh-db-sync" Feb 03 09:07:17 crc kubenswrapper[4998]: I0203 09:07:17.519652 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="af5e8da4-94ba-438f-b45d-8052aff4265f" containerName="aodh-db-sync" Feb 03 09:07:17 crc kubenswrapper[4998]: I0203 09:07:17.520002 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="af5e8da4-94ba-438f-b45d-8052aff4265f" containerName="aodh-db-sync" Feb 03 09:07:17 crc kubenswrapper[4998]: I0203 09:07:17.526834 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0" Feb 03 09:07:17 crc kubenswrapper[4998]: I0203 09:07:17.530371 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Feb 03 09:07:17 crc kubenswrapper[4998]: I0203 09:07:17.530456 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Feb 03 09:07:17 crc kubenswrapper[4998]: I0203 09:07:17.530641 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-snst4" Feb 03 09:07:17 crc kubenswrapper[4998]: I0203 09:07:17.547630 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Feb 03 09:07:17 crc kubenswrapper[4998]: I0203 09:07:17.572120 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hhw8r\" (UniqueName: \"kubernetes.io/projected/469f046f-ad13-49c3-b9da-ac6c46b48882-kube-api-access-hhw8r\") pod \"aodh-0\" (UID: \"469f046f-ad13-49c3-b9da-ac6c46b48882\") " pod="openstack/aodh-0" Feb 03 09:07:17 crc kubenswrapper[4998]: I0203 09:07:17.572175 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/469f046f-ad13-49c3-b9da-ac6c46b48882-config-data\") pod \"aodh-0\" (UID: \"469f046f-ad13-49c3-b9da-ac6c46b48882\") " pod="openstack/aodh-0" Feb 03 09:07:17 crc kubenswrapper[4998]: I0203 09:07:17.572209 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/469f046f-ad13-49c3-b9da-ac6c46b48882-scripts\") pod \"aodh-0\" (UID: \"469f046f-ad13-49c3-b9da-ac6c46b48882\") " pod="openstack/aodh-0" Feb 03 09:07:17 crc kubenswrapper[4998]: I0203 09:07:17.572282 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/469f046f-ad13-49c3-b9da-ac6c46b48882-combined-ca-bundle\") pod \"aodh-0\" (UID: \"469f046f-ad13-49c3-b9da-ac6c46b48882\") " pod="openstack/aodh-0" Feb 03 09:07:17 crc kubenswrapper[4998]: I0203 09:07:17.675058 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hhw8r\" (UniqueName: 
\"kubernetes.io/projected/469f046f-ad13-49c3-b9da-ac6c46b48882-kube-api-access-hhw8r\") pod \"aodh-0\" (UID: \"469f046f-ad13-49c3-b9da-ac6c46b48882\") " pod="openstack/aodh-0" Feb 03 09:07:17 crc kubenswrapper[4998]: I0203 09:07:17.675125 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/469f046f-ad13-49c3-b9da-ac6c46b48882-config-data\") pod \"aodh-0\" (UID: \"469f046f-ad13-49c3-b9da-ac6c46b48882\") " pod="openstack/aodh-0" Feb 03 09:07:17 crc kubenswrapper[4998]: I0203 09:07:17.675170 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/469f046f-ad13-49c3-b9da-ac6c46b48882-scripts\") pod \"aodh-0\" (UID: \"469f046f-ad13-49c3-b9da-ac6c46b48882\") " pod="openstack/aodh-0" Feb 03 09:07:17 crc kubenswrapper[4998]: I0203 09:07:17.675268 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/469f046f-ad13-49c3-b9da-ac6c46b48882-combined-ca-bundle\") pod \"aodh-0\" (UID: \"469f046f-ad13-49c3-b9da-ac6c46b48882\") " pod="openstack/aodh-0" Feb 03 09:07:17 crc kubenswrapper[4998]: I0203 09:07:17.685546 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/469f046f-ad13-49c3-b9da-ac6c46b48882-combined-ca-bundle\") pod \"aodh-0\" (UID: \"469f046f-ad13-49c3-b9da-ac6c46b48882\") " pod="openstack/aodh-0" Feb 03 09:07:17 crc kubenswrapper[4998]: I0203 09:07:17.686540 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/469f046f-ad13-49c3-b9da-ac6c46b48882-config-data\") pod \"aodh-0\" (UID: \"469f046f-ad13-49c3-b9da-ac6c46b48882\") " pod="openstack/aodh-0" Feb 03 09:07:17 crc kubenswrapper[4998]: I0203 09:07:17.694282 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/469f046f-ad13-49c3-b9da-ac6c46b48882-scripts\") pod \"aodh-0\" (UID: \"469f046f-ad13-49c3-b9da-ac6c46b48882\") " pod="openstack/aodh-0" Feb 03 09:07:17 crc kubenswrapper[4998]: I0203 09:07:17.708938 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hhw8r\" (UniqueName: \"kubernetes.io/projected/469f046f-ad13-49c3-b9da-ac6c46b48882-kube-api-access-hhw8r\") pod \"aodh-0\" (UID: \"469f046f-ad13-49c3-b9da-ac6c46b48882\") " pod="openstack/aodh-0" Feb 03 09:07:17 crc kubenswrapper[4998]: I0203 09:07:17.891702 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-0" Feb 03 09:07:18 crc kubenswrapper[4998]: I0203 09:07:18.478910 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Feb 03 09:07:18 crc kubenswrapper[4998]: I0203 09:07:18.864251 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"469f046f-ad13-49c3-b9da-ac6c46b48882","Type":"ContainerStarted","Data":"d0194ac1da9e71489339ba82b7bbee79187d72953268d3de7e8b1d8d1bfe339d"} Feb 03 09:07:19 crc kubenswrapper[4998]: I0203 09:07:19.674808 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 03 09:07:19 crc kubenswrapper[4998]: I0203 09:07:19.675337 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="596da915-f039-417b-8700-55f18846872e" containerName="ceilometer-central-agent" containerID="cri-o://8f573a7afae0349379da92c8c038de38f8a3a2ec02d5ac518dfd835d2e7f716b" gracePeriod=30 Feb 03 09:07:19 crc kubenswrapper[4998]: I0203 09:07:19.675527 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="596da915-f039-417b-8700-55f18846872e" containerName="ceilometer-notification-agent" containerID="cri-o://952e6de60f0a115a26b676f93dfc2fdbcace0bdf5e76a1a1d5c153b7bee59872" gracePeriod=30 Feb 03 09:07:19 crc kubenswrapper[4998]: I0203 09:07:19.675546 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="596da915-f039-417b-8700-55f18846872e" containerName="sg-core" containerID="cri-o://29d4c983c22c963a324a47da6c7105282e9b2027172f3bf518db6d47c491a141" gracePeriod=30 Feb 03 09:07:19 crc kubenswrapper[4998]: I0203 09:07:19.675945 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="596da915-f039-417b-8700-55f18846872e" containerName="proxy-httpd" containerID="cri-o://ec2951d89c95452bd0997abfd31d8f3b728d2392980a7c49cd64a6718e571ddd" gracePeriod=30 Feb 03 09:07:19 crc kubenswrapper[4998]: I0203 09:07:19.875357 4998 generic.go:334] "Generic (PLEG): container finished" podID="596da915-f039-417b-8700-55f18846872e" containerID="ec2951d89c95452bd0997abfd31d8f3b728d2392980a7c49cd64a6718e571ddd" exitCode=0 Feb 03 09:07:19 crc kubenswrapper[4998]: I0203 09:07:19.875647 4998 generic.go:334] "Generic (PLEG): container finished" podID="596da915-f039-417b-8700-55f18846872e" containerID="29d4c983c22c963a324a47da6c7105282e9b2027172f3bf518db6d47c491a141" exitCode=2 Feb 03 09:07:19 crc kubenswrapper[4998]: I0203 09:07:19.875529 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"596da915-f039-417b-8700-55f18846872e","Type":"ContainerDied","Data":"ec2951d89c95452bd0997abfd31d8f3b728d2392980a7c49cd64a6718e571ddd"} Feb 03 09:07:19 crc kubenswrapper[4998]: I0203 09:07:19.875711 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"596da915-f039-417b-8700-55f18846872e","Type":"ContainerDied","Data":"29d4c983c22c963a324a47da6c7105282e9b2027172f3bf518db6d47c491a141"} Feb 03 09:07:19 crc kubenswrapper[4998]: I0203 09:07:19.876959 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"469f046f-ad13-49c3-b9da-ac6c46b48882","Type":"ContainerStarted","Data":"2d3d34c2b038910ce740cd4409b20d2e970ac5a0554c0f4e9047a431fe95e7bd"} Feb 03 09:07:20 crc kubenswrapper[4998]: I0203 09:07:20.795237 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 03 09:07:20 crc kubenswrapper[4998]: I0203 09:07:20.910295 4998 generic.go:334] "Generic (PLEG): container finished" podID="596da915-f039-417b-8700-55f18846872e" containerID="952e6de60f0a115a26b676f93dfc2fdbcace0bdf5e76a1a1d5c153b7bee59872" exitCode=0 Feb 03 09:07:20 crc kubenswrapper[4998]: I0203 09:07:20.910326 4998 generic.go:334] "Generic (PLEG): container finished" podID="596da915-f039-417b-8700-55f18846872e" containerID="8f573a7afae0349379da92c8c038de38f8a3a2ec02d5ac518dfd835d2e7f716b" exitCode=0 Feb 03 09:07:20 crc kubenswrapper[4998]: I0203 09:07:20.910441 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 03 09:07:20 crc kubenswrapper[4998]: I0203 09:07:20.911303 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"596da915-f039-417b-8700-55f18846872e","Type":"ContainerDied","Data":"952e6de60f0a115a26b676f93dfc2fdbcace0bdf5e76a1a1d5c153b7bee59872"} Feb 03 09:07:20 crc kubenswrapper[4998]: I0203 09:07:20.911329 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"596da915-f039-417b-8700-55f18846872e","Type":"ContainerDied","Data":"8f573a7afae0349379da92c8c038de38f8a3a2ec02d5ac518dfd835d2e7f716b"} Feb 03 09:07:20 crc kubenswrapper[4998]: I0203 09:07:20.911339 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"596da915-f039-417b-8700-55f18846872e","Type":"ContainerDied","Data":"a6308402886347991fab1058427e45c86736ae74f165ee1b5dbb523ef6e54cc6"} Feb 03 09:07:20 crc kubenswrapper[4998]: I0203 09:07:20.911354 4998 scope.go:117] "RemoveContainer" containerID="ec2951d89c95452bd0997abfd31d8f3b728d2392980a7c49cd64a6718e571ddd" Feb 03 09:07:20 crc kubenswrapper[4998]: I0203 09:07:20.915065 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"469f046f-ad13-49c3-b9da-ac6c46b48882","Type":"ContainerStarted","Data":"f2a24f1318412e0ce347c11381842b1ca5930474f16bd5ab40d90a05d51235ce"} Feb 03 09:07:20 crc kubenswrapper[4998]: I0203 09:07:20.937144 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/596da915-f039-417b-8700-55f18846872e-scripts\") pod \"596da915-f039-417b-8700-55f18846872e\" (UID: \"596da915-f039-417b-8700-55f18846872e\") " Feb 03 09:07:20 crc kubenswrapper[4998]: I0203 09:07:20.937354 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tjxlz\" (UniqueName: \"kubernetes.io/projected/596da915-f039-417b-8700-55f18846872e-kube-api-access-tjxlz\") pod \"596da915-f039-417b-8700-55f18846872e\" (UID: \"596da915-f039-417b-8700-55f18846872e\") " Feb 03 09:07:20 crc kubenswrapper[4998]: I0203 09:07:20.937561 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/596da915-f039-417b-8700-55f18846872e-sg-core-conf-yaml\") pod \"596da915-f039-417b-8700-55f18846872e\" (UID: \"596da915-f039-417b-8700-55f18846872e\") " Feb 03 09:07:20 crc kubenswrapper[4998]: I0203 09:07:20.937651 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/596da915-f039-417b-8700-55f18846872e-combined-ca-bundle\") pod \"596da915-f039-417b-8700-55f18846872e\" (UID: \"596da915-f039-417b-8700-55f18846872e\") " Feb 03 09:07:20 crc 
Feb 03 09:07:20 crc kubenswrapper[4998]: I0203 09:07:20.937703 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/596da915-f039-417b-8700-55f18846872e-run-httpd\") pod \"596da915-f039-417b-8700-55f18846872e\" (UID: \"596da915-f039-417b-8700-55f18846872e\") "
Feb 03 09:07:20 crc kubenswrapper[4998]: I0203 09:07:20.937725 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/596da915-f039-417b-8700-55f18846872e-log-httpd\") pod \"596da915-f039-417b-8700-55f18846872e\" (UID: \"596da915-f039-417b-8700-55f18846872e\") "
Feb 03 09:07:20 crc kubenswrapper[4998]: I0203 09:07:20.937828 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/596da915-f039-417b-8700-55f18846872e-config-data\") pod \"596da915-f039-417b-8700-55f18846872e\" (UID: \"596da915-f039-417b-8700-55f18846872e\") "
Feb 03 09:07:20 crc kubenswrapper[4998]: I0203 09:07:20.939221 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/596da915-f039-417b-8700-55f18846872e-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "596da915-f039-417b-8700-55f18846872e" (UID: "596da915-f039-417b-8700-55f18846872e"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 09:07:20 crc kubenswrapper[4998]: I0203 09:07:20.939446 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/596da915-f039-417b-8700-55f18846872e-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "596da915-f039-417b-8700-55f18846872e" (UID: "596da915-f039-417b-8700-55f18846872e"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 09:07:20 crc kubenswrapper[4998]: I0203 09:07:20.946983 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/596da915-f039-417b-8700-55f18846872e-kube-api-access-tjxlz" (OuterVolumeSpecName: "kube-api-access-tjxlz") pod "596da915-f039-417b-8700-55f18846872e" (UID: "596da915-f039-417b-8700-55f18846872e"). InnerVolumeSpecName "kube-api-access-tjxlz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 09:07:20 crc kubenswrapper[4998]: I0203 09:07:20.948904 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/596da915-f039-417b-8700-55f18846872e-scripts" (OuterVolumeSpecName: "scripts") pod "596da915-f039-417b-8700-55f18846872e" (UID: "596da915-f039-417b-8700-55f18846872e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 09:07:20 crc kubenswrapper[4998]: I0203 09:07:20.950765 4998 scope.go:117] "RemoveContainer" containerID="29d4c983c22c963a324a47da6c7105282e9b2027172f3bf518db6d47c491a141"
Feb 03 09:07:20 crc kubenswrapper[4998]: I0203 09:07:20.979881 4998 scope.go:117] "RemoveContainer" containerID="952e6de60f0a115a26b676f93dfc2fdbcace0bdf5e76a1a1d5c153b7bee59872"
Feb 03 09:07:20 crc kubenswrapper[4998]: I0203 09:07:20.999007 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/596da915-f039-417b-8700-55f18846872e-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "596da915-f039-417b-8700-55f18846872e" (UID: "596da915-f039-417b-8700-55f18846872e"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.041360 4998 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/596da915-f039-417b-8700-55f18846872e-run-httpd\") on node \"crc\" DevicePath \"\""
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.041389 4998 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/596da915-f039-417b-8700-55f18846872e-log-httpd\") on node \"crc\" DevicePath \"\""
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.041398 4998 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/596da915-f039-417b-8700-55f18846872e-scripts\") on node \"crc\" DevicePath \"\""
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.041406 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tjxlz\" (UniqueName: \"kubernetes.io/projected/596da915-f039-417b-8700-55f18846872e-kube-api-access-tjxlz\") on node \"crc\" DevicePath \"\""
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.041435 4998 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/596da915-f039-417b-8700-55f18846872e-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.045255 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/596da915-f039-417b-8700-55f18846872e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "596da915-f039-417b-8700-55f18846872e" (UID: "596da915-f039-417b-8700-55f18846872e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.106117 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/596da915-f039-417b-8700-55f18846872e-config-data" (OuterVolumeSpecName: "config-data") pod "596da915-f039-417b-8700-55f18846872e" (UID: "596da915-f039-417b-8700-55f18846872e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.144620 4998 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/596da915-f039-417b-8700-55f18846872e-config-data\") on node \"crc\" DevicePath \"\""
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.144655 4998 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/596da915-f039-417b-8700-55f18846872e-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.245634 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.275859 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.297836 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Feb 03 09:07:21 crc kubenswrapper[4998]: E0203 09:07:21.298373 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="596da915-f039-417b-8700-55f18846872e" containerName="sg-core"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.298396 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="596da915-f039-417b-8700-55f18846872e" containerName="sg-core"
Feb 03 09:07:21 crc kubenswrapper[4998]: E0203 09:07:21.298420 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="596da915-f039-417b-8700-55f18846872e" containerName="proxy-httpd"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.298428 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="596da915-f039-417b-8700-55f18846872e" containerName="proxy-httpd"
Feb 03 09:07:21 crc kubenswrapper[4998]: E0203 09:07:21.298445 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="596da915-f039-417b-8700-55f18846872e" containerName="ceilometer-central-agent"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.298453 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="596da915-f039-417b-8700-55f18846872e" containerName="ceilometer-central-agent"
Feb 03 09:07:21 crc kubenswrapper[4998]: E0203 09:07:21.298485 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="596da915-f039-417b-8700-55f18846872e" containerName="ceilometer-notification-agent"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.298494 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="596da915-f039-417b-8700-55f18846872e" containerName="ceilometer-notification-agent"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.298743 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="596da915-f039-417b-8700-55f18846872e" containerName="ceilometer-central-agent"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.298772 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="596da915-f039-417b-8700-55f18846872e" containerName="proxy-httpd"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.298812 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="596da915-f039-417b-8700-55f18846872e" containerName="sg-core"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.298830 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="596da915-f039-417b-8700-55f18846872e" containerName="ceilometer-notification-agent"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.302303 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.304768 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.305374 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.322947 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.453338 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/984b43d6-921d-41ef-b77d-66290a22450b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"984b43d6-921d-41ef-b77d-66290a22450b\") " pod="openstack/ceilometer-0"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.453438 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/984b43d6-921d-41ef-b77d-66290a22450b-log-httpd\") pod \"ceilometer-0\" (UID: \"984b43d6-921d-41ef-b77d-66290a22450b\") " pod="openstack/ceilometer-0"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.453476 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdntn\" (UniqueName: \"kubernetes.io/projected/984b43d6-921d-41ef-b77d-66290a22450b-kube-api-access-cdntn\") pod \"ceilometer-0\" (UID: \"984b43d6-921d-41ef-b77d-66290a22450b\") " pod="openstack/ceilometer-0"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.453629 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/984b43d6-921d-41ef-b77d-66290a22450b-config-data\") pod \"ceilometer-0\" (UID: \"984b43d6-921d-41ef-b77d-66290a22450b\") " pod="openstack/ceilometer-0"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.453668 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/984b43d6-921d-41ef-b77d-66290a22450b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"984b43d6-921d-41ef-b77d-66290a22450b\") " pod="openstack/ceilometer-0"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.453719 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/984b43d6-921d-41ef-b77d-66290a22450b-scripts\") pod \"ceilometer-0\" (UID: \"984b43d6-921d-41ef-b77d-66290a22450b\") " pod="openstack/ceilometer-0"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.453826 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/984b43d6-921d-41ef-b77d-66290a22450b-run-httpd\") pod \"ceilometer-0\" (UID: \"984b43d6-921d-41ef-b77d-66290a22450b\") " pod="openstack/ceilometer-0"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.484066 4998 scope.go:117] "RemoveContainer" containerID="8f573a7afae0349379da92c8c038de38f8a3a2ec02d5ac518dfd835d2e7f716b"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.505327 4998 scope.go:117] "RemoveContainer" containerID="ec2951d89c95452bd0997abfd31d8f3b728d2392980a7c49cd64a6718e571ddd"
Feb 03 09:07:21 crc kubenswrapper[4998]: E0203 09:07:21.505647 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec2951d89c95452bd0997abfd31d8f3b728d2392980a7c49cd64a6718e571ddd\": container with ID starting with ec2951d89c95452bd0997abfd31d8f3b728d2392980a7c49cd64a6718e571ddd not found: ID does not exist" containerID="ec2951d89c95452bd0997abfd31d8f3b728d2392980a7c49cd64a6718e571ddd"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.505678 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec2951d89c95452bd0997abfd31d8f3b728d2392980a7c49cd64a6718e571ddd"} err="failed to get container status \"ec2951d89c95452bd0997abfd31d8f3b728d2392980a7c49cd64a6718e571ddd\": rpc error: code = NotFound desc = could not find container \"ec2951d89c95452bd0997abfd31d8f3b728d2392980a7c49cd64a6718e571ddd\": container with ID starting with ec2951d89c95452bd0997abfd31d8f3b728d2392980a7c49cd64a6718e571ddd not found: ID does not exist"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.505708 4998 scope.go:117] "RemoveContainer" containerID="29d4c983c22c963a324a47da6c7105282e9b2027172f3bf518db6d47c491a141"
Feb 03 09:07:21 crc kubenswrapper[4998]: E0203 09:07:21.506066 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"29d4c983c22c963a324a47da6c7105282e9b2027172f3bf518db6d47c491a141\": container with ID starting with 29d4c983c22c963a324a47da6c7105282e9b2027172f3bf518db6d47c491a141 not found: ID does not exist" containerID="29d4c983c22c963a324a47da6c7105282e9b2027172f3bf518db6d47c491a141"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.506088 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29d4c983c22c963a324a47da6c7105282e9b2027172f3bf518db6d47c491a141"} err="failed to get container status \"29d4c983c22c963a324a47da6c7105282e9b2027172f3bf518db6d47c491a141\": rpc error: code = NotFound desc = could not find container \"29d4c983c22c963a324a47da6c7105282e9b2027172f3bf518db6d47c491a141\": container with ID starting with 29d4c983c22c963a324a47da6c7105282e9b2027172f3bf518db6d47c491a141 not found: ID does not exist"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.506100 4998 scope.go:117] "RemoveContainer" containerID="952e6de60f0a115a26b676f93dfc2fdbcace0bdf5e76a1a1d5c153b7bee59872"
Feb 03 09:07:21 crc kubenswrapper[4998]: E0203 09:07:21.506469 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"952e6de60f0a115a26b676f93dfc2fdbcace0bdf5e76a1a1d5c153b7bee59872\": container with ID starting with 952e6de60f0a115a26b676f93dfc2fdbcace0bdf5e76a1a1d5c153b7bee59872 not found: ID does not exist" containerID="952e6de60f0a115a26b676f93dfc2fdbcace0bdf5e76a1a1d5c153b7bee59872"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.506489 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"952e6de60f0a115a26b676f93dfc2fdbcace0bdf5e76a1a1d5c153b7bee59872"} err="failed to get container status \"952e6de60f0a115a26b676f93dfc2fdbcace0bdf5e76a1a1d5c153b7bee59872\": rpc error: code = NotFound desc = could not find container \"952e6de60f0a115a26b676f93dfc2fdbcace0bdf5e76a1a1d5c153b7bee59872\": container with ID starting with 952e6de60f0a115a26b676f93dfc2fdbcace0bdf5e76a1a1d5c153b7bee59872 not found: ID does not exist"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.506501 4998 scope.go:117] "RemoveContainer" containerID="8f573a7afae0349379da92c8c038de38f8a3a2ec02d5ac518dfd835d2e7f716b"
Feb 03 09:07:21 crc kubenswrapper[4998]: E0203 09:07:21.506816 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f573a7afae0349379da92c8c038de38f8a3a2ec02d5ac518dfd835d2e7f716b\": container with ID starting with 8f573a7afae0349379da92c8c038de38f8a3a2ec02d5ac518dfd835d2e7f716b not found: ID does not exist" containerID="8f573a7afae0349379da92c8c038de38f8a3a2ec02d5ac518dfd835d2e7f716b"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.506856 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f573a7afae0349379da92c8c038de38f8a3a2ec02d5ac518dfd835d2e7f716b"} err="failed to get container status \"8f573a7afae0349379da92c8c038de38f8a3a2ec02d5ac518dfd835d2e7f716b\": rpc error: code = NotFound desc = could not find container \"8f573a7afae0349379da92c8c038de38f8a3a2ec02d5ac518dfd835d2e7f716b\": container with ID starting with 8f573a7afae0349379da92c8c038de38f8a3a2ec02d5ac518dfd835d2e7f716b not found: ID does not exist"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.506870 4998 scope.go:117] "RemoveContainer" containerID="ec2951d89c95452bd0997abfd31d8f3b728d2392980a7c49cd64a6718e571ddd"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.507099 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec2951d89c95452bd0997abfd31d8f3b728d2392980a7c49cd64a6718e571ddd"} err="failed to get container status \"ec2951d89c95452bd0997abfd31d8f3b728d2392980a7c49cd64a6718e571ddd\": rpc error: code = NotFound desc = could not find container \"ec2951d89c95452bd0997abfd31d8f3b728d2392980a7c49cd64a6718e571ddd\": container with ID starting with ec2951d89c95452bd0997abfd31d8f3b728d2392980a7c49cd64a6718e571ddd not found: ID does not exist"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.507117 4998 scope.go:117] "RemoveContainer" containerID="29d4c983c22c963a324a47da6c7105282e9b2027172f3bf518db6d47c491a141"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.507422 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29d4c983c22c963a324a47da6c7105282e9b2027172f3bf518db6d47c491a141"} err="failed to get container status \"29d4c983c22c963a324a47da6c7105282e9b2027172f3bf518db6d47c491a141\": rpc error: code = NotFound desc = could not find container \"29d4c983c22c963a324a47da6c7105282e9b2027172f3bf518db6d47c491a141\": container with ID starting with 29d4c983c22c963a324a47da6c7105282e9b2027172f3bf518db6d47c491a141 not found: ID does not exist"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.507441 4998 scope.go:117] "RemoveContainer" containerID="952e6de60f0a115a26b676f93dfc2fdbcace0bdf5e76a1a1d5c153b7bee59872"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.507680 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"952e6de60f0a115a26b676f93dfc2fdbcace0bdf5e76a1a1d5c153b7bee59872"} err="failed to get container status \"952e6de60f0a115a26b676f93dfc2fdbcace0bdf5e76a1a1d5c153b7bee59872\": rpc error: code = NotFound desc = could not find container \"952e6de60f0a115a26b676f93dfc2fdbcace0bdf5e76a1a1d5c153b7bee59872\": container with ID starting with 952e6de60f0a115a26b676f93dfc2fdbcace0bdf5e76a1a1d5c153b7bee59872 not found: ID does not exist"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.507699 4998 scope.go:117] "RemoveContainer" containerID="8f573a7afae0349379da92c8c038de38f8a3a2ec02d5ac518dfd835d2e7f716b"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.507936 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f573a7afae0349379da92c8c038de38f8a3a2ec02d5ac518dfd835d2e7f716b"} err="failed to get container status \"8f573a7afae0349379da92c8c038de38f8a3a2ec02d5ac518dfd835d2e7f716b\": rpc error: code = NotFound desc = could not find container \"8f573a7afae0349379da92c8c038de38f8a3a2ec02d5ac518dfd835d2e7f716b\": container with ID starting with 8f573a7afae0349379da92c8c038de38f8a3a2ec02d5ac518dfd835d2e7f716b not found: ID does not exist"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.555423 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/984b43d6-921d-41ef-b77d-66290a22450b-log-httpd\") pod \"ceilometer-0\" (UID: \"984b43d6-921d-41ef-b77d-66290a22450b\") " pod="openstack/ceilometer-0"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.555476 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdntn\" (UniqueName: \"kubernetes.io/projected/984b43d6-921d-41ef-b77d-66290a22450b-kube-api-access-cdntn\") pod \"ceilometer-0\" (UID: \"984b43d6-921d-41ef-b77d-66290a22450b\") " pod="openstack/ceilometer-0"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.555621 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/984b43d6-921d-41ef-b77d-66290a22450b-config-data\") pod \"ceilometer-0\" (UID: \"984b43d6-921d-41ef-b77d-66290a22450b\") " pod="openstack/ceilometer-0"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.555660 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/984b43d6-921d-41ef-b77d-66290a22450b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"984b43d6-921d-41ef-b77d-66290a22450b\") " pod="openstack/ceilometer-0"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.555702 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/984b43d6-921d-41ef-b77d-66290a22450b-scripts\") pod \"ceilometer-0\" (UID: \"984b43d6-921d-41ef-b77d-66290a22450b\") " pod="openstack/ceilometer-0"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.555815 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/984b43d6-921d-41ef-b77d-66290a22450b-run-httpd\") pod \"ceilometer-0\" (UID: \"984b43d6-921d-41ef-b77d-66290a22450b\") " pod="openstack/ceilometer-0"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.555949 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/984b43d6-921d-41ef-b77d-66290a22450b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"984b43d6-921d-41ef-b77d-66290a22450b\") " pod="openstack/ceilometer-0"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.560098 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/984b43d6-921d-41ef-b77d-66290a22450b-log-httpd\") pod \"ceilometer-0\" (UID: \"984b43d6-921d-41ef-b77d-66290a22450b\") " pod="openstack/ceilometer-0"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.560166 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/984b43d6-921d-41ef-b77d-66290a22450b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"984b43d6-921d-41ef-b77d-66290a22450b\") " pod="openstack/ceilometer-0"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.561641 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/984b43d6-921d-41ef-b77d-66290a22450b-run-httpd\") pod \"ceilometer-0\" (UID: \"984b43d6-921d-41ef-b77d-66290a22450b\") " pod="openstack/ceilometer-0"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.574709 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/984b43d6-921d-41ef-b77d-66290a22450b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"984b43d6-921d-41ef-b77d-66290a22450b\") " pod="openstack/ceilometer-0"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.583274 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdntn\" (UniqueName: \"kubernetes.io/projected/984b43d6-921d-41ef-b77d-66290a22450b-kube-api-access-cdntn\") pod \"ceilometer-0\" (UID: \"984b43d6-921d-41ef-b77d-66290a22450b\") " pod="openstack/ceilometer-0"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.584030 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/984b43d6-921d-41ef-b77d-66290a22450b-config-data\") pod \"ceilometer-0\" (UID: \"984b43d6-921d-41ef-b77d-66290a22450b\") " pod="openstack/ceilometer-0"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.585686 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/984b43d6-921d-41ef-b77d-66290a22450b-scripts\") pod \"ceilometer-0\" (UID: \"984b43d6-921d-41ef-b77d-66290a22450b\") " pod="openstack/ceilometer-0"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.634268 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Feb 03 09:07:21 crc kubenswrapper[4998]: I0203 09:07:21.950111 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"469f046f-ad13-49c3-b9da-ac6c46b48882","Type":"ContainerStarted","Data":"788004b1287e25095fde04a20b9461d4e567c29a8d18e7d31f0fc4682f33a8f0"}
Feb 03 09:07:22 crc kubenswrapper[4998]: I0203 09:07:22.216066 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Feb 03 09:07:22 crc kubenswrapper[4998]: I0203 09:07:22.442261 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="596da915-f039-417b-8700-55f18846872e" path="/var/lib/kubelet/pods/596da915-f039-417b-8700-55f18846872e/volumes"
Feb 03 09:07:22 crc kubenswrapper[4998]: I0203 09:07:22.969599 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"984b43d6-921d-41ef-b77d-66290a22450b","Type":"ContainerStarted","Data":"56453c66876b26acf4ee9cd64e83491f7139c8beadcb2f2878b0353d904ee872"}
Feb 03 09:07:22 crc kubenswrapper[4998]: I0203 09:07:22.970155 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"984b43d6-921d-41ef-b77d-66290a22450b","Type":"ContainerStarted","Data":"33038d625f8f5581d982d1b0018b4e549b6779f78e07a8f2b7aa6be8877d3ac1"}
Feb 03 09:07:23 crc kubenswrapper[4998]: I0203 09:07:23.982315 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"469f046f-ad13-49c3-b9da-ac6c46b48882","Type":"ContainerStarted","Data":"3363d1d8aee001b422e35ca79dba4f41f524e5db76913fefeec0ae9bb8251645"}
Feb 03 09:07:23 crc kubenswrapper[4998]: I0203 09:07:23.984286 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"984b43d6-921d-41ef-b77d-66290a22450b","Type":"ContainerStarted","Data":"8da81099123c316d5f0f7145e7dbb0da8898a1db6093c15a0aa10661dda5a453"}
Feb 03 09:07:24 crc kubenswrapper[4998]: I0203 09:07:24.007222 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-0" podStartSLOduration=2.244320873 podStartE2EDuration="7.007195678s" podCreationTimestamp="2026-02-03 09:07:17 +0000 UTC" firstStartedPulling="2026-02-03 09:07:18.47775754 +0000 UTC m=+8476.764451336" lastFinishedPulling="2026-02-03 09:07:23.240632335 +0000 UTC m=+8481.527326141" observedRunningTime="2026-02-03 09:07:23.999018706 +0000 UTC m=+8482.285712532" watchObservedRunningTime="2026-02-03 09:07:24.007195678 +0000 UTC m=+8482.293889504"
podCreationTimestamp="2026-02-03 09:07:21 +0000 UTC" firstStartedPulling="2026-02-03 09:07:22.262953349 +0000 UTC m=+8480.549647155" lastFinishedPulling="2026-02-03 09:07:26.317632969 +0000 UTC m=+8484.604326775" observedRunningTime="2026-02-03 09:07:27.040647595 +0000 UTC m=+8485.327341431" watchObservedRunningTime="2026-02-03 09:07:27.0415185 +0000 UTC m=+8485.328212306" Feb 03 09:07:37 crc kubenswrapper[4998]: I0203 09:07:37.049544 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-lv7qc"] Feb 03 09:07:37 crc kubenswrapper[4998]: I0203 09:07:37.063348 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-lv7qc"] Feb 03 09:07:38 crc kubenswrapper[4998]: I0203 09:07:38.440331 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ec901bf-03bf-4799-acc8-095e4196e78a" path="/var/lib/kubelet/pods/2ec901bf-03bf-4799-acc8-095e4196e78a/volumes" Feb 03 09:07:51 crc kubenswrapper[4998]: I0203 09:07:51.649201 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Feb 03 09:08:12 crc kubenswrapper[4998]: I0203 09:08:12.569839 4998 scope.go:117] "RemoveContainer" containerID="bf498f4b9b405e769594b3781e6efa028ea2c250195e5ba629ce57e1a62e7363" Feb 03 09:08:12 crc kubenswrapper[4998]: I0203 09:08:12.605011 4998 scope.go:117] "RemoveContainer" containerID="8a40473c459da532d3dfc96afffb93bbf13a0a6ee5ba9dc18230bd0b6ba357e8" Feb 03 09:08:38 crc kubenswrapper[4998]: I0203 09:08:38.060145 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-940a-account-create-update-d2hxr"] Feb 03 09:08:38 crc kubenswrapper[4998]: I0203 09:08:38.077396 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-nzthm"] Feb 03 09:08:38 crc kubenswrapper[4998]: I0203 09:08:38.089660 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-940a-account-create-update-d2hxr"] Feb 03 09:08:38 crc kubenswrapper[4998]: I0203 09:08:38.110313 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-nzthm"] Feb 03 09:08:38 crc kubenswrapper[4998]: I0203 09:08:38.438858 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2007b61e-5077-42b3-91d8-88725aef70a6" path="/var/lib/kubelet/pods/2007b61e-5077-42b3-91d8-88725aef70a6/volumes" Feb 03 09:08:38 crc kubenswrapper[4998]: I0203 09:08:38.439883 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c00f9260-e15f-4da2-80bc-2a17a188ad20" path="/var/lib/kubelet/pods/c00f9260-e15f-4da2-80bc-2a17a188ad20/volumes" Feb 03 09:08:42 crc kubenswrapper[4998]: I0203 09:08:42.754314 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 09:08:42 crc kubenswrapper[4998]: I0203 09:08:42.754987 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 09:08:51 crc kubenswrapper[4998]: I0203 09:08:51.041459 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-q7fx2/must-gather-bbfgf"] Feb 03 
09:08:51 crc kubenswrapper[4998]: I0203 09:08:51.043644 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-q7fx2/must-gather-bbfgf" Feb 03 09:08:51 crc kubenswrapper[4998]: I0203 09:08:51.045591 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-q7fx2"/"kube-root-ca.crt" Feb 03 09:08:51 crc kubenswrapper[4998]: I0203 09:08:51.046906 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-q7fx2"/"openshift-service-ca.crt" Feb 03 09:08:51 crc kubenswrapper[4998]: I0203 09:08:51.074303 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-q7fx2/must-gather-bbfgf"] Feb 03 09:08:51 crc kubenswrapper[4998]: I0203 09:08:51.074695 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x957v\" (UniqueName: \"kubernetes.io/projected/84c082cf-763b-4ee5-b4db-74e56eed0d85-kube-api-access-x957v\") pod \"must-gather-bbfgf\" (UID: \"84c082cf-763b-4ee5-b4db-74e56eed0d85\") " pod="openshift-must-gather-q7fx2/must-gather-bbfgf" Feb 03 09:08:51 crc kubenswrapper[4998]: I0203 09:08:51.074741 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/84c082cf-763b-4ee5-b4db-74e56eed0d85-must-gather-output\") pod \"must-gather-bbfgf\" (UID: \"84c082cf-763b-4ee5-b4db-74e56eed0d85\") " pod="openshift-must-gather-q7fx2/must-gather-bbfgf" Feb 03 09:08:51 crc kubenswrapper[4998]: I0203 09:08:51.176627 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x957v\" (UniqueName: \"kubernetes.io/projected/84c082cf-763b-4ee5-b4db-74e56eed0d85-kube-api-access-x957v\") pod \"must-gather-bbfgf\" (UID: \"84c082cf-763b-4ee5-b4db-74e56eed0d85\") " pod="openshift-must-gather-q7fx2/must-gather-bbfgf" Feb 03 09:08:51 crc kubenswrapper[4998]: I0203 09:08:51.176676 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/84c082cf-763b-4ee5-b4db-74e56eed0d85-must-gather-output\") pod \"must-gather-bbfgf\" (UID: \"84c082cf-763b-4ee5-b4db-74e56eed0d85\") " pod="openshift-must-gather-q7fx2/must-gather-bbfgf" Feb 03 09:08:51 crc kubenswrapper[4998]: I0203 09:08:51.177165 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/84c082cf-763b-4ee5-b4db-74e56eed0d85-must-gather-output\") pod \"must-gather-bbfgf\" (UID: \"84c082cf-763b-4ee5-b4db-74e56eed0d85\") " pod="openshift-must-gather-q7fx2/must-gather-bbfgf" Feb 03 09:08:51 crc kubenswrapper[4998]: I0203 09:08:51.198227 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x957v\" (UniqueName: \"kubernetes.io/projected/84c082cf-763b-4ee5-b4db-74e56eed0d85-kube-api-access-x957v\") pod \"must-gather-bbfgf\" (UID: \"84c082cf-763b-4ee5-b4db-74e56eed0d85\") " pod="openshift-must-gather-q7fx2/must-gather-bbfgf" Feb 03 09:08:51 crc kubenswrapper[4998]: I0203 09:08:51.365927 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-q7fx2/must-gather-bbfgf" Feb 03 09:08:51 crc kubenswrapper[4998]: I0203 09:08:51.987654 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-q7fx2/must-gather-bbfgf"] Feb 03 09:08:52 crc kubenswrapper[4998]: I0203 09:08:52.557501 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-q7fx2/must-gather-bbfgf" event={"ID":"84c082cf-763b-4ee5-b4db-74e56eed0d85","Type":"ContainerStarted","Data":"0ea464a6541424674a6f1e4e587f6c892c6f404f85a4a77da3ed7dce4f0e47ee"} Feb 03 09:09:02 crc kubenswrapper[4998]: I0203 09:09:02.666464 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-q7fx2/must-gather-bbfgf" event={"ID":"84c082cf-763b-4ee5-b4db-74e56eed0d85","Type":"ContainerStarted","Data":"e8f2906a319d5b07b56136ad96754f001fcc18a2635bca9a8e7ecb4d44ae1875"} Feb 03 09:09:03 crc kubenswrapper[4998]: I0203 09:09:03.680679 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-q7fx2/must-gather-bbfgf" event={"ID":"84c082cf-763b-4ee5-b4db-74e56eed0d85","Type":"ContainerStarted","Data":"85a7b71a445187ac371b8ad6388581953258accce2f18f33c535816e3a58f1f6"} Feb 03 09:09:03 crc kubenswrapper[4998]: I0203 09:09:03.702082 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-q7fx2/must-gather-bbfgf" podStartSLOduration=2.691884986 podStartE2EDuration="12.702058109s" podCreationTimestamp="2026-02-03 09:08:51 +0000 UTC" firstStartedPulling="2026-02-03 09:08:51.982463406 +0000 UTC m=+8570.269157212" lastFinishedPulling="2026-02-03 09:09:01.992636529 +0000 UTC m=+8580.279330335" observedRunningTime="2026-02-03 09:09:03.700440783 +0000 UTC m=+8581.987134599" watchObservedRunningTime="2026-02-03 09:09:03.702058109 +0000 UTC m=+8581.988751925" Feb 03 09:09:07 crc kubenswrapper[4998]: I0203 09:09:07.103520 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-q7fx2/crc-debug-rmbkd"] Feb 03 09:09:07 crc kubenswrapper[4998]: I0203 09:09:07.105371 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-q7fx2/crc-debug-rmbkd" Feb 03 09:09:07 crc kubenswrapper[4998]: I0203 09:09:07.107810 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-q7fx2"/"default-dockercfg-j4vq8" Feb 03 09:09:07 crc kubenswrapper[4998]: I0203 09:09:07.269623 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8dae701d-e940-4730-9677-780fbc013b47-host\") pod \"crc-debug-rmbkd\" (UID: \"8dae701d-e940-4730-9677-780fbc013b47\") " pod="openshift-must-gather-q7fx2/crc-debug-rmbkd" Feb 03 09:09:07 crc kubenswrapper[4998]: I0203 09:09:07.270168 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jlbc7\" (UniqueName: \"kubernetes.io/projected/8dae701d-e940-4730-9677-780fbc013b47-kube-api-access-jlbc7\") pod \"crc-debug-rmbkd\" (UID: \"8dae701d-e940-4730-9677-780fbc013b47\") " pod="openshift-must-gather-q7fx2/crc-debug-rmbkd" Feb 03 09:09:07 crc kubenswrapper[4998]: I0203 09:09:07.372455 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8dae701d-e940-4730-9677-780fbc013b47-host\") pod \"crc-debug-rmbkd\" (UID: \"8dae701d-e940-4730-9677-780fbc013b47\") " pod="openshift-must-gather-q7fx2/crc-debug-rmbkd" Feb 03 09:09:07 crc kubenswrapper[4998]: I0203 09:09:07.372541 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8dae701d-e940-4730-9677-780fbc013b47-host\") pod \"crc-debug-rmbkd\" (UID: \"8dae701d-e940-4730-9677-780fbc013b47\") " pod="openshift-must-gather-q7fx2/crc-debug-rmbkd" Feb 03 09:09:07 crc kubenswrapper[4998]: I0203 09:09:07.372799 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jlbc7\" (UniqueName: \"kubernetes.io/projected/8dae701d-e940-4730-9677-780fbc013b47-kube-api-access-jlbc7\") pod \"crc-debug-rmbkd\" (UID: \"8dae701d-e940-4730-9677-780fbc013b47\") " pod="openshift-must-gather-q7fx2/crc-debug-rmbkd" Feb 03 09:09:07 crc kubenswrapper[4998]: I0203 09:09:07.402605 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jlbc7\" (UniqueName: \"kubernetes.io/projected/8dae701d-e940-4730-9677-780fbc013b47-kube-api-access-jlbc7\") pod \"crc-debug-rmbkd\" (UID: \"8dae701d-e940-4730-9677-780fbc013b47\") " pod="openshift-must-gather-q7fx2/crc-debug-rmbkd" Feb 03 09:09:07 crc kubenswrapper[4998]: I0203 09:09:07.428425 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-q7fx2/crc-debug-rmbkd" Feb 03 09:09:07 crc kubenswrapper[4998]: W0203 09:09:07.466195 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8dae701d_e940_4730_9677_780fbc013b47.slice/crio-57f8de8cdb007ddfeed6cb93a5ec19cc66efb9c8697ffcef140c4d7c91b7a8a7 WatchSource:0}: Error finding container 57f8de8cdb007ddfeed6cb93a5ec19cc66efb9c8697ffcef140c4d7c91b7a8a7: Status 404 returned error can't find the container with id 57f8de8cdb007ddfeed6cb93a5ec19cc66efb9c8697ffcef140c4d7c91b7a8a7 Feb 03 09:09:07 crc kubenswrapper[4998]: I0203 09:09:07.765726 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-q7fx2/crc-debug-rmbkd" event={"ID":"8dae701d-e940-4730-9677-780fbc013b47","Type":"ContainerStarted","Data":"57f8de8cdb007ddfeed6cb93a5ec19cc66efb9c8697ffcef140c4d7c91b7a8a7"} Feb 03 09:09:12 crc kubenswrapper[4998]: I0203 09:09:12.714098 4998 scope.go:117] "RemoveContainer" containerID="95da58851bc9946b8a4fa2b4da776de8120597826477bfed63a1cfd057a47cc4" Feb 03 09:09:12 crc kubenswrapper[4998]: I0203 09:09:12.758005 4998 scope.go:117] "RemoveContainer" containerID="92b69d37e147edcc833dc9fbd9c4326a8983cd20bb17ca5e1265fb5e95ef1f99" Feb 03 09:09:12 crc kubenswrapper[4998]: I0203 09:09:12.758275 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 09:09:12 crc kubenswrapper[4998]: I0203 09:09:12.758320 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 09:09:26 crc kubenswrapper[4998]: I0203 09:09:26.068622 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-f5m7z"] Feb 03 09:09:26 crc kubenswrapper[4998]: I0203 09:09:26.089459 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-f5m7z"] Feb 03 09:09:26 crc kubenswrapper[4998]: I0203 09:09:26.439152 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ebbf8d47-3f9c-451c-ba8f-d37a8c74e692" path="/var/lib/kubelet/pods/ebbf8d47-3f9c-451c-ba8f-d37a8c74e692/volumes" Feb 03 09:09:27 crc kubenswrapper[4998]: E0203 09:09:27.476847 4998 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:6ab858aed98e4fe57e6b144da8e90ad5d6698bb4cc5521206f5c05809f0f9296" Feb 03 09:09:27 crc kubenswrapper[4998]: E0203 09:09:27.477308 4998 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:container-00,Image:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:6ab858aed98e4fe57e6b144da8e90ad5d6698bb4cc5521206f5c05809f0f9296,Command:[chroot /host bash -c echo 'TOOLBOX_NAME=toolbox-osp' > /root/.toolboxrc ; rm -rf \"/var/tmp/sos-osp\" && mkdir -p \"/var/tmp/sos-osp\" && sudo podman rm --force toolbox-osp; sudo --preserve-env podman pull --authfile /var/lib/kubelet/config.json registry.redhat.io/rhel9/support-tools && toolbox sos report --batch --all-logs --only-plugins 
block,cifs,crio,devicemapper,devices,firewall_tables,firewalld,iscsi,lvm2,memory,multipath,nfs,nis,nvme,podman,process,processor,selinux,scsi,udev,logs,crypto --tmp-dir=\"/var/tmp/sos-osp\" && if [[ \"$(ls /var/log/pods/*/{*.log.*,*/*.log.*} 2>/dev/null)\" != '' ]]; then tar --ignore-failed-read --warning=no-file-changed -cJf \"/var/tmp/sos-osp/podlogs.tar.xz\" --transform 's,^,podlogs/,' /var/log/pods/*/{*.log.*,*/*.log.*} || true; fi],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:TMOUT,Value:900,ValueFrom:nil,},EnvVar{Name:HOST,Value:/host,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:host,ReadOnly:false,MountPath:/host,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jlbc7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:*true,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod crc-debug-rmbkd_openshift-must-gather-q7fx2(8dae701d-e940-4730-9677-780fbc013b47): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 03 09:09:27 crc kubenswrapper[4998]: E0203 09:09:27.478515 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"container-00\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openshift-must-gather-q7fx2/crc-debug-rmbkd" podUID="8dae701d-e940-4730-9677-780fbc013b47" Feb 03 09:09:28 crc kubenswrapper[4998]: E0203 09:09:28.039054 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"container-00\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:6ab858aed98e4fe57e6b144da8e90ad5d6698bb4cc5521206f5c05809f0f9296\\\"\"" pod="openshift-must-gather-q7fx2/crc-debug-rmbkd" podUID="8dae701d-e940-4730-9677-780fbc013b47" Feb 03 09:09:42 crc kubenswrapper[4998]: I0203 09:09:42.754386 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 09:09:42 crc kubenswrapper[4998]: I0203 09:09:42.754927 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 09:09:42 crc kubenswrapper[4998]: I0203 09:09:42.754964 4998 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" 
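The two pod_workers entries above show the image-pull back-off cycle: the first sync fails with ErrImagePull after the CRI pull is canceled, and the very next sync reports ImagePullBackOff instead of retrying the pull immediately. A minimal Go sketch of that doubling-delay behaviour, under the assumption of a simple exponential back-off with a cap; the constants are illustrative, not read from this kubelet's configuration:

package main

import (
	"fmt"
	"time"
)

// pullBackoff tracks the retry delay for one image, doubling on each
// failure up to a limit, which is roughly how an image-pull back-off behaves.
type pullBackoff struct {
	delay, limit time.Duration
}

func (b *pullBackoff) next() time.Duration {
	d := b.delay
	b.delay *= 2
	if b.delay > b.limit {
		b.delay = b.limit
	}
	return d
}

func main() {
	// Illustrative values only; the real kubelet keeps per-image state.
	b := &pullBackoff{delay: 10 * time.Second, limit: 5 * time.Minute}
	for attempt := 1; attempt <= 6; attempt++ {
		fmt.Printf("attempt %d: back off %v before retrying the pull\n", attempt, b.next())
	}
}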
pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" Feb 03 09:09:42 crc kubenswrapper[4998]: I0203 09:09:42.755742 4998 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"64bf25fffb6e530d03bdddc74c5d431fe1cf2996b5e7ae51aa19f535134dc4f8"} pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 03 09:09:42 crc kubenswrapper[4998]: I0203 09:09:42.755813 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" containerID="cri-o://64bf25fffb6e530d03bdddc74c5d431fe1cf2996b5e7ae51aa19f535134dc4f8" gracePeriod=600 Feb 03 09:09:43 crc kubenswrapper[4998]: I0203 09:09:43.183844 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-q7fx2/crc-debug-rmbkd" event={"ID":"8dae701d-e940-4730-9677-780fbc013b47","Type":"ContainerStarted","Data":"7dfe272157db67fd4d89c1f1a6dfb0b033877a805a3da33e4e71c6c70facf030"} Feb 03 09:09:43 crc kubenswrapper[4998]: I0203 09:09:43.188499 4998 generic.go:334] "Generic (PLEG): container finished" podID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerID="64bf25fffb6e530d03bdddc74c5d431fe1cf2996b5e7ae51aa19f535134dc4f8" exitCode=0 Feb 03 09:09:43 crc kubenswrapper[4998]: I0203 09:09:43.188533 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerDied","Data":"64bf25fffb6e530d03bdddc74c5d431fe1cf2996b5e7ae51aa19f535134dc4f8"} Feb 03 09:09:43 crc kubenswrapper[4998]: I0203 09:09:43.188554 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerStarted","Data":"8fd8d67f375b11a9322a22f6149a9dff658215432bfa58d01d17bb98b9b81d97"} Feb 03 09:09:43 crc kubenswrapper[4998]: I0203 09:09:43.188573 4998 scope.go:117] "RemoveContainer" containerID="f0960bf93408fbd80b1af606d0b0b825518c12f926ca6f6b8748be736ddfda77" Feb 03 09:09:43 crc kubenswrapper[4998]: I0203 09:09:43.214213 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-q7fx2/crc-debug-rmbkd" podStartSLOduration=0.975918468 podStartE2EDuration="36.214191792s" podCreationTimestamp="2026-02-03 09:09:07 +0000 UTC" firstStartedPulling="2026-02-03 09:09:07.46897315 +0000 UTC m=+8585.755666956" lastFinishedPulling="2026-02-03 09:09:42.707246474 +0000 UTC m=+8620.993940280" observedRunningTime="2026-02-03 09:09:43.201406979 +0000 UTC m=+8621.488100785" watchObservedRunningTime="2026-02-03 09:09:43.214191792 +0000 UTC m=+8621.500885598" Feb 03 09:10:00 crc kubenswrapper[4998]: I0203 09:10:00.049004 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-9mbqc"] Feb 03 09:10:00 crc kubenswrapper[4998]: I0203 09:10:00.061728 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-9mbqc"] Feb 03 09:10:00 crc kubenswrapper[4998]: I0203 09:10:00.074581 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-8468-account-create-update-rmt9s"] Feb 03 09:10:00 crc kubenswrapper[4998]: I0203 09:10:00.085032 4998 kubelet.go:2431] "SyncLoop 
REMOVE" source="api" pods=["openstack/placement-8468-account-create-update-rmt9s"] Feb 03 09:10:00 crc kubenswrapper[4998]: I0203 09:10:00.441402 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c1e0c01-3615-4acc-93cd-16ec35777578" path="/var/lib/kubelet/pods/9c1e0c01-3615-4acc-93cd-16ec35777578/volumes" Feb 03 09:10:00 crc kubenswrapper[4998]: I0203 09:10:00.442146 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd5331f1-33b8-454f-9d08-2ba013f7e25c" path="/var/lib/kubelet/pods/fd5331f1-33b8-454f-9d08-2ba013f7e25c/volumes" Feb 03 09:10:09 crc kubenswrapper[4998]: I0203 09:10:09.029415 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-bd5gs"] Feb 03 09:10:09 crc kubenswrapper[4998]: I0203 09:10:09.041082 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-bd5gs"] Feb 03 09:10:10 crc kubenswrapper[4998]: I0203 09:10:10.468048 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c94ec51-b96c-405a-990b-acb3a0f85209" path="/var/lib/kubelet/pods/7c94ec51-b96c-405a-990b-acb3a0f85209/volumes" Feb 03 09:10:12 crc kubenswrapper[4998]: I0203 09:10:12.527974 4998 generic.go:334] "Generic (PLEG): container finished" podID="8dae701d-e940-4730-9677-780fbc013b47" containerID="7dfe272157db67fd4d89c1f1a6dfb0b033877a805a3da33e4e71c6c70facf030" exitCode=0 Feb 03 09:10:12 crc kubenswrapper[4998]: I0203 09:10:12.528061 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-q7fx2/crc-debug-rmbkd" event={"ID":"8dae701d-e940-4730-9677-780fbc013b47","Type":"ContainerDied","Data":"7dfe272157db67fd4d89c1f1a6dfb0b033877a805a3da33e4e71c6c70facf030"} Feb 03 09:10:12 crc kubenswrapper[4998]: I0203 09:10:12.915204 4998 scope.go:117] "RemoveContainer" containerID="c4e29ae12016a70ac65c7f04bb6e1ad62b1391bbb823c33996b9f1dbd51ebc3d" Feb 03 09:10:12 crc kubenswrapper[4998]: I0203 09:10:12.953254 4998 scope.go:117] "RemoveContainer" containerID="74c0ecb2d4a218c66bc96717b1c7e9e91e07bbebe7ec8f12aa6995374242094c" Feb 03 09:10:13 crc kubenswrapper[4998]: I0203 09:10:13.033884 4998 scope.go:117] "RemoveContainer" containerID="916d6a71c52755cdc0ef2441feab032eab2c50532cb4003e401f9255bda2a0c8" Feb 03 09:10:13 crc kubenswrapper[4998]: I0203 09:10:13.077862 4998 scope.go:117] "RemoveContainer" containerID="0bcfd5400f96288128f3d5fdd47a724da2b19a30f4f1341919ab921e753209a6" Feb 03 09:10:13 crc kubenswrapper[4998]: I0203 09:10:13.610487 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-q7fx2/crc-debug-rmbkd" Feb 03 09:10:13 crc kubenswrapper[4998]: I0203 09:10:13.653449 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-q7fx2/crc-debug-rmbkd"] Feb 03 09:10:13 crc kubenswrapper[4998]: I0203 09:10:13.665113 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-q7fx2/crc-debug-rmbkd"] Feb 03 09:10:13 crc kubenswrapper[4998]: I0203 09:10:13.739485 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jlbc7\" (UniqueName: \"kubernetes.io/projected/8dae701d-e940-4730-9677-780fbc013b47-kube-api-access-jlbc7\") pod \"8dae701d-e940-4730-9677-780fbc013b47\" (UID: \"8dae701d-e940-4730-9677-780fbc013b47\") " Feb 03 09:10:13 crc kubenswrapper[4998]: I0203 09:10:13.739697 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8dae701d-e940-4730-9677-780fbc013b47-host\") pod \"8dae701d-e940-4730-9677-780fbc013b47\" (UID: \"8dae701d-e940-4730-9677-780fbc013b47\") " Feb 03 09:10:13 crc kubenswrapper[4998]: I0203 09:10:13.739930 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8dae701d-e940-4730-9677-780fbc013b47-host" (OuterVolumeSpecName: "host") pod "8dae701d-e940-4730-9677-780fbc013b47" (UID: "8dae701d-e940-4730-9677-780fbc013b47"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 09:10:13 crc kubenswrapper[4998]: I0203 09:10:13.740244 4998 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8dae701d-e940-4730-9677-780fbc013b47-host\") on node \"crc\" DevicePath \"\"" Feb 03 09:10:13 crc kubenswrapper[4998]: I0203 09:10:13.745522 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8dae701d-e940-4730-9677-780fbc013b47-kube-api-access-jlbc7" (OuterVolumeSpecName: "kube-api-access-jlbc7") pod "8dae701d-e940-4730-9677-780fbc013b47" (UID: "8dae701d-e940-4730-9677-780fbc013b47"). InnerVolumeSpecName "kube-api-access-jlbc7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:10:13 crc kubenswrapper[4998]: I0203 09:10:13.842242 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jlbc7\" (UniqueName: \"kubernetes.io/projected/8dae701d-e940-4730-9677-780fbc013b47-kube-api-access-jlbc7\") on node \"crc\" DevicePath \"\"" Feb 03 09:10:14 crc kubenswrapper[4998]: I0203 09:10:14.439515 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8dae701d-e940-4730-9677-780fbc013b47" path="/var/lib/kubelet/pods/8dae701d-e940-4730-9677-780fbc013b47/volumes" Feb 03 09:10:14 crc kubenswrapper[4998]: I0203 09:10:14.553891 4998 scope.go:117] "RemoveContainer" containerID="7dfe272157db67fd4d89c1f1a6dfb0b033877a805a3da33e4e71c6c70facf030" Feb 03 09:10:14 crc kubenswrapper[4998]: I0203 09:10:14.554091 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-q7fx2/crc-debug-rmbkd" Feb 03 09:10:14 crc kubenswrapper[4998]: I0203 09:10:14.830278 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-q7fx2/crc-debug-vbb7l"] Feb 03 09:10:14 crc kubenswrapper[4998]: E0203 09:10:14.830777 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8dae701d-e940-4730-9677-780fbc013b47" containerName="container-00" Feb 03 09:10:14 crc kubenswrapper[4998]: I0203 09:10:14.830823 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="8dae701d-e940-4730-9677-780fbc013b47" containerName="container-00" Feb 03 09:10:14 crc kubenswrapper[4998]: I0203 09:10:14.831103 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="8dae701d-e940-4730-9677-780fbc013b47" containerName="container-00" Feb 03 09:10:14 crc kubenswrapper[4998]: I0203 09:10:14.832038 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-q7fx2/crc-debug-vbb7l" Feb 03 09:10:14 crc kubenswrapper[4998]: I0203 09:10:14.834471 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-q7fx2"/"default-dockercfg-j4vq8" Feb 03 09:10:14 crc kubenswrapper[4998]: I0203 09:10:14.965710 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0b5a9d03-7183-4289-ae83-2c1796c750c7-host\") pod \"crc-debug-vbb7l\" (UID: \"0b5a9d03-7183-4289-ae83-2c1796c750c7\") " pod="openshift-must-gather-q7fx2/crc-debug-vbb7l" Feb 03 09:10:14 crc kubenswrapper[4998]: I0203 09:10:14.965844 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v5jpj\" (UniqueName: \"kubernetes.io/projected/0b5a9d03-7183-4289-ae83-2c1796c750c7-kube-api-access-v5jpj\") pod \"crc-debug-vbb7l\" (UID: \"0b5a9d03-7183-4289-ae83-2c1796c750c7\") " pod="openshift-must-gather-q7fx2/crc-debug-vbb7l" Feb 03 09:10:15 crc kubenswrapper[4998]: I0203 09:10:15.067266 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v5jpj\" (UniqueName: \"kubernetes.io/projected/0b5a9d03-7183-4289-ae83-2c1796c750c7-kube-api-access-v5jpj\") pod \"crc-debug-vbb7l\" (UID: \"0b5a9d03-7183-4289-ae83-2c1796c750c7\") " pod="openshift-must-gather-q7fx2/crc-debug-vbb7l" Feb 03 09:10:15 crc kubenswrapper[4998]: I0203 09:10:15.067501 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0b5a9d03-7183-4289-ae83-2c1796c750c7-host\") pod \"crc-debug-vbb7l\" (UID: \"0b5a9d03-7183-4289-ae83-2c1796c750c7\") " pod="openshift-must-gather-q7fx2/crc-debug-vbb7l" Feb 03 09:10:15 crc kubenswrapper[4998]: I0203 09:10:15.067612 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0b5a9d03-7183-4289-ae83-2c1796c750c7-host\") pod \"crc-debug-vbb7l\" (UID: \"0b5a9d03-7183-4289-ae83-2c1796c750c7\") " pod="openshift-must-gather-q7fx2/crc-debug-vbb7l" Feb 03 09:10:15 crc kubenswrapper[4998]: I0203 09:10:15.086698 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v5jpj\" (UniqueName: \"kubernetes.io/projected/0b5a9d03-7183-4289-ae83-2c1796c750c7-kube-api-access-v5jpj\") pod \"crc-debug-vbb7l\" (UID: \"0b5a9d03-7183-4289-ae83-2c1796c750c7\") " pod="openshift-must-gather-q7fx2/crc-debug-vbb7l" Feb 03 09:10:15 crc kubenswrapper[4998]: I0203 
09:10:15.156136 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-q7fx2/crc-debug-vbb7l" Feb 03 09:10:15 crc kubenswrapper[4998]: I0203 09:10:15.570385 4998 generic.go:334] "Generic (PLEG): container finished" podID="0b5a9d03-7183-4289-ae83-2c1796c750c7" containerID="ad6242d8f40f9cbdc0dab2752543bedb17fee552373ddd2404934c43256adaf8" exitCode=1 Feb 03 09:10:15 crc kubenswrapper[4998]: I0203 09:10:15.570484 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-q7fx2/crc-debug-vbb7l" event={"ID":"0b5a9d03-7183-4289-ae83-2c1796c750c7","Type":"ContainerDied","Data":"ad6242d8f40f9cbdc0dab2752543bedb17fee552373ddd2404934c43256adaf8"} Feb 03 09:10:15 crc kubenswrapper[4998]: I0203 09:10:15.570705 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-q7fx2/crc-debug-vbb7l" event={"ID":"0b5a9d03-7183-4289-ae83-2c1796c750c7","Type":"ContainerStarted","Data":"33f784a45a080b7b840c3ee1b8e075c4d33929fc270d5b057a733d3c3c52a929"} Feb 03 09:10:15 crc kubenswrapper[4998]: I0203 09:10:15.619010 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-q7fx2/crc-debug-vbb7l"] Feb 03 09:10:15 crc kubenswrapper[4998]: I0203 09:10:15.629472 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-q7fx2/crc-debug-vbb7l"] Feb 03 09:10:16 crc kubenswrapper[4998]: I0203 09:10:16.697902 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-q7fx2/crc-debug-vbb7l" Feb 03 09:10:16 crc kubenswrapper[4998]: I0203 09:10:16.807462 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0b5a9d03-7183-4289-ae83-2c1796c750c7-host\") pod \"0b5a9d03-7183-4289-ae83-2c1796c750c7\" (UID: \"0b5a9d03-7183-4289-ae83-2c1796c750c7\") " Feb 03 09:10:16 crc kubenswrapper[4998]: I0203 09:10:16.807653 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v5jpj\" (UniqueName: \"kubernetes.io/projected/0b5a9d03-7183-4289-ae83-2c1796c750c7-kube-api-access-v5jpj\") pod \"0b5a9d03-7183-4289-ae83-2c1796c750c7\" (UID: \"0b5a9d03-7183-4289-ae83-2c1796c750c7\") " Feb 03 09:10:16 crc kubenswrapper[4998]: I0203 09:10:16.808879 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0b5a9d03-7183-4289-ae83-2c1796c750c7-host" (OuterVolumeSpecName: "host") pod "0b5a9d03-7183-4289-ae83-2c1796c750c7" (UID: "0b5a9d03-7183-4289-ae83-2c1796c750c7"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 09:10:16 crc kubenswrapper[4998]: I0203 09:10:16.825612 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b5a9d03-7183-4289-ae83-2c1796c750c7-kube-api-access-v5jpj" (OuterVolumeSpecName: "kube-api-access-v5jpj") pod "0b5a9d03-7183-4289-ae83-2c1796c750c7" (UID: "0b5a9d03-7183-4289-ae83-2c1796c750c7"). InnerVolumeSpecName "kube-api-access-v5jpj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:10:16 crc kubenswrapper[4998]: I0203 09:10:16.909800 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v5jpj\" (UniqueName: \"kubernetes.io/projected/0b5a9d03-7183-4289-ae83-2c1796c750c7-kube-api-access-v5jpj\") on node \"crc\" DevicePath \"\"" Feb 03 09:10:16 crc kubenswrapper[4998]: I0203 09:10:16.910090 4998 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0b5a9d03-7183-4289-ae83-2c1796c750c7-host\") on node \"crc\" DevicePath \"\"" Feb 03 09:10:17 crc kubenswrapper[4998]: I0203 09:10:17.591265 4998 scope.go:117] "RemoveContainer" containerID="ad6242d8f40f9cbdc0dab2752543bedb17fee552373ddd2404934c43256adaf8" Feb 03 09:10:17 crc kubenswrapper[4998]: I0203 09:10:17.591338 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-q7fx2/crc-debug-vbb7l" Feb 03 09:10:18 crc kubenswrapper[4998]: I0203 09:10:18.440049 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b5a9d03-7183-4289-ae83-2c1796c750c7" path="/var/lib/kubelet/pods/0b5a9d03-7183-4289-ae83-2c1796c750c7/volumes" Feb 03 09:10:41 crc kubenswrapper[4998]: I0203 09:10:41.862285 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_a4055a58-20e7-4b1b-82da-876889ea8c68/init-config-reloader/0.log" Feb 03 09:10:42 crc kubenswrapper[4998]: I0203 09:10:42.116282 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_a4055a58-20e7-4b1b-82da-876889ea8c68/config-reloader/0.log" Feb 03 09:10:42 crc kubenswrapper[4998]: I0203 09:10:42.125403 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_a4055a58-20e7-4b1b-82da-876889ea8c68/init-config-reloader/0.log" Feb 03 09:10:42 crc kubenswrapper[4998]: I0203 09:10:42.152371 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_a4055a58-20e7-4b1b-82da-876889ea8c68/alertmanager/0.log" Feb 03 09:10:42 crc kubenswrapper[4998]: I0203 09:10:42.325326 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_469f046f-ad13-49c3-b9da-ac6c46b48882/aodh-api/0.log" Feb 03 09:10:42 crc kubenswrapper[4998]: I0203 09:10:42.337106 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_469f046f-ad13-49c3-b9da-ac6c46b48882/aodh-listener/0.log" Feb 03 09:10:42 crc kubenswrapper[4998]: I0203 09:10:42.393223 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_469f046f-ad13-49c3-b9da-ac6c46b48882/aodh-evaluator/0.log" Feb 03 09:10:42 crc kubenswrapper[4998]: I0203 09:10:42.475150 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_469f046f-ad13-49c3-b9da-ac6c46b48882/aodh-notifier/0.log" Feb 03 09:10:42 crc kubenswrapper[4998]: I0203 09:10:42.512892 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-c2af-account-create-update-5b6b9_aecb9cbd-a669-4ea1-969e-424637c3b33d/mariadb-account-create-update/0.log" Feb 03 09:10:42 crc kubenswrapper[4998]: I0203 09:10:42.890240 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-db-create-cgx48_61eefdf4-447e-4503-b1c7-b36d866c1aec/mariadb-database-create/0.log" Feb 03 09:10:42 crc kubenswrapper[4998]: I0203 09:10:42.987515 4998 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_aodh-db-sync-gghpz_af5e8da4-94ba-438f-b45d-8052aff4265f/aodh-db-sync/0.log" Feb 03 09:10:43 crc kubenswrapper[4998]: I0203 09:10:43.110250 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6ff6f58c8-vtpc6_551dd212-c750-41d2-8eab-7b2f25cd8d20/barbican-api/0.log" Feb 03 09:10:43 crc kubenswrapper[4998]: I0203 09:10:43.210304 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6ff6f58c8-vtpc6_551dd212-c750-41d2-8eab-7b2f25cd8d20/barbican-api-log/0.log" Feb 03 09:10:43 crc kubenswrapper[4998]: I0203 09:10:43.329631 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-d84f77cbd-f74wg_e9db618d-21b6-4555-9c37-d25b45669164/barbican-keystone-listener/0.log" Feb 03 09:10:43 crc kubenswrapper[4998]: I0203 09:10:43.354240 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-d84f77cbd-f74wg_e9db618d-21b6-4555-9c37-d25b45669164/barbican-keystone-listener-log/0.log" Feb 03 09:10:43 crc kubenswrapper[4998]: I0203 09:10:43.458310 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-649f778c97-ksh5g_5c31eb51-94cb-4eee-b8e2-b1ae44f370ad/barbican-worker/0.log" Feb 03 09:10:43 crc kubenswrapper[4998]: I0203 09:10:43.537688 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-649f778c97-ksh5g_5c31eb51-94cb-4eee-b8e2-b1ae44f370ad/barbican-worker-log/0.log" Feb 03 09:10:43 crc kubenswrapper[4998]: I0203 09:10:43.666880 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_984b43d6-921d-41ef-b77d-66290a22450b/ceilometer-central-agent/0.log" Feb 03 09:10:43 crc kubenswrapper[4998]: I0203 09:10:43.740731 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_984b43d6-921d-41ef-b77d-66290a22450b/ceilometer-notification-agent/0.log" Feb 03 09:10:43 crc kubenswrapper[4998]: I0203 09:10:43.753274 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_984b43d6-921d-41ef-b77d-66290a22450b/proxy-httpd/0.log" Feb 03 09:10:43 crc kubenswrapper[4998]: I0203 09:10:43.794227 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_984b43d6-921d-41ef-b77d-66290a22450b/sg-core/0.log" Feb 03 09:10:43 crc kubenswrapper[4998]: I0203 09:10:43.937318 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_af29c0c2-777f-4625-be87-e41e23c29f71/cinder-api-log/0.log" Feb 03 09:10:43 crc kubenswrapper[4998]: I0203 09:10:43.948048 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_af29c0c2-777f-4625-be87-e41e23c29f71/cinder-api/0.log" Feb 03 09:10:44 crc kubenswrapper[4998]: I0203 09:10:44.112967 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_991a6b49-bfc0-4fa8-a503-1287a18010e0/cinder-scheduler/0.log" Feb 03 09:10:44 crc kubenswrapper[4998]: I0203 09:10:44.131406 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_991a6b49-bfc0-4fa8-a503-1287a18010e0/probe/0.log" Feb 03 09:10:44 crc kubenswrapper[4998]: I0203 09:10:44.251576 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5ccf7dbd4c-vvwtt_bfa12fa9-60f0-42b4-88e6-06431f8218dc/init/0.log" Feb 03 09:10:44 crc kubenswrapper[4998]: I0203 09:10:44.404201 4998 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_dnsmasq-dns-5ccf7dbd4c-vvwtt_bfa12fa9-60f0-42b4-88e6-06431f8218dc/init/0.log" Feb 03 09:10:44 crc kubenswrapper[4998]: I0203 09:10:44.427769 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_bd51c9c0-8d3a-4119-bbab-3f97a800d180/glance-httpd/0.log" Feb 03 09:10:44 crc kubenswrapper[4998]: I0203 09:10:44.492082 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5ccf7dbd4c-vvwtt_bfa12fa9-60f0-42b4-88e6-06431f8218dc/dnsmasq-dns/0.log" Feb 03 09:10:44 crc kubenswrapper[4998]: I0203 09:10:44.577708 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_bd51c9c0-8d3a-4119-bbab-3f97a800d180/glance-log/0.log" Feb 03 09:10:44 crc kubenswrapper[4998]: I0203 09:10:44.674304 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_d8d27788-6ccd-4466-9b8a-718a9b1d4d82/glance-log/0.log" Feb 03 09:10:44 crc kubenswrapper[4998]: I0203 09:10:44.694613 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_d8d27788-6ccd-4466-9b8a-718a9b1d4d82/glance-httpd/0.log" Feb 03 09:10:44 crc kubenswrapper[4998]: I0203 09:10:44.930413 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-api-56b7876679-z7x8v_94c40d17-0169-47e7-b150-10a8a4a06c89/heat-api/0.log" Feb 03 09:10:45 crc kubenswrapper[4998]: I0203 09:10:45.009633 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-b776-account-create-update-bvltf_2dd9ecda-c38f-43dd-8002-eacd214a4502/mariadb-account-create-update/0.log" Feb 03 09:10:45 crc kubenswrapper[4998]: I0203 09:10:45.177834 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-cfnapi-586cd89d46-clnps_60f33157-352c-49ac-ba85-ae425330ba78/heat-cfnapi/0.log" Feb 03 09:10:45 crc kubenswrapper[4998]: I0203 09:10:45.210677 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-db-create-22hsh_9946bf42-1fb1-4866-b14d-4ce08d52cde9/mariadb-database-create/0.log" Feb 03 09:10:45 crc kubenswrapper[4998]: I0203 09:10:45.437412 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-db-sync-bjggq_8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc/heat-db-sync/0.log" Feb 03 09:10:45 crc kubenswrapper[4998]: I0203 09:10:45.475930 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-engine-7d96fb7486-gb7dx_86d4c222-a376-4173-b257-eed1ca5fbd5c/heat-engine/0.log" Feb 03 09:10:45 crc kubenswrapper[4998]: I0203 09:10:45.693316 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-5d4bfc795c-4fj69_ab381bc8-8294-4c0a-91c1-f9bd491c66a1/horizon/0.log" Feb 03 09:10:45 crc kubenswrapper[4998]: I0203 09:10:45.724955 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-5d4bfc795c-4fj69_ab381bc8-8294-4c0a-91c1-f9bd491c66a1/horizon-log/0.log" Feb 03 09:10:45 crc kubenswrapper[4998]: I0203 09:10:45.777568 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-5bdf89b5c5-zrqjk_94702dda-48c1-4162-976e-0d624e111b04/keystone-api/0.log" Feb 03 09:10:45 crc kubenswrapper[4998]: I0203 09:10:45.898030 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29501821-lp2m4_b0bb1367-1e0d-4d99-aee4-dd02fc2fa67c/keystone-cron/0.log" Feb 03 09:10:45 crc kubenswrapper[4998]: I0203 09:10:45.982032 4998 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_kube-state-metrics-0_7afff0dd-f948-4551-b5ab-54c33db00a60/kube-state-metrics/0.log" Feb 03 09:10:46 crc kubenswrapper[4998]: I0203 09:10:46.075322 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-copy-data_e5ea5e31-cb66-4f96-a051-0730ce894ec2/adoption/0.log" Feb 03 09:10:46 crc kubenswrapper[4998]: I0203 09:10:46.302126 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-b8865847c-ztg5q_df70d98e-43b5-4a3f-98b8-57c2b577e1c8/neutron-httpd/0.log" Feb 03 09:10:46 crc kubenswrapper[4998]: I0203 09:10:46.303754 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-b8865847c-ztg5q_df70d98e-43b5-4a3f-98b8-57c2b577e1c8/neutron-api/0.log" Feb 03 09:10:46 crc kubenswrapper[4998]: I0203 09:10:46.590689 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_e5de42bf-ed76-4b09-9e10-5dffcd9476b6/nova-api-api/0.log" Feb 03 09:10:46 crc kubenswrapper[4998]: I0203 09:10:46.620578 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_e5de42bf-ed76-4b09-9e10-5dffcd9476b6/nova-api-log/0.log" Feb 03 09:10:46 crc kubenswrapper[4998]: I0203 09:10:46.727425 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-13d7-account-create-update-jc8qq_392cb9be-6c09-4ec6-8615-6d9978b0dfc9/mariadb-account-create-update/0.log" Feb 03 09:10:46 crc kubenswrapper[4998]: I0203 09:10:46.920010 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-db-create-gc8cr_77ff460f-4a6b-4f54-987d-e3b87003e735/mariadb-database-create/0.log" Feb 03 09:10:46 crc kubenswrapper[4998]: I0203 09:10:46.990952 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-cell-mapping-gq6wg_ee0c9709-6a32-4f5b-8458-cdb322580330/nova-manage/0.log" Feb 03 09:10:47 crc kubenswrapper[4998]: I0203 09:10:47.218910 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_57819439-d059-440d-a0ea-2d224cd27173/nova-cell0-conductor-conductor/0.log" Feb 03 09:10:47 crc kubenswrapper[4998]: I0203 09:10:47.257278 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-db-sync-rr8jz_cffdc337-9f79-401a-9af2-8f319a5ed5fb/nova-cell0-conductor-db-sync/0.log" Feb 03 09:10:47 crc kubenswrapper[4998]: I0203 09:10:47.416018 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-db-create-clppj_43447ea3-026c-476d-a8d0-f44de45d6e67/mariadb-database-create/0.log" Feb 03 09:10:47 crc kubenswrapper[4998]: I0203 09:10:47.523143 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-f8dc-account-create-update-442th_c8770d44-dcad-42f2-8637-1cf4213b1358/mariadb-account-create-update/0.log" Feb 03 09:10:47 crc kubenswrapper[4998]: I0203 09:10:47.755613 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-af76-account-create-update-l4c42_f47f17fe-5026-4c73-8b4e-3ac63c890885/mariadb-account-create-update/0.log" Feb 03 09:10:47 crc kubenswrapper[4998]: I0203 09:10:47.828634 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-cell-mapping-rqj97_728f12ea-0558-4049-9c2b-b060c3095656/nova-manage/0.log" Feb 03 09:10:48 crc kubenswrapper[4998]: I0203 09:10:48.085761 4998 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_nova-cell1-conductor-db-sync-46zpr_1fb98f13-657a-4351-a9f6-11dcfd10d016/nova-cell1-conductor-db-sync/0.log" Feb 03 09:10:48 crc kubenswrapper[4998]: I0203 09:10:48.124685 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_211b9c1c-e5d3-4781-a098-fa66abea3a5a/nova-cell1-conductor-conductor/0.log" Feb 03 09:10:48 crc kubenswrapper[4998]: I0203 09:10:48.324878 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-db-create-pz4fh_d3408867-7c55-4f2a-ba1a-d47cc8dd38cb/mariadb-database-create/0.log" Feb 03 09:10:48 crc kubenswrapper[4998]: I0203 09:10:48.401690 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_033d6aaa-cca8-4ec3-af93-1b03bfb969ee/nova-cell1-novncproxy-novncproxy/0.log" Feb 03 09:10:48 crc kubenswrapper[4998]: I0203 09:10:48.586907 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell2-594c-account-create-update-wpnf2_d14b7d22-8788-441a-9b12-a410d9622e74/mariadb-account-create-update/0.log" Feb 03 09:10:48 crc kubenswrapper[4998]: I0203 09:10:48.674312 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell2-cell-mapping-r6m22_2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef/nova-manage/0.log" Feb 03 09:10:48 crc kubenswrapper[4998]: I0203 09:10:48.970027 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell2-conductor-db-sync-v9jd9_0b45c128-79bc-48cf-81f8-de28ca277c36/nova-cell2-conductor-db-sync/0.log" Feb 03 09:10:49 crc kubenswrapper[4998]: I0203 09:10:49.036885 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell2-conductor-0_89c6925b-18c7-4964-a604-d66158e99e54/nova-cell2-conductor-conductor/0.log" Feb 03 09:10:49 crc kubenswrapper[4998]: I0203 09:10:49.167765 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell2-db-create-dczdx_6b7abcb2-6609-4cbf-aee3-936f5deba7dd/mariadb-database-create/0.log" Feb 03 09:10:49 crc kubenswrapper[4998]: I0203 09:10:49.295257 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell2-novncproxy-0_ffd397d0-f09c-4352-bdeb-2be3b4d6102f/nova-cell2-novncproxy-novncproxy/0.log" Feb 03 09:10:49 crc kubenswrapper[4998]: I0203 09:10:49.468985 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell3-a861-account-create-update-4t7rm_a9526202-62b4-4bfe-8638-b80b72772f9d/mariadb-account-create-update/0.log" Feb 03 09:10:49 crc kubenswrapper[4998]: I0203 09:10:49.827893 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell3-cell-mapping-j95hx_655b6245-34f9-4a4b-9853-a17be8282e97/nova-manage/0.log" Feb 03 09:10:50 crc kubenswrapper[4998]: I0203 09:10:50.038157 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell3-conductor-db-sync-jk9gl_a7243f69-0c30-44db-b97a-c287bca3afff/nova-cell3-conductor-db-sync/0.log" Feb 03 09:10:50 crc kubenswrapper[4998]: I0203 09:10:50.082939 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell3-conductor-0_17913e15-df86-4335-a95c-127ba12b91c9/nova-cell3-conductor-conductor/0.log" Feb 03 09:10:50 crc kubenswrapper[4998]: I0203 09:10:50.355769 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell3-db-create-sdzn8_d3975bdb-b42f-49ee-ac35-00ddcb9760af/mariadb-database-create/0.log" Feb 03 09:10:50 crc kubenswrapper[4998]: I0203 09:10:50.419515 4998 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack_nova-cell3-novncproxy-0_7cefac34-9996-45f6-9093-5fdf673be4ab/nova-cell3-novncproxy-novncproxy/0.log" Feb 03 09:10:50 crc kubenswrapper[4998]: I0203 09:10:50.459829 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_eb644615-73e9-43ba-baab-c78d7881be54/memcached/0.log" Feb 03 09:10:50 crc kubenswrapper[4998]: I0203 09:10:50.677928 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_74c8cac3-60c1-475c-896f-f19ba0e995eb/nova-metadata-log/0.log" Feb 03 09:10:50 crc kubenswrapper[4998]: I0203 09:10:50.718138 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_74c8cac3-60c1-475c-896f-f19ba0e995eb/nova-metadata-metadata/0.log" Feb 03 09:10:50 crc kubenswrapper[4998]: I0203 09:10:50.816608 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_b01e5169-e1ea-41dc-a09c-fd4922b93f97/nova-scheduler-scheduler/0.log" Feb 03 09:10:50 crc kubenswrapper[4998]: I0203 09:10:50.911952 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_d3f299c1-ab12-4ba1-80fc-6d286f546d1d/mysql-bootstrap/0.log" Feb 03 09:10:51 crc kubenswrapper[4998]: I0203 09:10:51.086638 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_d3f299c1-ab12-4ba1-80fc-6d286f546d1d/galera/0.log" Feb 03 09:10:51 crc kubenswrapper[4998]: I0203 09:10:51.104227 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell2-galera-0_ef339639-5ecd-4a27-899c-4b61d5ef5031/mysql-bootstrap/0.log" Feb 03 09:10:51 crc kubenswrapper[4998]: I0203 09:10:51.132322 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_d3f299c1-ab12-4ba1-80fc-6d286f546d1d/mysql-bootstrap/0.log" Feb 03 09:10:51 crc kubenswrapper[4998]: I0203 09:10:51.287252 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell2-galera-0_ef339639-5ecd-4a27-899c-4b61d5ef5031/galera/0.log" Feb 03 09:10:51 crc kubenswrapper[4998]: I0203 09:10:51.299970 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell2-galera-0_ef339639-5ecd-4a27-899c-4b61d5ef5031/mysql-bootstrap/0.log" Feb 03 09:10:51 crc kubenswrapper[4998]: I0203 09:10:51.356404 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell3-galera-0_001c7640-7c7d-4cc2-846e-d9af02321908/mysql-bootstrap/0.log" Feb 03 09:10:51 crc kubenswrapper[4998]: I0203 09:10:51.533712 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell3-galera-0_001c7640-7c7d-4cc2-846e-d9af02321908/mysql-bootstrap/0.log" Feb 03 09:10:51 crc kubenswrapper[4998]: I0203 09:10:51.595666 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell3-galera-0_001c7640-7c7d-4cc2-846e-d9af02321908/galera/0.log" Feb 03 09:10:51 crc kubenswrapper[4998]: I0203 09:10:51.603629 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_396e3696-7910-4dca-9648-f4be4d5075d2/mysql-bootstrap/0.log" Feb 03 09:10:51 crc kubenswrapper[4998]: I0203 09:10:51.826574 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_396e3696-7910-4dca-9648-f4be4d5075d2/mysql-bootstrap/0.log" Feb 03 09:10:51 crc kubenswrapper[4998]: I0203 09:10:51.896087 4998 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_openstack-galera-0_396e3696-7910-4dca-9648-f4be4d5075d2/galera/0.log" Feb 03 09:10:51 crc kubenswrapper[4998]: I0203 09:10:51.922517 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_43f887ad-f9d1-4f23-b9be-21ad6bb1cd26/openstackclient/0.log" Feb 03 09:10:52 crc kubenswrapper[4998]: I0203 09:10:52.084965 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-copy-data_20db43e7-c88c-4206-9909-c428d30d722c/adoption/0.log" Feb 03 09:10:52 crc kubenswrapper[4998]: I0203 09:10:52.106624 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_44c85897-bda5-4810-b7a8-35b57d1d7fc6/openstack-network-exporter/0.log" Feb 03 09:10:52 crc kubenswrapper[4998]: I0203 09:10:52.144978 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_44c85897-bda5-4810-b7a8-35b57d1d7fc6/ovn-northd/0.log" Feb 03 09:10:52 crc kubenswrapper[4998]: I0203 09:10:52.318814 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_251347b2-bf73-4795-93f6-2f4813fab858/openstack-network-exporter/0.log" Feb 03 09:10:52 crc kubenswrapper[4998]: I0203 09:10:52.334642 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_251347b2-bf73-4795-93f6-2f4813fab858/ovsdbserver-nb/0.log" Feb 03 09:10:52 crc kubenswrapper[4998]: I0203 09:10:52.392114 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_01a6a829-ee2b-4806-87d0-ec9b6c1e8211/openstack-network-exporter/0.log" Feb 03 09:10:52 crc kubenswrapper[4998]: I0203 09:10:52.448260 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_01a6a829-ee2b-4806-87d0-ec9b6c1e8211/ovsdbserver-nb/0.log" Feb 03 09:10:52 crc kubenswrapper[4998]: I0203 09:10:52.552079 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f/openstack-network-exporter/0.log" Feb 03 09:10:52 crc kubenswrapper[4998]: I0203 09:10:52.619877 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_e6f35c23-9d37-42e1-a6ce-d2b3c2bd304f/ovsdbserver-nb/0.log" Feb 03 09:10:52 crc kubenswrapper[4998]: I0203 09:10:52.751575 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_c2d19551-4618-40f2-8404-e4cbf850995b/openstack-network-exporter/0.log" Feb 03 09:10:52 crc kubenswrapper[4998]: I0203 09:10:52.791459 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_c2d19551-4618-40f2-8404-e4cbf850995b/ovsdbserver-sb/0.log" Feb 03 09:10:52 crc kubenswrapper[4998]: I0203 09:10:52.797233 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_96054ea3-a586-4fae-b48a-e3a439c1944e/openstack-network-exporter/0.log" Feb 03 09:10:52 crc kubenswrapper[4998]: I0203 09:10:52.969528 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_96054ea3-a586-4fae-b48a-e3a439c1944e/ovsdbserver-sb/0.log" Feb 03 09:10:53 crc kubenswrapper[4998]: I0203 09:10:53.012013 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_09f3d0a5-438b-44d8-8e87-ff59e4cdecde/ovsdbserver-sb/0.log" Feb 03 09:10:53 crc kubenswrapper[4998]: I0203 09:10:53.069888 4998 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovsdbserver-sb-2_09f3d0a5-438b-44d8-8e87-ff59e4cdecde/openstack-network-exporter/0.log" Feb 03 09:10:53 crc kubenswrapper[4998]: I0203 09:10:53.171607 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-79f9fb6658-bmtrv_1657ff1a-5ff6-4191-b579-32e9168333d7/placement-api/0.log" Feb 03 09:10:53 crc kubenswrapper[4998]: I0203 09:10:53.261186 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-79f9fb6658-bmtrv_1657ff1a-5ff6-4191-b579-32e9168333d7/placement-log/0.log" Feb 03 09:10:53 crc kubenswrapper[4998]: I0203 09:10:53.296846 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac/init-config-reloader/0.log" Feb 03 09:10:53 crc kubenswrapper[4998]: I0203 09:10:53.542721 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac/init-config-reloader/0.log" Feb 03 09:10:53 crc kubenswrapper[4998]: I0203 09:10:53.585923 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac/prometheus/0.log" Feb 03 09:10:53 crc kubenswrapper[4998]: I0203 09:10:53.594140 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac/config-reloader/0.log" Feb 03 09:10:53 crc kubenswrapper[4998]: I0203 09:10:53.624594 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_dc3e90e8-8eb3-46e1-8fc9-3c3158c873ac/thanos-sidecar/0.log" Feb 03 09:10:53 crc kubenswrapper[4998]: I0203 09:10:53.797087 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_06d9046e-6151-41df-a973-abc9673c43e8/setup-container/0.log" Feb 03 09:10:53 crc kubenswrapper[4998]: I0203 09:10:53.970540 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell2-server-0_e735f12e-e7a4-47d6-b87e-c0e80dbd2cee/setup-container/0.log" Feb 03 09:10:53 crc kubenswrapper[4998]: I0203 09:10:53.970693 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_06d9046e-6151-41df-a973-abc9673c43e8/setup-container/0.log" Feb 03 09:10:54 crc kubenswrapper[4998]: I0203 09:10:54.008162 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_06d9046e-6151-41df-a973-abc9673c43e8/rabbitmq/0.log" Feb 03 09:10:54 crc kubenswrapper[4998]: I0203 09:10:54.177392 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell2-server-0_e735f12e-e7a4-47d6-b87e-c0e80dbd2cee/setup-container/0.log" Feb 03 09:10:54 crc kubenswrapper[4998]: I0203 09:10:54.217809 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell2-server-0_e735f12e-e7a4-47d6-b87e-c0e80dbd2cee/rabbitmq/0.log" Feb 03 09:10:54 crc kubenswrapper[4998]: I0203 09:10:54.247612 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell3-server-0_ca3b87c5-fc69-4b39-8ba1-a7d141370ca9/setup-container/0.log" Feb 03 09:10:54 crc kubenswrapper[4998]: I0203 09:10:54.410917 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell3-server-0_ca3b87c5-fc69-4b39-8ba1-a7d141370ca9/setup-container/0.log" Feb 03 09:10:54 crc kubenswrapper[4998]: I0203 09:10:54.449760 4998 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_rabbitmq-server-0_474e078d-6891-4062-a084-5208c534b46a/setup-container/0.log" Feb 03 09:10:54 crc kubenswrapper[4998]: I0203 09:10:54.484167 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell3-server-0_ca3b87c5-fc69-4b39-8ba1-a7d141370ca9/rabbitmq/0.log" Feb 03 09:10:54 crc kubenswrapper[4998]: I0203 09:10:54.659941 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_474e078d-6891-4062-a084-5208c534b46a/setup-container/0.log" Feb 03 09:10:54 crc kubenswrapper[4998]: I0203 09:10:54.726388 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-768dd68f4d-g2qrv_f64949be-8e32-451e-a626-98930fa6b3a2/proxy-server/0.log" Feb 03 09:10:54 crc kubenswrapper[4998]: I0203 09:10:54.731743 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-768dd68f4d-g2qrv_f64949be-8e32-451e-a626-98930fa6b3a2/proxy-httpd/0.log" Feb 03 09:10:54 crc kubenswrapper[4998]: I0203 09:10:54.991923 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-b9slq_70c13234-a910-458e-9013-94c7269a2984/swift-ring-rebalance/0.log" Feb 03 09:10:55 crc kubenswrapper[4998]: I0203 09:10:55.487506 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_474e078d-6891-4062-a084-5208c534b46a/rabbitmq/0.log" Feb 03 09:11:09 crc kubenswrapper[4998]: I0203 09:11:09.052941 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-clppj"] Feb 03 09:11:09 crc kubenswrapper[4998]: I0203 09:11:09.064171 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-clppj"] Feb 03 09:11:10 crc kubenswrapper[4998]: I0203 09:11:10.037354 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-f8dc-account-create-update-442th"] Feb 03 09:11:10 crc kubenswrapper[4998]: I0203 09:11:10.050732 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-pz4fh"] Feb 03 09:11:10 crc kubenswrapper[4998]: I0203 09:11:10.108369 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-f8dc-account-create-update-442th"] Feb 03 09:11:10 crc kubenswrapper[4998]: I0203 09:11:10.120409 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-pz4fh"] Feb 03 09:11:10 crc kubenswrapper[4998]: I0203 09:11:10.131567 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-13d7-account-create-update-jc8qq"] Feb 03 09:11:10 crc kubenswrapper[4998]: I0203 09:11:10.141694 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell2-594c-account-create-update-wpnf2"] Feb 03 09:11:10 crc kubenswrapper[4998]: I0203 09:11:10.151976 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell2-594c-account-create-update-wpnf2"] Feb 03 09:11:10 crc kubenswrapper[4998]: I0203 09:11:10.163131 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell3-a861-account-create-update-4t7rm"] Feb 03 09:11:10 crc kubenswrapper[4998]: I0203 09:11:10.177063 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell3-a861-account-create-update-4t7rm"] Feb 03 09:11:10 crc kubenswrapper[4998]: I0203 09:11:10.181694 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell3-db-create-sdzn8"] Feb 03 09:11:10 crc kubenswrapper[4998]: I0203 09:11:10.193464 4998 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell2-db-create-dczdx"] Feb 03 09:11:10 crc kubenswrapper[4998]: I0203 09:11:10.204165 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-gc8cr"] Feb 03 09:11:10 crc kubenswrapper[4998]: I0203 09:11:10.213570 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-13d7-account-create-update-jc8qq"] Feb 03 09:11:10 crc kubenswrapper[4998]: I0203 09:11:10.222802 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell3-db-create-sdzn8"] Feb 03 09:11:10 crc kubenswrapper[4998]: I0203 09:11:10.231067 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-af76-account-create-update-l4c42"] Feb 03 09:11:10 crc kubenswrapper[4998]: I0203 09:11:10.240323 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell2-db-create-dczdx"] Feb 03 09:11:10 crc kubenswrapper[4998]: I0203 09:11:10.248474 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-af76-account-create-update-l4c42"] Feb 03 09:11:10 crc kubenswrapper[4998]: I0203 09:11:10.257657 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-gc8cr"] Feb 03 09:11:10 crc kubenswrapper[4998]: I0203 09:11:10.439020 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="392cb9be-6c09-4ec6-8615-6d9978b0dfc9" path="/var/lib/kubelet/pods/392cb9be-6c09-4ec6-8615-6d9978b0dfc9/volumes" Feb 03 09:11:10 crc kubenswrapper[4998]: I0203 09:11:10.440135 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43447ea3-026c-476d-a8d0-f44de45d6e67" path="/var/lib/kubelet/pods/43447ea3-026c-476d-a8d0-f44de45d6e67/volumes" Feb 03 09:11:10 crc kubenswrapper[4998]: I0203 09:11:10.440699 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b7abcb2-6609-4cbf-aee3-936f5deba7dd" path="/var/lib/kubelet/pods/6b7abcb2-6609-4cbf-aee3-936f5deba7dd/volumes" Feb 03 09:11:10 crc kubenswrapper[4998]: I0203 09:11:10.441245 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="77ff460f-4a6b-4f54-987d-e3b87003e735" path="/var/lib/kubelet/pods/77ff460f-4a6b-4f54-987d-e3b87003e735/volumes" Feb 03 09:11:10 crc kubenswrapper[4998]: I0203 09:11:10.442248 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9526202-62b4-4bfe-8638-b80b72772f9d" path="/var/lib/kubelet/pods/a9526202-62b4-4bfe-8638-b80b72772f9d/volumes" Feb 03 09:11:10 crc kubenswrapper[4998]: I0203 09:11:10.442824 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c8770d44-dcad-42f2-8637-1cf4213b1358" path="/var/lib/kubelet/pods/c8770d44-dcad-42f2-8637-1cf4213b1358/volumes" Feb 03 09:11:10 crc kubenswrapper[4998]: I0203 09:11:10.443376 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d14b7d22-8788-441a-9b12-a410d9622e74" path="/var/lib/kubelet/pods/d14b7d22-8788-441a-9b12-a410d9622e74/volumes" Feb 03 09:11:10 crc kubenswrapper[4998]: I0203 09:11:10.444347 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3408867-7c55-4f2a-ba1a-d47cc8dd38cb" path="/var/lib/kubelet/pods/d3408867-7c55-4f2a-ba1a-d47cc8dd38cb/volumes" Feb 03 09:11:10 crc kubenswrapper[4998]: I0203 09:11:10.444896 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3975bdb-b42f-49ee-ac35-00ddcb9760af" path="/var/lib/kubelet/pods/d3975bdb-b42f-49ee-ac35-00ddcb9760af/volumes" Feb 03 09:11:10 crc 
kubenswrapper[4998]: I0203 09:11:10.445411 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f47f17fe-5026-4c73-8b4e-3ac63c890885" path="/var/lib/kubelet/pods/f47f17fe-5026-4c73-8b4e-3ac63c890885/volumes" Feb 03 09:11:13 crc kubenswrapper[4998]: I0203 09:11:13.251325 4998 scope.go:117] "RemoveContainer" containerID="88b110efb177f72c1415962d3f641262f88abbaf621158a45c7187443cf347b9" Feb 03 09:11:13 crc kubenswrapper[4998]: I0203 09:11:13.304882 4998 scope.go:117] "RemoveContainer" containerID="f4c40facc0946f455ed4775b6d0e5cb263c684cf95b573e2f41a8a7c803deba7" Feb 03 09:11:13 crc kubenswrapper[4998]: I0203 09:11:13.367647 4998 scope.go:117] "RemoveContainer" containerID="3cd98e6664289bf77a93bbf0de8fd9ff2ef9b6a0d7dbcb5239ade7bdc42f355c" Feb 03 09:11:13 crc kubenswrapper[4998]: I0203 09:11:13.409454 4998 scope.go:117] "RemoveContainer" containerID="50b843722819961839fed788494b0822c527d53d7ef00ed12ed55eaea20bb7cc" Feb 03 09:11:13 crc kubenswrapper[4998]: I0203 09:11:13.462828 4998 scope.go:117] "RemoveContainer" containerID="0d7192364c123d3ec616888c63408e26da0a4d52e6d9646ecc38484da95a632b" Feb 03 09:11:13 crc kubenswrapper[4998]: I0203 09:11:13.507109 4998 scope.go:117] "RemoveContainer" containerID="1af80b4f8265d321ee73a6fd821dc4c687f3882fe53e7b77676c2e5bc35ec87c" Feb 03 09:11:13 crc kubenswrapper[4998]: I0203 09:11:13.590723 4998 scope.go:117] "RemoveContainer" containerID="e4226a46a622fe93175838739710ae8e71ddbaa0c3ec1f7e2436235e32d82646" Feb 03 09:11:13 crc kubenswrapper[4998]: I0203 09:11:13.638455 4998 scope.go:117] "RemoveContainer" containerID="93d79308ba28993bfcffa86b272cbe1cc9a9bc5a6b1bef7a9801bd7c3ad901cb" Feb 03 09:11:13 crc kubenswrapper[4998]: I0203 09:11:13.681471 4998 scope.go:117] "RemoveContainer" containerID="a6822c74e466c3758a9034b89e0e5332e70a083a17479363b43c3411698aa852" Feb 03 09:11:13 crc kubenswrapper[4998]: I0203 09:11:13.704056 4998 scope.go:117] "RemoveContainer" containerID="afcdb300c233945c662f2e1e4e16a6874a957fece7358a837479ee0e15c1076d" Feb 03 09:11:18 crc kubenswrapper[4998]: I0203 09:11:18.178767 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_805de4b7130efd4fbe2290d0c51d78e6295b83d77efc3ff2d1b014ffe7sstbd_ec8df89a-256d-4eb4-97dc-57d42b4f34f5/util/0.log" Feb 03 09:11:18 crc kubenswrapper[4998]: I0203 09:11:18.395520 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_805de4b7130efd4fbe2290d0c51d78e6295b83d77efc3ff2d1b014ffe7sstbd_ec8df89a-256d-4eb4-97dc-57d42b4f34f5/util/0.log" Feb 03 09:11:18 crc kubenswrapper[4998]: I0203 09:11:18.402540 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_805de4b7130efd4fbe2290d0c51d78e6295b83d77efc3ff2d1b014ffe7sstbd_ec8df89a-256d-4eb4-97dc-57d42b4f34f5/pull/0.log" Feb 03 09:11:18 crc kubenswrapper[4998]: I0203 09:11:18.491186 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_805de4b7130efd4fbe2290d0c51d78e6295b83d77efc3ff2d1b014ffe7sstbd_ec8df89a-256d-4eb4-97dc-57d42b4f34f5/pull/0.log" Feb 03 09:11:18 crc kubenswrapper[4998]: I0203 09:11:18.653143 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_805de4b7130efd4fbe2290d0c51d78e6295b83d77efc3ff2d1b014ffe7sstbd_ec8df89a-256d-4eb4-97dc-57d42b4f34f5/util/0.log" Feb 03 09:11:18 crc kubenswrapper[4998]: I0203 09:11:18.688673 4998 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_805de4b7130efd4fbe2290d0c51d78e6295b83d77efc3ff2d1b014ffe7sstbd_ec8df89a-256d-4eb4-97dc-57d42b4f34f5/extract/0.log" Feb 03 09:11:18 crc kubenswrapper[4998]: I0203 09:11:18.688956 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_805de4b7130efd4fbe2290d0c51d78e6295b83d77efc3ff2d1b014ffe7sstbd_ec8df89a-256d-4eb4-97dc-57d42b4f34f5/pull/0.log" Feb 03 09:11:18 crc kubenswrapper[4998]: I0203 09:11:18.949650 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7b6c4d8c5f-jqjg8_5254fd85-6147-4f7f-9ed7-d5491795590e/manager/0.log" Feb 03 09:11:19 crc kubenswrapper[4998]: I0203 09:11:19.002650 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-8d874c8fc-cqsb2_555310a7-1022-4224-8329-56cf0b598983/manager/0.log" Feb 03 09:11:19 crc kubenswrapper[4998]: I0203 09:11:19.175667 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-6d9697b7f4-r7xwj_15e1bb02-71fd-439f-b8b0-769aebffd30e/manager/0.log" Feb 03 09:11:19 crc kubenswrapper[4998]: I0203 09:11:19.426040 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-8886f4c47-gztww_00c60d3a-58c3-4ad9-a015-1dacdebef5dc/manager/0.log" Feb 03 09:11:19 crc kubenswrapper[4998]: I0203 09:11:19.495664 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-69d6db494d-5jdj9_44f95b1e-8d2d-4db0-8434-b4ae01d46f98/manager/0.log" Feb 03 09:11:19 crc kubenswrapper[4998]: I0203 09:11:19.652150 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-5fb775575f-6jgrc_c3e9afd3-207f-4a98-ab9a-1abb166da517/manager/0.log" Feb 03 09:11:19 crc kubenswrapper[4998]: I0203 09:11:19.865047 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-5f4b8bd54d-t2v45_6966131b-ab1c-4de2-9a32-8bcbd1d26c4a/manager/0.log" Feb 03 09:11:20 crc kubenswrapper[4998]: I0203 09:11:20.156981 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7dd968899f-qrtp4_02d790b4-9b97-45e9-8efa-4cb81384bfae/manager/0.log" Feb 03 09:11:20 crc kubenswrapper[4998]: I0203 09:11:20.244075 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-84f48565d4-vxnv2_c96b6def-54b9-4b76-870c-7e504e58cca9/manager/0.log" Feb 03 09:11:20 crc kubenswrapper[4998]: I0203 09:11:20.532150 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-67bf948998-rrghs_7c0e11e2-32c3-4d5b-889e-b5d55817c85c/manager/0.log" Feb 03 09:11:20 crc kubenswrapper[4998]: I0203 09:11:20.534956 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-79955696d6-7fllq_e6885e8a-0fe6-44be-93e6-b5c663958e1f/manager/0.log" Feb 03 09:11:20 crc kubenswrapper[4998]: I0203 09:11:20.590191 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-585dbc889-xgcxv_d838894e-3a4d-401c-b4d1-b4464d006b88/manager/0.log" Feb 03 09:11:20 crc kubenswrapper[4998]: I0203 09:11:20.826231 4998 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-6687f8d877-5dbkk_154195a7-15d8-454e-8e95-2e5f3935d2da/manager/0.log" Feb 03 09:11:20 crc kubenswrapper[4998]: I0203 09:11:20.929896 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-55bff696bd-c8rqm_447cd897-c504-49fa-82b8-5c205e002cfe/manager/0.log" Feb 03 09:11:21 crc kubenswrapper[4998]: I0203 09:11:21.034751 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-5ff45dfdbfpdf4p_a954bf90-cac3-4896-bbaf-8ad98f3876d9/manager/0.log" Feb 03 09:11:21 crc kubenswrapper[4998]: I0203 09:11:21.302020 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-init-6bf6665fd-l22vm_415bf1be-506f-4fc6-b7f5-abf9a0134900/operator/0.log" Feb 03 09:11:21 crc kubenswrapper[4998]: I0203 09:11:21.641988 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-pm5bm_e43b0989-9c17-455c-827a-6db9db8a0039/registry-server/0.log" Feb 03 09:11:21 crc kubenswrapper[4998]: I0203 09:11:21.728836 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-788c46999f-4sdk7_73dcbbad-3aa0-48d6-ac55-ad4443f781d3/manager/0.log" Feb 03 09:11:21 crc kubenswrapper[4998]: I0203 09:11:21.966604 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-5b964cf4cd-98dqh_d627e8f0-589b-44a6-bf5c-9049ac454363/manager/0.log" Feb 03 09:11:22 crc kubenswrapper[4998]: I0203 09:11:22.151129 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-h2s2c_0ab700e8-fa08-4fb9-9ef8-4053055f99ee/operator/0.log" Feb 03 09:11:22 crc kubenswrapper[4998]: I0203 09:11:22.297843 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-68fc8c869-46ddp_68fc9816-016d-4444-8ebc-fb099a3e0d3c/manager/0.log" Feb 03 09:11:22 crc kubenswrapper[4998]: I0203 09:11:22.522853 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-56f8bfcd9f-5fwsr_d36616fb-0b5a-453d-b281-8df36af93238/manager/0.log" Feb 03 09:11:22 crc kubenswrapper[4998]: I0203 09:11:22.622298 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-64b5b76f97-4dj25_c24717b4-268b-49c0-82d1-b63ebcc16bf7/manager/0.log" Feb 03 09:11:22 crc kubenswrapper[4998]: I0203 09:11:22.756500 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-564965969-j4xmh_30f4bbbd-5f3f-4f45-96cf-33fc6c63f458/manager/0.log" Feb 03 09:11:23 crc kubenswrapper[4998]: I0203 09:11:23.371528 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-646f757d77-gxl8w_b4eb1b29-8a96-435e-ac43-e4ee5d349047/manager/0.log" Feb 03 09:11:33 crc kubenswrapper[4998]: I0203 09:11:33.044627 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-rr8jz"] Feb 03 09:11:33 crc kubenswrapper[4998]: I0203 09:11:33.056311 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-rr8jz"] Feb 03 09:11:34 crc kubenswrapper[4998]: I0203 
09:11:34.439925 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cffdc337-9f79-401a-9af2-8f319a5ed5fb" path="/var/lib/kubelet/pods/cffdc337-9f79-401a-9af2-8f319a5ed5fb/volumes" Feb 03 09:11:41 crc kubenswrapper[4998]: I0203 09:11:41.460544 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-57hdh_f29b800f-24cd-4615-8692-3fd6e84ad338/control-plane-machine-set-operator/0.log" Feb 03 09:11:41 crc kubenswrapper[4998]: I0203 09:11:41.651000 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-hf97k_584d6f48-0415-4b3f-813c-a22af4a339cb/machine-api-operator/0.log" Feb 03 09:11:41 crc kubenswrapper[4998]: I0203 09:11:41.651896 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-hf97k_584d6f48-0415-4b3f-813c-a22af4a339cb/kube-rbac-proxy/0.log" Feb 03 09:11:48 crc kubenswrapper[4998]: I0203 09:11:48.044437 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell2-conductor-db-sync-v9jd9"] Feb 03 09:11:48 crc kubenswrapper[4998]: I0203 09:11:48.053901 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-46zpr"] Feb 03 09:11:48 crc kubenswrapper[4998]: I0203 09:11:48.064169 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell3-conductor-db-sync-jk9gl"] Feb 03 09:11:48 crc kubenswrapper[4998]: I0203 09:11:48.073290 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-46zpr"] Feb 03 09:11:48 crc kubenswrapper[4998]: I0203 09:11:48.081903 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell3-conductor-db-sync-jk9gl"] Feb 03 09:11:48 crc kubenswrapper[4998]: I0203 09:11:48.091481 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell2-conductor-db-sync-v9jd9"] Feb 03 09:11:48 crc kubenswrapper[4998]: I0203 09:11:48.457610 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b45c128-79bc-48cf-81f8-de28ca277c36" path="/var/lib/kubelet/pods/0b45c128-79bc-48cf-81f8-de28ca277c36/volumes" Feb 03 09:11:48 crc kubenswrapper[4998]: I0203 09:11:48.458718 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1fb98f13-657a-4351-a9f6-11dcfd10d016" path="/var/lib/kubelet/pods/1fb98f13-657a-4351-a9f6-11dcfd10d016/volumes" Feb 03 09:11:48 crc kubenswrapper[4998]: I0203 09:11:48.459537 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7243f69-0c30-44db-b97a-c287bca3afff" path="/var/lib/kubelet/pods/a7243f69-0c30-44db-b97a-c287bca3afff/volumes" Feb 03 09:11:49 crc kubenswrapper[4998]: I0203 09:11:49.027828 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-gq6wg"] Feb 03 09:11:49 crc kubenswrapper[4998]: I0203 09:11:49.040268 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-gq6wg"] Feb 03 09:11:50 crc kubenswrapper[4998]: I0203 09:11:50.437703 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee0c9709-6a32-4f5b-8458-cdb322580330" path="/var/lib/kubelet/pods/ee0c9709-6a32-4f5b-8458-cdb322580330/volumes" Feb 03 09:11:53 crc kubenswrapper[4998]: I0203 09:11:53.789760 4998 log.go:25] "Finished parsing log file" 
path="/var/log/pods/cert-manager_cert-manager-545d4d4674-x5nsm_033d2c3c-ef4a-43a3-b175-79321bfb5aa2/cert-manager-controller/0.log" Feb 03 09:11:53 crc kubenswrapper[4998]: I0203 09:11:53.884690 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-5545bd876-8wvrj_de82203b-599e-4c92-afed-08dd43dabf88/cert-manager-cainjector/0.log" Feb 03 09:11:53 crc kubenswrapper[4998]: I0203 09:11:53.994831 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-6888856db4-qpgz8_bf9d2252-d1e5-4558-a6b5-892087c30d30/cert-manager-webhook/0.log" Feb 03 09:12:07 crc kubenswrapper[4998]: I0203 09:12:07.033105 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell2-cell-mapping-r6m22"] Feb 03 09:12:07 crc kubenswrapper[4998]: I0203 09:12:07.047898 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell2-cell-mapping-r6m22"] Feb 03 09:12:07 crc kubenswrapper[4998]: I0203 09:12:07.668333 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7754f76f8b-d7qtx_7f68614c-50cc-4234-916c-73a291c112b9/nmstate-console-plugin/0.log" Feb 03 09:12:07 crc kubenswrapper[4998]: I0203 09:12:07.672891 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-wsbwf_0ed5d979-36b4-45e2-8070-88873509fff7/nmstate-handler/0.log" Feb 03 09:12:07 crc kubenswrapper[4998]: I0203 09:12:07.753339 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-t7wvp_62aaf85e-b285-46ee-94b9-d29f6125d823/kube-rbac-proxy/0.log" Feb 03 09:12:07 crc kubenswrapper[4998]: I0203 09:12:07.795216 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-t7wvp_62aaf85e-b285-46ee-94b9-d29f6125d823/nmstate-metrics/0.log" Feb 03 09:12:07 crc kubenswrapper[4998]: I0203 09:12:07.875393 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-646758c888-wswsx_591273c0-600c-4fd3-baee-a1aa3e943ed1/nmstate-operator/0.log" Feb 03 09:12:07 crc kubenswrapper[4998]: I0203 09:12:07.997427 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-8474b5b9d8-7t78k_eff39f1c-011a-4719-8821-8901bd649ec4/nmstate-webhook/0.log" Feb 03 09:12:08 crc kubenswrapper[4998]: I0203 09:12:08.030763 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell3-cell-mapping-j95hx"] Feb 03 09:12:08 crc kubenswrapper[4998]: I0203 09:12:08.040584 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-rqj97"] Feb 03 09:12:08 crc kubenswrapper[4998]: I0203 09:12:08.051001 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell3-cell-mapping-j95hx"] Feb 03 09:12:08 crc kubenswrapper[4998]: I0203 09:12:08.060950 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-rqj97"] Feb 03 09:12:08 crc kubenswrapper[4998]: I0203 09:12:08.439211 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef" path="/var/lib/kubelet/pods/2bd9fb5a-39f4-40bd-82e7-003e1b5ed7ef/volumes" Feb 03 09:12:08 crc kubenswrapper[4998]: I0203 09:12:08.440574 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="655b6245-34f9-4a4b-9853-a17be8282e97" path="/var/lib/kubelet/pods/655b6245-34f9-4a4b-9853-a17be8282e97/volumes" Feb 03 
09:12:08 crc kubenswrapper[4998]: I0203 09:12:08.441239 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="728f12ea-0558-4049-9c2b-b060c3095656" path="/var/lib/kubelet/pods/728f12ea-0558-4049-9c2b-b060c3095656/volumes" Feb 03 09:12:12 crc kubenswrapper[4998]: I0203 09:12:12.755136 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 09:12:12 crc kubenswrapper[4998]: I0203 09:12:12.755655 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 09:12:14 crc kubenswrapper[4998]: I0203 09:12:14.014169 4998 scope.go:117] "RemoveContainer" containerID="ef1837b51060c62eb0518ef55f2f0e6e4a9223dc2f4cf9cdb7931600b94b233e" Feb 03 09:12:14 crc kubenswrapper[4998]: I0203 09:12:14.331775 4998 scope.go:117] "RemoveContainer" containerID="b713d56296ee7e37e353d41b8aff00784e66f07f95f6ca6b422f4aecda0cf555" Feb 03 09:12:14 crc kubenswrapper[4998]: I0203 09:12:14.389335 4998 scope.go:117] "RemoveContainer" containerID="f30c12123cdef6d56802a25fd5057466ad4d93fbf457729f54850cf52fed68b1" Feb 03 09:12:14 crc kubenswrapper[4998]: I0203 09:12:14.428570 4998 scope.go:117] "RemoveContainer" containerID="9f51967606204f6366909309820527afea7387277a802c63ba1ccf1cfde11c7b" Feb 03 09:12:14 crc kubenswrapper[4998]: I0203 09:12:14.477893 4998 scope.go:117] "RemoveContainer" containerID="6cb8f305e387edff74c0c071ac1c6002c4737f6e428a47a4c2108146da230744" Feb 03 09:12:14 crc kubenswrapper[4998]: I0203 09:12:14.513947 4998 scope.go:117] "RemoveContainer" containerID="d5d68ae0a0a8463547b89e40bc5c0a61fabc5d245b996ead04f399aaa14afeea" Feb 03 09:12:14 crc kubenswrapper[4998]: I0203 09:12:14.569277 4998 scope.go:117] "RemoveContainer" containerID="aa8f6ff9d22019cb55c266a5894253e9509846bd5c8842794f8205872537ef05" Feb 03 09:12:14 crc kubenswrapper[4998]: I0203 09:12:14.610877 4998 scope.go:117] "RemoveContainer" containerID="6a59f9e69e6dc470e080c378701631f35339724dbe372ff4adc96ca0682cbf51" Feb 03 09:12:23 crc kubenswrapper[4998]: I0203 09:12:23.418839 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-f7cmc_4796b22a-2d45-4db2-ad47-e5e010c0fd02/prometheus-operator/0.log" Feb 03 09:12:23 crc kubenswrapper[4998]: I0203 09:12:23.639049 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-6d8776b8d7-bwk8z_a336920f-f4e8-4073-b392-bd5b2a4dffa3/prometheus-operator-admission-webhook/0.log" Feb 03 09:12:23 crc kubenswrapper[4998]: I0203 09:12:23.709914 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-6d8776b8d7-l2nq7_476d94bb-903b-4592-af4f-57cc905396aa/prometheus-operator-admission-webhook/0.log" Feb 03 09:12:23 crc kubenswrapper[4998]: I0203 09:12:23.822493 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-dhcsw_96c6743e-239d-4bbc-adf2-4012a6af6282/operator/0.log" Feb 03 09:12:23 crc kubenswrapper[4998]: I0203 09:12:23.907487 
4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-t2wss_9f65ecc3-6c59-4f35-b5b4-927c48f1f89b/perses-operator/0.log" Feb 03 09:12:38 crc kubenswrapper[4998]: I0203 09:12:38.627258 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-xl925_6e8b1d0f-1364-40b4-8796-91ce27a5c0fa/kube-rbac-proxy/0.log" Feb 03 09:12:38 crc kubenswrapper[4998]: I0203 09:12:38.949284 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4zdf4_fbe0ed58-3a38-4039-b0db-6c8e52675fe6/cp-frr-files/0.log" Feb 03 09:12:39 crc kubenswrapper[4998]: I0203 09:12:39.148495 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4zdf4_fbe0ed58-3a38-4039-b0db-6c8e52675fe6/cp-frr-files/0.log" Feb 03 09:12:39 crc kubenswrapper[4998]: I0203 09:12:39.198001 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-xl925_6e8b1d0f-1364-40b4-8796-91ce27a5c0fa/controller/0.log" Feb 03 09:12:39 crc kubenswrapper[4998]: I0203 09:12:39.268549 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4zdf4_fbe0ed58-3a38-4039-b0db-6c8e52675fe6/cp-metrics/0.log" Feb 03 09:12:39 crc kubenswrapper[4998]: I0203 09:12:39.286165 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4zdf4_fbe0ed58-3a38-4039-b0db-6c8e52675fe6/cp-reloader/0.log" Feb 03 09:12:39 crc kubenswrapper[4998]: I0203 09:12:39.359967 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4zdf4_fbe0ed58-3a38-4039-b0db-6c8e52675fe6/cp-reloader/0.log" Feb 03 09:12:39 crc kubenswrapper[4998]: I0203 09:12:39.616262 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4zdf4_fbe0ed58-3a38-4039-b0db-6c8e52675fe6/cp-reloader/0.log" Feb 03 09:12:39 crc kubenswrapper[4998]: I0203 09:12:39.632422 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4zdf4_fbe0ed58-3a38-4039-b0db-6c8e52675fe6/cp-frr-files/0.log" Feb 03 09:12:39 crc kubenswrapper[4998]: I0203 09:12:39.632422 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4zdf4_fbe0ed58-3a38-4039-b0db-6c8e52675fe6/cp-metrics/0.log" Feb 03 09:12:39 crc kubenswrapper[4998]: I0203 09:12:39.640041 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4zdf4_fbe0ed58-3a38-4039-b0db-6c8e52675fe6/cp-metrics/0.log" Feb 03 09:12:40 crc kubenswrapper[4998]: I0203 09:12:40.061539 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4zdf4_fbe0ed58-3a38-4039-b0db-6c8e52675fe6/cp-frr-files/0.log" Feb 03 09:12:40 crc kubenswrapper[4998]: I0203 09:12:40.071831 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4zdf4_fbe0ed58-3a38-4039-b0db-6c8e52675fe6/controller/0.log" Feb 03 09:12:40 crc kubenswrapper[4998]: I0203 09:12:40.091734 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4zdf4_fbe0ed58-3a38-4039-b0db-6c8e52675fe6/cp-metrics/0.log" Feb 03 09:12:40 crc kubenswrapper[4998]: I0203 09:12:40.095310 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4zdf4_fbe0ed58-3a38-4039-b0db-6c8e52675fe6/cp-reloader/0.log" Feb 03 09:12:40 crc kubenswrapper[4998]: I0203 09:12:40.348965 4998 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-4zdf4_fbe0ed58-3a38-4039-b0db-6c8e52675fe6/frr-metrics/0.log" Feb 03 09:12:40 crc kubenswrapper[4998]: I0203 09:12:40.362774 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4zdf4_fbe0ed58-3a38-4039-b0db-6c8e52675fe6/kube-rbac-proxy/0.log" Feb 03 09:12:40 crc kubenswrapper[4998]: I0203 09:12:40.370968 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4zdf4_fbe0ed58-3a38-4039-b0db-6c8e52675fe6/kube-rbac-proxy-frr/0.log" Feb 03 09:12:40 crc kubenswrapper[4998]: I0203 09:12:40.658797 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4zdf4_fbe0ed58-3a38-4039-b0db-6c8e52675fe6/reloader/0.log" Feb 03 09:12:40 crc kubenswrapper[4998]: I0203 09:12:40.672527 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7df86c4f6c-448lk_27325089-5914-4776-8709-b7068b537775/frr-k8s-webhook-server/0.log" Feb 03 09:12:40 crc kubenswrapper[4998]: I0203 09:12:40.994548 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-dc5cbfb88-f5dkw_9e1313b8-f7b8-4b68-a1c6-0ece2e40b91a/manager/0.log" Feb 03 09:12:41 crc kubenswrapper[4998]: I0203 09:12:41.249155 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-7956f64f85-m4jn6_416f5c48-0a2c-4780-8a2f-50892fd3d008/webhook-server/0.log" Feb 03 09:12:41 crc kubenswrapper[4998]: I0203 09:12:41.277853 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-jbt2j_8330499e-9bd0-426b-bf63-9a7576e6d615/kube-rbac-proxy/0.log" Feb 03 09:12:42 crc kubenswrapper[4998]: I0203 09:12:42.241747 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-jbt2j_8330499e-9bd0-426b-bf63-9a7576e6d615/speaker/0.log" Feb 03 09:12:42 crc kubenswrapper[4998]: I0203 09:12:42.754492 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 09:12:42 crc kubenswrapper[4998]: I0203 09:12:42.754751 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 09:12:42 crc kubenswrapper[4998]: I0203 09:12:42.985243 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4zdf4_fbe0ed58-3a38-4039-b0db-6c8e52675fe6/frr/0.log" Feb 03 09:12:55 crc kubenswrapper[4998]: I0203 09:12:55.372364 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcs7fhg_50b36942-ee48-48a1-878a-e1b6807f5c89/util/0.log" Feb 03 09:12:55 crc kubenswrapper[4998]: I0203 09:12:55.498970 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcs7fhg_50b36942-ee48-48a1-878a-e1b6807f5c89/util/0.log" Feb 03 09:12:55 crc kubenswrapper[4998]: I0203 09:12:55.528663 4998 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcs7fhg_50b36942-ee48-48a1-878a-e1b6807f5c89/pull/0.log" Feb 03 09:12:55 crc kubenswrapper[4998]: I0203 09:12:55.547986 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcs7fhg_50b36942-ee48-48a1-878a-e1b6807f5c89/pull/0.log" Feb 03 09:12:55 crc kubenswrapper[4998]: I0203 09:12:55.707432 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcs7fhg_50b36942-ee48-48a1-878a-e1b6807f5c89/util/0.log" Feb 03 09:12:55 crc kubenswrapper[4998]: I0203 09:12:55.725040 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcs7fhg_50b36942-ee48-48a1-878a-e1b6807f5c89/pull/0.log" Feb 03 09:12:55 crc kubenswrapper[4998]: I0203 09:12:55.768271 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcs7fhg_50b36942-ee48-48a1-878a-e1b6807f5c89/extract/0.log" Feb 03 09:12:55 crc kubenswrapper[4998]: I0203 09:12:55.928448 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24_7567f7dc-dd95-4c33-bf94-6a5314c9ec8d/util/0.log" Feb 03 09:12:56 crc kubenswrapper[4998]: I0203 09:12:56.056117 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24_7567f7dc-dd95-4c33-bf94-6a5314c9ec8d/pull/0.log" Feb 03 09:12:56 crc kubenswrapper[4998]: I0203 09:12:56.082761 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24_7567f7dc-dd95-4c33-bf94-6a5314c9ec8d/util/0.log" Feb 03 09:12:56 crc kubenswrapper[4998]: I0203 09:12:56.087926 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24_7567f7dc-dd95-4c33-bf94-6a5314c9ec8d/pull/0.log" Feb 03 09:12:56 crc kubenswrapper[4998]: I0203 09:12:56.247936 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24_7567f7dc-dd95-4c33-bf94-6a5314c9ec8d/pull/0.log" Feb 03 09:12:56 crc kubenswrapper[4998]: I0203 09:12:56.259965 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24_7567f7dc-dd95-4c33-bf94-6a5314c9ec8d/extract/0.log" Feb 03 09:12:56 crc kubenswrapper[4998]: I0203 09:12:56.288246 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713b4p24_7567f7dc-dd95-4c33-bf94-6a5314c9ec8d/util/0.log" Feb 03 09:12:56 crc kubenswrapper[4998]: I0203 09:12:56.415621 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e58kcjh_35e5b9ab-9f2d-4936-a208-ff1897002f2a/util/0.log" Feb 03 09:12:56 crc kubenswrapper[4998]: I0203 09:12:56.621412 4998 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e58kcjh_35e5b9ab-9f2d-4936-a208-ff1897002f2a/util/0.log" Feb 03 09:12:56 crc kubenswrapper[4998]: I0203 09:12:56.644288 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e58kcjh_35e5b9ab-9f2d-4936-a208-ff1897002f2a/pull/0.log" Feb 03 09:12:56 crc kubenswrapper[4998]: I0203 09:12:56.672033 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e58kcjh_35e5b9ab-9f2d-4936-a208-ff1897002f2a/pull/0.log" Feb 03 09:12:56 crc kubenswrapper[4998]: I0203 09:12:56.845916 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e58kcjh_35e5b9ab-9f2d-4936-a208-ff1897002f2a/pull/0.log" Feb 03 09:12:56 crc kubenswrapper[4998]: I0203 09:12:56.863726 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e58kcjh_35e5b9ab-9f2d-4936-a208-ff1897002f2a/extract/0.log" Feb 03 09:12:56 crc kubenswrapper[4998]: I0203 09:12:56.866743 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e58kcjh_35e5b9ab-9f2d-4936-a208-ff1897002f2a/util/0.log" Feb 03 09:12:56 crc kubenswrapper[4998]: I0203 09:12:56.983970 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq_6dbe9085-4b64-4d38-93cb-9ff53a5e4e14/util/0.log" Feb 03 09:12:57 crc kubenswrapper[4998]: I0203 09:12:57.151821 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq_6dbe9085-4b64-4d38-93cb-9ff53a5e4e14/pull/0.log" Feb 03 09:12:57 crc kubenswrapper[4998]: I0203 09:12:57.178204 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq_6dbe9085-4b64-4d38-93cb-9ff53a5e4e14/util/0.log" Feb 03 09:12:57 crc kubenswrapper[4998]: I0203 09:12:57.182152 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq_6dbe9085-4b64-4d38-93cb-9ff53a5e4e14/pull/0.log" Feb 03 09:12:57 crc kubenswrapper[4998]: I0203 09:12:57.331067 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq_6dbe9085-4b64-4d38-93cb-9ff53a5e4e14/extract/0.log" Feb 03 09:12:57 crc kubenswrapper[4998]: I0203 09:12:57.332500 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq_6dbe9085-4b64-4d38-93cb-9ff53a5e4e14/util/0.log" Feb 03 09:12:57 crc kubenswrapper[4998]: I0203 09:12:57.366601 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08x98gq_6dbe9085-4b64-4d38-93cb-9ff53a5e4e14/pull/0.log" Feb 03 09:12:57 crc kubenswrapper[4998]: I0203 09:12:57.514929 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ksdjm_41017d0c-c69b-4907-b3dc-f99f64201dc4/extract-utilities/0.log" Feb 03 
09:12:57 crc kubenswrapper[4998]: I0203 09:12:57.665508 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ksdjm_41017d0c-c69b-4907-b3dc-f99f64201dc4/extract-content/0.log" Feb 03 09:12:57 crc kubenswrapper[4998]: I0203 09:12:57.696465 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ksdjm_41017d0c-c69b-4907-b3dc-f99f64201dc4/extract-content/0.log" Feb 03 09:12:57 crc kubenswrapper[4998]: I0203 09:12:57.698933 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ksdjm_41017d0c-c69b-4907-b3dc-f99f64201dc4/extract-utilities/0.log" Feb 03 09:12:57 crc kubenswrapper[4998]: I0203 09:12:57.844523 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ksdjm_41017d0c-c69b-4907-b3dc-f99f64201dc4/extract-utilities/0.log" Feb 03 09:12:57 crc kubenswrapper[4998]: I0203 09:12:57.873278 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ksdjm_41017d0c-c69b-4907-b3dc-f99f64201dc4/extract-content/0.log" Feb 03 09:12:58 crc kubenswrapper[4998]: I0203 09:12:58.100158 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-cs7gc_7d0c7927-4f13-40bb-b344-3353322d4964/extract-utilities/0.log" Feb 03 09:12:58 crc kubenswrapper[4998]: I0203 09:12:58.291359 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-cs7gc_7d0c7927-4f13-40bb-b344-3353322d4964/extract-content/0.log" Feb 03 09:12:58 crc kubenswrapper[4998]: I0203 09:12:58.370151 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-cs7gc_7d0c7927-4f13-40bb-b344-3353322d4964/extract-utilities/0.log" Feb 03 09:12:58 crc kubenswrapper[4998]: I0203 09:12:58.395566 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ksdjm_41017d0c-c69b-4907-b3dc-f99f64201dc4/registry-server/0.log" Feb 03 09:12:58 crc kubenswrapper[4998]: I0203 09:12:58.415339 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-cs7gc_7d0c7927-4f13-40bb-b344-3353322d4964/extract-content/0.log" Feb 03 09:12:58 crc kubenswrapper[4998]: I0203 09:12:58.565705 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-cs7gc_7d0c7927-4f13-40bb-b344-3353322d4964/extract-utilities/0.log" Feb 03 09:12:58 crc kubenswrapper[4998]: I0203 09:12:58.587427 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-cs7gc_7d0c7927-4f13-40bb-b344-3353322d4964/extract-content/0.log" Feb 03 09:12:58 crc kubenswrapper[4998]: I0203 09:12:58.800672 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-cs7gc_7d0c7927-4f13-40bb-b344-3353322d4964/registry-server/0.log" Feb 03 09:12:58 crc kubenswrapper[4998]: I0203 09:12:58.819873 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-bft6r_9bd422b5-35b7-48e1-8fc7-b07a448c703a/marketplace-operator/0.log" Feb 03 09:12:58 crc kubenswrapper[4998]: I0203 09:12:58.865608 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ln6cb_a6732673-b9ab-4ac4-95d8-fec7cf3c7a44/extract-utilities/0.log" Feb 03 
09:12:59 crc kubenswrapper[4998]: I0203 09:12:59.081110 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ln6cb_a6732673-b9ab-4ac4-95d8-fec7cf3c7a44/extract-content/0.log" Feb 03 09:12:59 crc kubenswrapper[4998]: I0203 09:12:59.086174 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ln6cb_a6732673-b9ab-4ac4-95d8-fec7cf3c7a44/extract-utilities/0.log" Feb 03 09:12:59 crc kubenswrapper[4998]: I0203 09:12:59.095281 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ln6cb_a6732673-b9ab-4ac4-95d8-fec7cf3c7a44/extract-content/0.log" Feb 03 09:12:59 crc kubenswrapper[4998]: I0203 09:12:59.239708 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ln6cb_a6732673-b9ab-4ac4-95d8-fec7cf3c7a44/extract-utilities/0.log" Feb 03 09:12:59 crc kubenswrapper[4998]: I0203 09:12:59.284222 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ln6cb_a6732673-b9ab-4ac4-95d8-fec7cf3c7a44/extract-content/0.log" Feb 03 09:12:59 crc kubenswrapper[4998]: I0203 09:12:59.474077 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-wkbsj_ae1e9817-fa8c-461b-b483-913f397dbe95/extract-utilities/0.log" Feb 03 09:12:59 crc kubenswrapper[4998]: I0203 09:12:59.587856 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ln6cb_a6732673-b9ab-4ac4-95d8-fec7cf3c7a44/registry-server/0.log" Feb 03 09:12:59 crc kubenswrapper[4998]: I0203 09:12:59.663920 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-wkbsj_ae1e9817-fa8c-461b-b483-913f397dbe95/extract-content/0.log" Feb 03 09:12:59 crc kubenswrapper[4998]: I0203 09:12:59.678492 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-wkbsj_ae1e9817-fa8c-461b-b483-913f397dbe95/extract-utilities/0.log" Feb 03 09:12:59 crc kubenswrapper[4998]: I0203 09:12:59.679002 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-wkbsj_ae1e9817-fa8c-461b-b483-913f397dbe95/extract-content/0.log" Feb 03 09:12:59 crc kubenswrapper[4998]: I0203 09:12:59.827914 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-wkbsj_ae1e9817-fa8c-461b-b483-913f397dbe95/extract-content/0.log" Feb 03 09:12:59 crc kubenswrapper[4998]: I0203 09:12:59.841264 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-wkbsj_ae1e9817-fa8c-461b-b483-913f397dbe95/extract-utilities/0.log" Feb 03 09:13:00 crc kubenswrapper[4998]: I0203 09:13:00.375241 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-wkbsj_ae1e9817-fa8c-461b-b483-913f397dbe95/registry-server/0.log" Feb 03 09:13:12 crc kubenswrapper[4998]: I0203 09:13:12.662889 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-f7cmc_4796b22a-2d45-4db2-ad47-e5e010c0fd02/prometheus-operator/0.log" Feb 03 09:13:12 crc kubenswrapper[4998]: I0203 09:13:12.693932 4998 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-6d8776b8d7-bwk8z_a336920f-f4e8-4073-b392-bd5b2a4dffa3/prometheus-operator-admission-webhook/0.log" Feb 03 09:13:12 crc kubenswrapper[4998]: I0203 09:13:12.754007 4998 patch_prober.go:28] interesting pod/machine-config-daemon-v9x5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 09:13:12 crc kubenswrapper[4998]: I0203 09:13:12.754071 4998 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 09:13:12 crc kubenswrapper[4998]: I0203 09:13:12.754113 4998 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" Feb 03 09:13:12 crc kubenswrapper[4998]: I0203 09:13:12.755057 4998 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8fd8d67f375b11a9322a22f6149a9dff658215432bfa58d01d17bb98b9b81d97"} pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 03 09:13:12 crc kubenswrapper[4998]: I0203 09:13:12.755116 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerName="machine-config-daemon" containerID="cri-o://8fd8d67f375b11a9322a22f6149a9dff658215432bfa58d01d17bb98b9b81d97" gracePeriod=600 Feb 03 09:13:12 crc kubenswrapper[4998]: I0203 09:13:12.783987 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-6d8776b8d7-l2nq7_476d94bb-903b-4592-af4f-57cc905396aa/prometheus-operator-admission-webhook/0.log" Feb 03 09:13:12 crc kubenswrapper[4998]: E0203 09:13:12.880134 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 09:13:12 crc kubenswrapper[4998]: I0203 09:13:12.898186 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-dhcsw_96c6743e-239d-4bbc-adf2-4012a6af6282/operator/0.log" Feb 03 09:13:12 crc kubenswrapper[4998]: I0203 09:13:12.926452 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-t2wss_9f65ecc3-6c59-4f35-b5b4-927c48f1f89b/perses-operator/0.log" Feb 03 09:13:13 crc kubenswrapper[4998]: I0203 09:13:13.432678 4998 generic.go:334] "Generic (PLEG): container finished" podID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" containerID="8fd8d67f375b11a9322a22f6149a9dff658215432bfa58d01d17bb98b9b81d97" exitCode=0 Feb 03 09:13:13 crc kubenswrapper[4998]: I0203 09:13:13.432841 4998 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerDied","Data":"8fd8d67f375b11a9322a22f6149a9dff658215432bfa58d01d17bb98b9b81d97"} Feb 03 09:13:13 crc kubenswrapper[4998]: I0203 09:13:13.433323 4998 scope.go:117] "RemoveContainer" containerID="64bf25fffb6e530d03bdddc74c5d431fe1cf2996b5e7ae51aa19f535134dc4f8" Feb 03 09:13:13 crc kubenswrapper[4998]: I0203 09:13:13.434051 4998 scope.go:117] "RemoveContainer" containerID="8fd8d67f375b11a9322a22f6149a9dff658215432bfa58d01d17bb98b9b81d97" Feb 03 09:13:13 crc kubenswrapper[4998]: E0203 09:13:13.434448 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 09:13:25 crc kubenswrapper[4998]: I0203 09:13:25.430155 4998 scope.go:117] "RemoveContainer" containerID="8fd8d67f375b11a9322a22f6149a9dff658215432bfa58d01d17bb98b9b81d97" Feb 03 09:13:25 crc kubenswrapper[4998]: E0203 09:13:25.431058 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 09:13:32 crc kubenswrapper[4998]: E0203 09:13:32.069555 4998 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 38.102.83.129:35240->38.102.83.129:45201: read tcp 38.102.83.129:35240->38.102.83.129:45201: read: connection reset by peer Feb 03 09:13:36 crc kubenswrapper[4998]: I0203 09:13:36.179866 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qgxx2"] Feb 03 09:13:36 crc kubenswrapper[4998]: E0203 09:13:36.180848 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b5a9d03-7183-4289-ae83-2c1796c750c7" containerName="container-00" Feb 03 09:13:36 crc kubenswrapper[4998]: I0203 09:13:36.180862 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b5a9d03-7183-4289-ae83-2c1796c750c7" containerName="container-00" Feb 03 09:13:36 crc kubenswrapper[4998]: I0203 09:13:36.181077 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b5a9d03-7183-4289-ae83-2c1796c750c7" containerName="container-00" Feb 03 09:13:36 crc kubenswrapper[4998]: I0203 09:13:36.182649 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qgxx2" Feb 03 09:13:36 crc kubenswrapper[4998]: I0203 09:13:36.190387 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qgxx2"] Feb 03 09:13:36 crc kubenswrapper[4998]: I0203 09:13:36.299053 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4776dd5e-3e76-44dd-9568-73466757bd9a-utilities\") pod \"redhat-marketplace-qgxx2\" (UID: \"4776dd5e-3e76-44dd-9568-73466757bd9a\") " pod="openshift-marketplace/redhat-marketplace-qgxx2" Feb 03 09:13:36 crc kubenswrapper[4998]: I0203 09:13:36.299112 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4776dd5e-3e76-44dd-9568-73466757bd9a-catalog-content\") pod \"redhat-marketplace-qgxx2\" (UID: \"4776dd5e-3e76-44dd-9568-73466757bd9a\") " pod="openshift-marketplace/redhat-marketplace-qgxx2" Feb 03 09:13:36 crc kubenswrapper[4998]: I0203 09:13:36.299146 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7hbz\" (UniqueName: \"kubernetes.io/projected/4776dd5e-3e76-44dd-9568-73466757bd9a-kube-api-access-h7hbz\") pod \"redhat-marketplace-qgxx2\" (UID: \"4776dd5e-3e76-44dd-9568-73466757bd9a\") " pod="openshift-marketplace/redhat-marketplace-qgxx2" Feb 03 09:13:36 crc kubenswrapper[4998]: I0203 09:13:36.400719 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4776dd5e-3e76-44dd-9568-73466757bd9a-utilities\") pod \"redhat-marketplace-qgxx2\" (UID: \"4776dd5e-3e76-44dd-9568-73466757bd9a\") " pod="openshift-marketplace/redhat-marketplace-qgxx2" Feb 03 09:13:36 crc kubenswrapper[4998]: I0203 09:13:36.400796 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4776dd5e-3e76-44dd-9568-73466757bd9a-catalog-content\") pod \"redhat-marketplace-qgxx2\" (UID: \"4776dd5e-3e76-44dd-9568-73466757bd9a\") " pod="openshift-marketplace/redhat-marketplace-qgxx2" Feb 03 09:13:36 crc kubenswrapper[4998]: I0203 09:13:36.400825 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7hbz\" (UniqueName: \"kubernetes.io/projected/4776dd5e-3e76-44dd-9568-73466757bd9a-kube-api-access-h7hbz\") pod \"redhat-marketplace-qgxx2\" (UID: \"4776dd5e-3e76-44dd-9568-73466757bd9a\") " pod="openshift-marketplace/redhat-marketplace-qgxx2" Feb 03 09:13:36 crc kubenswrapper[4998]: I0203 09:13:36.401531 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4776dd5e-3e76-44dd-9568-73466757bd9a-utilities\") pod \"redhat-marketplace-qgxx2\" (UID: \"4776dd5e-3e76-44dd-9568-73466757bd9a\") " pod="openshift-marketplace/redhat-marketplace-qgxx2" Feb 03 09:13:36 crc kubenswrapper[4998]: I0203 09:13:36.402272 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4776dd5e-3e76-44dd-9568-73466757bd9a-catalog-content\") pod \"redhat-marketplace-qgxx2\" (UID: \"4776dd5e-3e76-44dd-9568-73466757bd9a\") " pod="openshift-marketplace/redhat-marketplace-qgxx2" Feb 03 09:13:36 crc kubenswrapper[4998]: I0203 09:13:36.430613 4998 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-h7hbz\" (UniqueName: \"kubernetes.io/projected/4776dd5e-3e76-44dd-9568-73466757bd9a-kube-api-access-h7hbz\") pod \"redhat-marketplace-qgxx2\" (UID: \"4776dd5e-3e76-44dd-9568-73466757bd9a\") " pod="openshift-marketplace/redhat-marketplace-qgxx2" Feb 03 09:13:36 crc kubenswrapper[4998]: I0203 09:13:36.506736 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qgxx2" Feb 03 09:13:37 crc kubenswrapper[4998]: I0203 09:13:37.034166 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qgxx2"] Feb 03 09:13:37 crc kubenswrapper[4998]: I0203 09:13:37.706353 4998 generic.go:334] "Generic (PLEG): container finished" podID="4776dd5e-3e76-44dd-9568-73466757bd9a" containerID="e6172d502b87296562f9dd7a2d9f8058a5f628456da89c5ff8e91b5cdeb390e8" exitCode=0 Feb 03 09:13:37 crc kubenswrapper[4998]: I0203 09:13:37.706452 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qgxx2" event={"ID":"4776dd5e-3e76-44dd-9568-73466757bd9a","Type":"ContainerDied","Data":"e6172d502b87296562f9dd7a2d9f8058a5f628456da89c5ff8e91b5cdeb390e8"} Feb 03 09:13:37 crc kubenswrapper[4998]: I0203 09:13:37.706708 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qgxx2" event={"ID":"4776dd5e-3e76-44dd-9568-73466757bd9a","Type":"ContainerStarted","Data":"39c6b7ce2b1990584f3c5830a3d59ff70a3b11e5d99e08836fcd5b28ff6c70ce"} Feb 03 09:13:37 crc kubenswrapper[4998]: I0203 09:13:37.708634 4998 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 03 09:13:38 crc kubenswrapper[4998]: I0203 09:13:38.429070 4998 scope.go:117] "RemoveContainer" containerID="8fd8d67f375b11a9322a22f6149a9dff658215432bfa58d01d17bb98b9b81d97" Feb 03 09:13:38 crc kubenswrapper[4998]: E0203 09:13:38.430024 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 09:13:38 crc kubenswrapper[4998]: I0203 09:13:38.739671 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qgxx2" event={"ID":"4776dd5e-3e76-44dd-9568-73466757bd9a","Type":"ContainerStarted","Data":"1f65a9568a7d11d63061c6a7132e58f5b54e936c59e845c55022d46182b93d8b"} Feb 03 09:13:39 crc kubenswrapper[4998]: I0203 09:13:39.749476 4998 generic.go:334] "Generic (PLEG): container finished" podID="4776dd5e-3e76-44dd-9568-73466757bd9a" containerID="1f65a9568a7d11d63061c6a7132e58f5b54e936c59e845c55022d46182b93d8b" exitCode=0 Feb 03 09:13:39 crc kubenswrapper[4998]: I0203 09:13:39.749516 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qgxx2" event={"ID":"4776dd5e-3e76-44dd-9568-73466757bd9a","Type":"ContainerDied","Data":"1f65a9568a7d11d63061c6a7132e58f5b54e936c59e845c55022d46182b93d8b"} Feb 03 09:13:40 crc kubenswrapper[4998]: I0203 09:13:40.762826 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qgxx2" 
event={"ID":"4776dd5e-3e76-44dd-9568-73466757bd9a","Type":"ContainerStarted","Data":"554c4ee9a05218013a6938f978eb315a527c68a6b9f138932aa1de56e126504b"} Feb 03 09:13:40 crc kubenswrapper[4998]: I0203 09:13:40.782900 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qgxx2" podStartSLOduration=2.309989755 podStartE2EDuration="4.782881789s" podCreationTimestamp="2026-02-03 09:13:36 +0000 UTC" firstStartedPulling="2026-02-03 09:13:37.708392317 +0000 UTC m=+8855.995086113" lastFinishedPulling="2026-02-03 09:13:40.181284341 +0000 UTC m=+8858.467978147" observedRunningTime="2026-02-03 09:13:40.777014362 +0000 UTC m=+8859.063708178" watchObservedRunningTime="2026-02-03 09:13:40.782881789 +0000 UTC m=+8859.069575595" Feb 03 09:13:46 crc kubenswrapper[4998]: I0203 09:13:46.507198 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qgxx2" Feb 03 09:13:46 crc kubenswrapper[4998]: I0203 09:13:46.507658 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qgxx2" Feb 03 09:13:46 crc kubenswrapper[4998]: I0203 09:13:46.562021 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qgxx2" Feb 03 09:13:46 crc kubenswrapper[4998]: I0203 09:13:46.864334 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qgxx2" Feb 03 09:13:46 crc kubenswrapper[4998]: I0203 09:13:46.914992 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qgxx2"] Feb 03 09:13:48 crc kubenswrapper[4998]: I0203 09:13:48.834531 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-qgxx2" podUID="4776dd5e-3e76-44dd-9568-73466757bd9a" containerName="registry-server" containerID="cri-o://554c4ee9a05218013a6938f978eb315a527c68a6b9f138932aa1de56e126504b" gracePeriod=2 Feb 03 09:13:49 crc kubenswrapper[4998]: I0203 09:13:49.471000 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qgxx2" Feb 03 09:13:49 crc kubenswrapper[4998]: I0203 09:13:49.636971 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h7hbz\" (UniqueName: \"kubernetes.io/projected/4776dd5e-3e76-44dd-9568-73466757bd9a-kube-api-access-h7hbz\") pod \"4776dd5e-3e76-44dd-9568-73466757bd9a\" (UID: \"4776dd5e-3e76-44dd-9568-73466757bd9a\") " Feb 03 09:13:49 crc kubenswrapper[4998]: I0203 09:13:49.637442 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4776dd5e-3e76-44dd-9568-73466757bd9a-catalog-content\") pod \"4776dd5e-3e76-44dd-9568-73466757bd9a\" (UID: \"4776dd5e-3e76-44dd-9568-73466757bd9a\") " Feb 03 09:13:49 crc kubenswrapper[4998]: I0203 09:13:49.637508 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4776dd5e-3e76-44dd-9568-73466757bd9a-utilities\") pod \"4776dd5e-3e76-44dd-9568-73466757bd9a\" (UID: \"4776dd5e-3e76-44dd-9568-73466757bd9a\") " Feb 03 09:13:49 crc kubenswrapper[4998]: I0203 09:13:49.638479 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4776dd5e-3e76-44dd-9568-73466757bd9a-utilities" (OuterVolumeSpecName: "utilities") pod "4776dd5e-3e76-44dd-9568-73466757bd9a" (UID: "4776dd5e-3e76-44dd-9568-73466757bd9a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 09:13:49 crc kubenswrapper[4998]: I0203 09:13:49.663394 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4776dd5e-3e76-44dd-9568-73466757bd9a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4776dd5e-3e76-44dd-9568-73466757bd9a" (UID: "4776dd5e-3e76-44dd-9568-73466757bd9a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 09:13:49 crc kubenswrapper[4998]: I0203 09:13:49.668302 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4776dd5e-3e76-44dd-9568-73466757bd9a-kube-api-access-h7hbz" (OuterVolumeSpecName: "kube-api-access-h7hbz") pod "4776dd5e-3e76-44dd-9568-73466757bd9a" (UID: "4776dd5e-3e76-44dd-9568-73466757bd9a"). InnerVolumeSpecName "kube-api-access-h7hbz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:13:49 crc kubenswrapper[4998]: I0203 09:13:49.739400 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h7hbz\" (UniqueName: \"kubernetes.io/projected/4776dd5e-3e76-44dd-9568-73466757bd9a-kube-api-access-h7hbz\") on node \"crc\" DevicePath \"\"" Feb 03 09:13:49 crc kubenswrapper[4998]: I0203 09:13:49.739436 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4776dd5e-3e76-44dd-9568-73466757bd9a-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 09:13:49 crc kubenswrapper[4998]: I0203 09:13:49.739446 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4776dd5e-3e76-44dd-9568-73466757bd9a-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 09:13:49 crc kubenswrapper[4998]: I0203 09:13:49.844638 4998 generic.go:334] "Generic (PLEG): container finished" podID="4776dd5e-3e76-44dd-9568-73466757bd9a" containerID="554c4ee9a05218013a6938f978eb315a527c68a6b9f138932aa1de56e126504b" exitCode=0 Feb 03 09:13:49 crc kubenswrapper[4998]: I0203 09:13:49.844696 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qgxx2" Feb 03 09:13:49 crc kubenswrapper[4998]: I0203 09:13:49.844716 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qgxx2" event={"ID":"4776dd5e-3e76-44dd-9568-73466757bd9a","Type":"ContainerDied","Data":"554c4ee9a05218013a6938f978eb315a527c68a6b9f138932aa1de56e126504b"} Feb 03 09:13:49 crc kubenswrapper[4998]: I0203 09:13:49.845826 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qgxx2" event={"ID":"4776dd5e-3e76-44dd-9568-73466757bd9a","Type":"ContainerDied","Data":"39c6b7ce2b1990584f3c5830a3d59ff70a3b11e5d99e08836fcd5b28ff6c70ce"} Feb 03 09:13:49 crc kubenswrapper[4998]: I0203 09:13:49.845853 4998 scope.go:117] "RemoveContainer" containerID="554c4ee9a05218013a6938f978eb315a527c68a6b9f138932aa1de56e126504b" Feb 03 09:13:49 crc kubenswrapper[4998]: I0203 09:13:49.877593 4998 scope.go:117] "RemoveContainer" containerID="1f65a9568a7d11d63061c6a7132e58f5b54e936c59e845c55022d46182b93d8b" Feb 03 09:13:49 crc kubenswrapper[4998]: I0203 09:13:49.884600 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qgxx2"] Feb 03 09:13:49 crc kubenswrapper[4998]: I0203 09:13:49.893265 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-qgxx2"] Feb 03 09:13:49 crc kubenswrapper[4998]: I0203 09:13:49.912861 4998 scope.go:117] "RemoveContainer" containerID="e6172d502b87296562f9dd7a2d9f8058a5f628456da89c5ff8e91b5cdeb390e8" Feb 03 09:13:49 crc kubenswrapper[4998]: I0203 09:13:49.949349 4998 scope.go:117] "RemoveContainer" containerID="554c4ee9a05218013a6938f978eb315a527c68a6b9f138932aa1de56e126504b" Feb 03 09:13:49 crc kubenswrapper[4998]: E0203 09:13:49.949800 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"554c4ee9a05218013a6938f978eb315a527c68a6b9f138932aa1de56e126504b\": container with ID starting with 554c4ee9a05218013a6938f978eb315a527c68a6b9f138932aa1de56e126504b not found: ID does not exist" containerID="554c4ee9a05218013a6938f978eb315a527c68a6b9f138932aa1de56e126504b" Feb 03 09:13:49 crc kubenswrapper[4998]: I0203 09:13:49.949834 4998 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"554c4ee9a05218013a6938f978eb315a527c68a6b9f138932aa1de56e126504b"} err="failed to get container status \"554c4ee9a05218013a6938f978eb315a527c68a6b9f138932aa1de56e126504b\": rpc error: code = NotFound desc = could not find container \"554c4ee9a05218013a6938f978eb315a527c68a6b9f138932aa1de56e126504b\": container with ID starting with 554c4ee9a05218013a6938f978eb315a527c68a6b9f138932aa1de56e126504b not found: ID does not exist" Feb 03 09:13:49 crc kubenswrapper[4998]: I0203 09:13:49.949856 4998 scope.go:117] "RemoveContainer" containerID="1f65a9568a7d11d63061c6a7132e58f5b54e936c59e845c55022d46182b93d8b" Feb 03 09:13:49 crc kubenswrapper[4998]: E0203 09:13:49.950068 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f65a9568a7d11d63061c6a7132e58f5b54e936c59e845c55022d46182b93d8b\": container with ID starting with 1f65a9568a7d11d63061c6a7132e58f5b54e936c59e845c55022d46182b93d8b not found: ID does not exist" containerID="1f65a9568a7d11d63061c6a7132e58f5b54e936c59e845c55022d46182b93d8b" Feb 03 09:13:49 crc kubenswrapper[4998]: I0203 09:13:49.950093 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f65a9568a7d11d63061c6a7132e58f5b54e936c59e845c55022d46182b93d8b"} err="failed to get container status \"1f65a9568a7d11d63061c6a7132e58f5b54e936c59e845c55022d46182b93d8b\": rpc error: code = NotFound desc = could not find container \"1f65a9568a7d11d63061c6a7132e58f5b54e936c59e845c55022d46182b93d8b\": container with ID starting with 1f65a9568a7d11d63061c6a7132e58f5b54e936c59e845c55022d46182b93d8b not found: ID does not exist" Feb 03 09:13:49 crc kubenswrapper[4998]: I0203 09:13:49.950108 4998 scope.go:117] "RemoveContainer" containerID="e6172d502b87296562f9dd7a2d9f8058a5f628456da89c5ff8e91b5cdeb390e8" Feb 03 09:13:49 crc kubenswrapper[4998]: E0203 09:13:49.950284 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6172d502b87296562f9dd7a2d9f8058a5f628456da89c5ff8e91b5cdeb390e8\": container with ID starting with e6172d502b87296562f9dd7a2d9f8058a5f628456da89c5ff8e91b5cdeb390e8 not found: ID does not exist" containerID="e6172d502b87296562f9dd7a2d9f8058a5f628456da89c5ff8e91b5cdeb390e8" Feb 03 09:13:49 crc kubenswrapper[4998]: I0203 09:13:49.950304 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6172d502b87296562f9dd7a2d9f8058a5f628456da89c5ff8e91b5cdeb390e8"} err="failed to get container status \"e6172d502b87296562f9dd7a2d9f8058a5f628456da89c5ff8e91b5cdeb390e8\": rpc error: code = NotFound desc = could not find container \"e6172d502b87296562f9dd7a2d9f8058a5f628456da89c5ff8e91b5cdeb390e8\": container with ID starting with e6172d502b87296562f9dd7a2d9f8058a5f628456da89c5ff8e91b5cdeb390e8 not found: ID does not exist" Feb 03 09:13:50 crc kubenswrapper[4998]: I0203 09:13:50.439401 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4776dd5e-3e76-44dd-9568-73466757bd9a" path="/var/lib/kubelet/pods/4776dd5e-3e76-44dd-9568-73466757bd9a/volumes" Feb 03 09:13:51 crc kubenswrapper[4998]: I0203 09:13:51.427687 4998 scope.go:117] "RemoveContainer" containerID="8fd8d67f375b11a9322a22f6149a9dff658215432bfa58d01d17bb98b9b81d97" Feb 03 09:13:51 crc kubenswrapper[4998]: E0203 09:13:51.428229 4998 pod_workers.go:1301] "Error syncing pod, skipping" 
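Note: the RemoveContainer / "ContainerStatus from runtime service failed" pairs above are a benign race, not a real failure: the kubelet asks CRI-O for the status of a container it has already removed, gets gRPC NotFound, and logs the error before moving on. A minimal sketch of treating such a delete as idempotent (illustrative Go only, not kubelet's actual code; removeContainer is a hypothetical helper):

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// removeContainer: a remove that races with the runtime's own cleanup
// treats gRPC NotFound as "already gone" and reports success.
func removeContainer(id string, remove func(string) error) error {
	if err := remove(id); err != nil && status.Code(err) != codes.NotFound {
		return err
	}
	return nil
}

func main() {
	notFound := status.Error(codes.NotFound, "could not find container")
	fmt.Println(removeContainer("554c4ee9a052...", func(string) error { return notFound })) // <nil>
}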
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 09:14:06 crc kubenswrapper[4998]: I0203 09:14:06.428225 4998 scope.go:117] "RemoveContainer" containerID="8fd8d67f375b11a9322a22f6149a9dff658215432bfa58d01d17bb98b9b81d97" Feb 03 09:14:06 crc kubenswrapper[4998]: E0203 09:14:06.429485 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 09:14:07 crc kubenswrapper[4998]: I0203 09:14:07.895047 4998 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/swift-proxy-768dd68f4d-g2qrv" podUID="f64949be-8e32-451e-a626-98930fa6b3a2" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 502" Feb 03 09:14:19 crc kubenswrapper[4998]: I0203 09:14:19.428000 4998 scope.go:117] "RemoveContainer" containerID="8fd8d67f375b11a9322a22f6149a9dff658215432bfa58d01d17bb98b9b81d97" Feb 03 09:14:19 crc kubenswrapper[4998]: E0203 09:14:19.429323 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 09:14:20 crc kubenswrapper[4998]: I0203 09:14:20.049950 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-create-22hsh"] Feb 03 09:14:20 crc kubenswrapper[4998]: I0203 09:14:20.061268 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-b776-account-create-update-bvltf"] Feb 03 09:14:20 crc kubenswrapper[4998]: I0203 09:14:20.072474 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-create-22hsh"] Feb 03 09:14:20 crc kubenswrapper[4998]: I0203 09:14:20.085024 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-b776-account-create-update-bvltf"] Feb 03 09:14:20 crc kubenswrapper[4998]: I0203 09:14:20.447303 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2dd9ecda-c38f-43dd-8002-eacd214a4502" path="/var/lib/kubelet/pods/2dd9ecda-c38f-43dd-8002-eacd214a4502/volumes" Feb 03 09:14:20 crc kubenswrapper[4998]: I0203 09:14:20.451737 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9946bf42-1fb1-4866-b14d-4ce08d52cde9" path="/var/lib/kubelet/pods/9946bf42-1fb1-4866-b14d-4ce08d52cde9/volumes" Feb 03 09:14:30 crc kubenswrapper[4998]: I0203 09:14:30.428160 4998 scope.go:117] "RemoveContainer" containerID="8fd8d67f375b11a9322a22f6149a9dff658215432bfa58d01d17bb98b9b81d97" Feb 03 09:14:30 crc kubenswrapper[4998]: E0203 09:14:30.429230 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 09:14:34 crc kubenswrapper[4998]: I0203 09:14:34.115632 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hsqnp"] Feb 03 09:14:34 crc kubenswrapper[4998]: E0203 09:14:34.117704 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4776dd5e-3e76-44dd-9568-73466757bd9a" containerName="extract-content" Feb 03 09:14:34 crc kubenswrapper[4998]: I0203 09:14:34.117824 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="4776dd5e-3e76-44dd-9568-73466757bd9a" containerName="extract-content" Feb 03 09:14:34 crc kubenswrapper[4998]: E0203 09:14:34.117927 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4776dd5e-3e76-44dd-9568-73466757bd9a" containerName="registry-server" Feb 03 09:14:34 crc kubenswrapper[4998]: I0203 09:14:34.118016 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="4776dd5e-3e76-44dd-9568-73466757bd9a" containerName="registry-server" Feb 03 09:14:34 crc kubenswrapper[4998]: E0203 09:14:34.118111 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4776dd5e-3e76-44dd-9568-73466757bd9a" containerName="extract-utilities" Feb 03 09:14:34 crc kubenswrapper[4998]: I0203 09:14:34.118465 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="4776dd5e-3e76-44dd-9568-73466757bd9a" containerName="extract-utilities" Feb 03 09:14:34 crc kubenswrapper[4998]: I0203 09:14:34.118887 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="4776dd5e-3e76-44dd-9568-73466757bd9a" containerName="registry-server" Feb 03 09:14:34 crc kubenswrapper[4998]: I0203 09:14:34.121197 4998 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hsqnp" Feb 03 09:14:34 crc kubenswrapper[4998]: I0203 09:14:34.143100 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hsqnp"] Feb 03 09:14:34 crc kubenswrapper[4998]: I0203 09:14:34.145606 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2623707f-ff41-4a1b-be6f-632df4f80d74-catalog-content\") pod \"certified-operators-hsqnp\" (UID: \"2623707f-ff41-4a1b-be6f-632df4f80d74\") " pod="openshift-marketplace/certified-operators-hsqnp" Feb 03 09:14:34 crc kubenswrapper[4998]: I0203 09:14:34.145769 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9fc8z\" (UniqueName: \"kubernetes.io/projected/2623707f-ff41-4a1b-be6f-632df4f80d74-kube-api-access-9fc8z\") pod \"certified-operators-hsqnp\" (UID: \"2623707f-ff41-4a1b-be6f-632df4f80d74\") " pod="openshift-marketplace/certified-operators-hsqnp" Feb 03 09:14:34 crc kubenswrapper[4998]: I0203 09:14:34.146058 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2623707f-ff41-4a1b-be6f-632df4f80d74-utilities\") pod \"certified-operators-hsqnp\" (UID: \"2623707f-ff41-4a1b-be6f-632df4f80d74\") " pod="openshift-marketplace/certified-operators-hsqnp" Feb 03 09:14:34 crc kubenswrapper[4998]: I0203 09:14:34.252221 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2623707f-ff41-4a1b-be6f-632df4f80d74-catalog-content\") pod \"certified-operators-hsqnp\" (UID: \"2623707f-ff41-4a1b-be6f-632df4f80d74\") " pod="openshift-marketplace/certified-operators-hsqnp" Feb 03 09:14:34 crc kubenswrapper[4998]: I0203 09:14:34.252274 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9fc8z\" (UniqueName: \"kubernetes.io/projected/2623707f-ff41-4a1b-be6f-632df4f80d74-kube-api-access-9fc8z\") pod \"certified-operators-hsqnp\" (UID: \"2623707f-ff41-4a1b-be6f-632df4f80d74\") " pod="openshift-marketplace/certified-operators-hsqnp" Feb 03 09:14:34 crc kubenswrapper[4998]: I0203 09:14:34.252447 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2623707f-ff41-4a1b-be6f-632df4f80d74-utilities\") pod \"certified-operators-hsqnp\" (UID: \"2623707f-ff41-4a1b-be6f-632df4f80d74\") " pod="openshift-marketplace/certified-operators-hsqnp" Feb 03 09:14:34 crc kubenswrapper[4998]: I0203 09:14:34.252820 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2623707f-ff41-4a1b-be6f-632df4f80d74-catalog-content\") pod \"certified-operators-hsqnp\" (UID: \"2623707f-ff41-4a1b-be6f-632df4f80d74\") " pod="openshift-marketplace/certified-operators-hsqnp" Feb 03 09:14:34 crc kubenswrapper[4998]: I0203 09:14:34.254660 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2623707f-ff41-4a1b-be6f-632df4f80d74-utilities\") pod \"certified-operators-hsqnp\" (UID: \"2623707f-ff41-4a1b-be6f-632df4f80d74\") " pod="openshift-marketplace/certified-operators-hsqnp" Feb 03 09:14:34 crc kubenswrapper[4998]: I0203 09:14:34.281929 4998 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-9fc8z\" (UniqueName: \"kubernetes.io/projected/2623707f-ff41-4a1b-be6f-632df4f80d74-kube-api-access-9fc8z\") pod \"certified-operators-hsqnp\" (UID: \"2623707f-ff41-4a1b-be6f-632df4f80d74\") " pod="openshift-marketplace/certified-operators-hsqnp" Feb 03 09:14:34 crc kubenswrapper[4998]: I0203 09:14:34.492408 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hsqnp" Feb 03 09:14:35 crc kubenswrapper[4998]: I0203 09:14:35.038747 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hsqnp"] Feb 03 09:14:35 crc kubenswrapper[4998]: I0203 09:14:35.719434 4998 generic.go:334] "Generic (PLEG): container finished" podID="2623707f-ff41-4a1b-be6f-632df4f80d74" containerID="202d5b659307da09bd54ff2a6354d5aad295c36483435cd07963b0427777aedf" exitCode=0 Feb 03 09:14:35 crc kubenswrapper[4998]: I0203 09:14:35.719534 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hsqnp" event={"ID":"2623707f-ff41-4a1b-be6f-632df4f80d74","Type":"ContainerDied","Data":"202d5b659307da09bd54ff2a6354d5aad295c36483435cd07963b0427777aedf"} Feb 03 09:14:35 crc kubenswrapper[4998]: I0203 09:14:35.719739 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hsqnp" event={"ID":"2623707f-ff41-4a1b-be6f-632df4f80d74","Type":"ContainerStarted","Data":"1559c689791ca3082c643425e0296658f98c7f9dffe021d75258e306af3c6fcd"} Feb 03 09:14:36 crc kubenswrapper[4998]: I0203 09:14:36.729651 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hsqnp" event={"ID":"2623707f-ff41-4a1b-be6f-632df4f80d74","Type":"ContainerStarted","Data":"63aa69800548ca8fd9fce500d7bfe1ed4d7ebfa4b902b54612a226addc49bcd2"} Feb 03 09:14:37 crc kubenswrapper[4998]: I0203 09:14:37.033288 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-sync-bjggq"] Feb 03 09:14:37 crc kubenswrapper[4998]: I0203 09:14:37.042463 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-sync-bjggq"] Feb 03 09:14:38 crc kubenswrapper[4998]: I0203 09:14:38.441189 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc" path="/var/lib/kubelet/pods/8240e98d-2bdc-42c6-aa91-ee2f8f79a8bc/volumes" Feb 03 09:14:38 crc kubenswrapper[4998]: I0203 09:14:38.760255 4998 generic.go:334] "Generic (PLEG): container finished" podID="2623707f-ff41-4a1b-be6f-632df4f80d74" containerID="63aa69800548ca8fd9fce500d7bfe1ed4d7ebfa4b902b54612a226addc49bcd2" exitCode=0 Feb 03 09:14:38 crc kubenswrapper[4998]: I0203 09:14:38.760304 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hsqnp" event={"ID":"2623707f-ff41-4a1b-be6f-632df4f80d74","Type":"ContainerDied","Data":"63aa69800548ca8fd9fce500d7bfe1ed4d7ebfa4b902b54612a226addc49bcd2"} Feb 03 09:14:39 crc kubenswrapper[4998]: I0203 09:14:39.774759 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hsqnp" event={"ID":"2623707f-ff41-4a1b-be6f-632df4f80d74","Type":"ContainerStarted","Data":"6b026a239427bf1f4f4e7ae944b094e169d5307ecf0dc9a9eb0c7c8719f0b08c"} Feb 03 09:14:39 crc kubenswrapper[4998]: I0203 09:14:39.793604 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hsqnp" 
podStartSLOduration=2.311649913 podStartE2EDuration="5.793584506s" podCreationTimestamp="2026-02-03 09:14:34 +0000 UTC" firstStartedPulling="2026-02-03 09:14:35.72186069 +0000 UTC m=+8914.008554496" lastFinishedPulling="2026-02-03 09:14:39.203795283 +0000 UTC m=+8917.490489089" observedRunningTime="2026-02-03 09:14:39.791120966 +0000 UTC m=+8918.077814792" watchObservedRunningTime="2026-02-03 09:14:39.793584506 +0000 UTC m=+8918.080278322" Feb 03 09:14:44 crc kubenswrapper[4998]: I0203 09:14:44.428229 4998 scope.go:117] "RemoveContainer" containerID="8fd8d67f375b11a9322a22f6149a9dff658215432bfa58d01d17bb98b9b81d97" Feb 03 09:14:44 crc kubenswrapper[4998]: E0203 09:14:44.429208 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 09:14:44 crc kubenswrapper[4998]: I0203 09:14:44.493604 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hsqnp" Feb 03 09:14:44 crc kubenswrapper[4998]: I0203 09:14:44.494634 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hsqnp" Feb 03 09:14:44 crc kubenswrapper[4998]: I0203 09:14:44.563244 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hsqnp" Feb 03 09:14:44 crc kubenswrapper[4998]: I0203 09:14:44.890558 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hsqnp" Feb 03 09:14:44 crc kubenswrapper[4998]: I0203 09:14:44.978769 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hsqnp"] Feb 03 09:14:46 crc kubenswrapper[4998]: I0203 09:14:46.833570 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hsqnp" podUID="2623707f-ff41-4a1b-be6f-632df4f80d74" containerName="registry-server" containerID="cri-o://6b026a239427bf1f4f4e7ae944b094e169d5307ecf0dc9a9eb0c7c8719f0b08c" gracePeriod=2 Feb 03 09:14:47 crc kubenswrapper[4998]: I0203 09:14:47.673038 4998 util.go:48] "No ready sandbox for pod can be found. 
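Note: the pod_startup_latency_tracker entry above for certified-operators-hsqnp is internally consistent: podStartSLOduration is the end-to-end startup time minus the image-pull window, computed here from the monotonic (m=+) offsets of firstStartedPulling and lastFinishedPulling. A quick check using the values from that log entry (a sketch):

package main

import "fmt"

func main() {
	e2e := 5.793584506                      // podStartE2EDuration, seconds
	pull := 8917.490489089 - 8914.008554496 // lastFinishedPulling - firstStartedPulling (m=+ offsets)
	fmt.Printf("podStartSLOduration: %.9fs\n", e2e-pull) // 2.311649913s, matching the entry
}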
Need to start a new one" pod="openshift-marketplace/certified-operators-hsqnp" Feb 03 09:14:47 crc kubenswrapper[4998]: I0203 09:14:47.749085 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9fc8z\" (UniqueName: \"kubernetes.io/projected/2623707f-ff41-4a1b-be6f-632df4f80d74-kube-api-access-9fc8z\") pod \"2623707f-ff41-4a1b-be6f-632df4f80d74\" (UID: \"2623707f-ff41-4a1b-be6f-632df4f80d74\") " Feb 03 09:14:47 crc kubenswrapper[4998]: I0203 09:14:47.749387 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2623707f-ff41-4a1b-be6f-632df4f80d74-utilities\") pod \"2623707f-ff41-4a1b-be6f-632df4f80d74\" (UID: \"2623707f-ff41-4a1b-be6f-632df4f80d74\") " Feb 03 09:14:47 crc kubenswrapper[4998]: I0203 09:14:47.749418 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2623707f-ff41-4a1b-be6f-632df4f80d74-catalog-content\") pod \"2623707f-ff41-4a1b-be6f-632df4f80d74\" (UID: \"2623707f-ff41-4a1b-be6f-632df4f80d74\") " Feb 03 09:14:47 crc kubenswrapper[4998]: I0203 09:14:47.750505 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2623707f-ff41-4a1b-be6f-632df4f80d74-utilities" (OuterVolumeSpecName: "utilities") pod "2623707f-ff41-4a1b-be6f-632df4f80d74" (UID: "2623707f-ff41-4a1b-be6f-632df4f80d74"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 09:14:47 crc kubenswrapper[4998]: I0203 09:14:47.760063 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2623707f-ff41-4a1b-be6f-632df4f80d74-kube-api-access-9fc8z" (OuterVolumeSpecName: "kube-api-access-9fc8z") pod "2623707f-ff41-4a1b-be6f-632df4f80d74" (UID: "2623707f-ff41-4a1b-be6f-632df4f80d74"). InnerVolumeSpecName "kube-api-access-9fc8z". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:14:47 crc kubenswrapper[4998]: I0203 09:14:47.805532 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2623707f-ff41-4a1b-be6f-632df4f80d74-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2623707f-ff41-4a1b-be6f-632df4f80d74" (UID: "2623707f-ff41-4a1b-be6f-632df4f80d74"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 09:14:47 crc kubenswrapper[4998]: I0203 09:14:47.845683 4998 generic.go:334] "Generic (PLEG): container finished" podID="2623707f-ff41-4a1b-be6f-632df4f80d74" containerID="6b026a239427bf1f4f4e7ae944b094e169d5307ecf0dc9a9eb0c7c8719f0b08c" exitCode=0 Feb 03 09:14:47 crc kubenswrapper[4998]: I0203 09:14:47.845795 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hsqnp" Feb 03 09:14:47 crc kubenswrapper[4998]: I0203 09:14:47.845811 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hsqnp" event={"ID":"2623707f-ff41-4a1b-be6f-632df4f80d74","Type":"ContainerDied","Data":"6b026a239427bf1f4f4e7ae944b094e169d5307ecf0dc9a9eb0c7c8719f0b08c"} Feb 03 09:14:47 crc kubenswrapper[4998]: I0203 09:14:47.845857 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hsqnp" event={"ID":"2623707f-ff41-4a1b-be6f-632df4f80d74","Type":"ContainerDied","Data":"1559c689791ca3082c643425e0296658f98c7f9dffe021d75258e306af3c6fcd"} Feb 03 09:14:47 crc kubenswrapper[4998]: I0203 09:14:47.845899 4998 scope.go:117] "RemoveContainer" containerID="6b026a239427bf1f4f4e7ae944b094e169d5307ecf0dc9a9eb0c7c8719f0b08c" Feb 03 09:14:47 crc kubenswrapper[4998]: I0203 09:14:47.851052 4998 generic.go:334] "Generic (PLEG): container finished" podID="84c082cf-763b-4ee5-b4db-74e56eed0d85" containerID="e8f2906a319d5b07b56136ad96754f001fcc18a2635bca9a8e7ecb4d44ae1875" exitCode=0 Feb 03 09:14:47 crc kubenswrapper[4998]: I0203 09:14:47.851118 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-q7fx2/must-gather-bbfgf" event={"ID":"84c082cf-763b-4ee5-b4db-74e56eed0d85","Type":"ContainerDied","Data":"e8f2906a319d5b07b56136ad96754f001fcc18a2635bca9a8e7ecb4d44ae1875"} Feb 03 09:14:47 crc kubenswrapper[4998]: I0203 09:14:47.851874 4998 scope.go:117] "RemoveContainer" containerID="e8f2906a319d5b07b56136ad96754f001fcc18a2635bca9a8e7ecb4d44ae1875" Feb 03 09:14:47 crc kubenswrapper[4998]: I0203 09:14:47.851888 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9fc8z\" (UniqueName: \"kubernetes.io/projected/2623707f-ff41-4a1b-be6f-632df4f80d74-kube-api-access-9fc8z\") on node \"crc\" DevicePath \"\"" Feb 03 09:14:47 crc kubenswrapper[4998]: I0203 09:14:47.851951 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2623707f-ff41-4a1b-be6f-632df4f80d74-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 09:14:47 crc kubenswrapper[4998]: I0203 09:14:47.851977 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2623707f-ff41-4a1b-be6f-632df4f80d74-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 09:14:47 crc kubenswrapper[4998]: I0203 09:14:47.887982 4998 scope.go:117] "RemoveContainer" containerID="63aa69800548ca8fd9fce500d7bfe1ed4d7ebfa4b902b54612a226addc49bcd2" Feb 03 09:14:47 crc kubenswrapper[4998]: I0203 09:14:47.888651 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hsqnp"] Feb 03 09:14:47 crc kubenswrapper[4998]: I0203 09:14:47.897316 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hsqnp"] Feb 03 09:14:47 crc kubenswrapper[4998]: I0203 09:14:47.912724 4998 scope.go:117] "RemoveContainer" containerID="202d5b659307da09bd54ff2a6354d5aad295c36483435cd07963b0427777aedf" Feb 03 09:14:47 crc kubenswrapper[4998]: I0203 09:14:47.956838 4998 scope.go:117] "RemoveContainer" containerID="6b026a239427bf1f4f4e7ae944b094e169d5307ecf0dc9a9eb0c7c8719f0b08c" Feb 03 09:14:47 crc kubenswrapper[4998]: E0203 09:14:47.957801 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"6b026a239427bf1f4f4e7ae944b094e169d5307ecf0dc9a9eb0c7c8719f0b08c\": container with ID starting with 6b026a239427bf1f4f4e7ae944b094e169d5307ecf0dc9a9eb0c7c8719f0b08c not found: ID does not exist" containerID="6b026a239427bf1f4f4e7ae944b094e169d5307ecf0dc9a9eb0c7c8719f0b08c" Feb 03 09:14:47 crc kubenswrapper[4998]: I0203 09:14:47.957824 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b026a239427bf1f4f4e7ae944b094e169d5307ecf0dc9a9eb0c7c8719f0b08c"} err="failed to get container status \"6b026a239427bf1f4f4e7ae944b094e169d5307ecf0dc9a9eb0c7c8719f0b08c\": rpc error: code = NotFound desc = could not find container \"6b026a239427bf1f4f4e7ae944b094e169d5307ecf0dc9a9eb0c7c8719f0b08c\": container with ID starting with 6b026a239427bf1f4f4e7ae944b094e169d5307ecf0dc9a9eb0c7c8719f0b08c not found: ID does not exist" Feb 03 09:14:47 crc kubenswrapper[4998]: I0203 09:14:47.957843 4998 scope.go:117] "RemoveContainer" containerID="63aa69800548ca8fd9fce500d7bfe1ed4d7ebfa4b902b54612a226addc49bcd2" Feb 03 09:14:47 crc kubenswrapper[4998]: E0203 09:14:47.958237 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"63aa69800548ca8fd9fce500d7bfe1ed4d7ebfa4b902b54612a226addc49bcd2\": container with ID starting with 63aa69800548ca8fd9fce500d7bfe1ed4d7ebfa4b902b54612a226addc49bcd2 not found: ID does not exist" containerID="63aa69800548ca8fd9fce500d7bfe1ed4d7ebfa4b902b54612a226addc49bcd2" Feb 03 09:14:47 crc kubenswrapper[4998]: I0203 09:14:47.958256 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"63aa69800548ca8fd9fce500d7bfe1ed4d7ebfa4b902b54612a226addc49bcd2"} err="failed to get container status \"63aa69800548ca8fd9fce500d7bfe1ed4d7ebfa4b902b54612a226addc49bcd2\": rpc error: code = NotFound desc = could not find container \"63aa69800548ca8fd9fce500d7bfe1ed4d7ebfa4b902b54612a226addc49bcd2\": container with ID starting with 63aa69800548ca8fd9fce500d7bfe1ed4d7ebfa4b902b54612a226addc49bcd2 not found: ID does not exist" Feb 03 09:14:47 crc kubenswrapper[4998]: I0203 09:14:47.958270 4998 scope.go:117] "RemoveContainer" containerID="202d5b659307da09bd54ff2a6354d5aad295c36483435cd07963b0427777aedf" Feb 03 09:14:47 crc kubenswrapper[4998]: E0203 09:14:47.958537 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"202d5b659307da09bd54ff2a6354d5aad295c36483435cd07963b0427777aedf\": container with ID starting with 202d5b659307da09bd54ff2a6354d5aad295c36483435cd07963b0427777aedf not found: ID does not exist" containerID="202d5b659307da09bd54ff2a6354d5aad295c36483435cd07963b0427777aedf" Feb 03 09:14:47 crc kubenswrapper[4998]: I0203 09:14:47.958555 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"202d5b659307da09bd54ff2a6354d5aad295c36483435cd07963b0427777aedf"} err="failed to get container status \"202d5b659307da09bd54ff2a6354d5aad295c36483435cd07963b0427777aedf\": rpc error: code = NotFound desc = could not find container \"202d5b659307da09bd54ff2a6354d5aad295c36483435cd07963b0427777aedf\": container with ID starting with 202d5b659307da09bd54ff2a6354d5aad295c36483435cd07963b0427777aedf not found: ID does not exist" Feb 03 09:14:48 crc kubenswrapper[4998]: I0203 09:14:48.441089 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2623707f-ff41-4a1b-be6f-632df4f80d74" 
path="/var/lib/kubelet/pods/2623707f-ff41-4a1b-be6f-632df4f80d74/volumes" Feb 03 09:14:48 crc kubenswrapper[4998]: I0203 09:14:48.441607 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-q7fx2_must-gather-bbfgf_84c082cf-763b-4ee5-b4db-74e56eed0d85/gather/0.log" Feb 03 09:14:56 crc kubenswrapper[4998]: I0203 09:14:56.311744 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-q7fx2/must-gather-bbfgf"] Feb 03 09:14:56 crc kubenswrapper[4998]: I0203 09:14:56.312649 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-q7fx2/must-gather-bbfgf" podUID="84c082cf-763b-4ee5-b4db-74e56eed0d85" containerName="copy" containerID="cri-o://85a7b71a445187ac371b8ad6388581953258accce2f18f33c535816e3a58f1f6" gracePeriod=2 Feb 03 09:14:56 crc kubenswrapper[4998]: I0203 09:14:56.370042 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-q7fx2/must-gather-bbfgf"] Feb 03 09:14:56 crc kubenswrapper[4998]: I0203 09:14:56.774907 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-q7fx2_must-gather-bbfgf_84c082cf-763b-4ee5-b4db-74e56eed0d85/copy/0.log" Feb 03 09:14:56 crc kubenswrapper[4998]: I0203 09:14:56.775561 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-q7fx2/must-gather-bbfgf" Feb 03 09:14:56 crc kubenswrapper[4998]: I0203 09:14:56.879712 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x957v\" (UniqueName: \"kubernetes.io/projected/84c082cf-763b-4ee5-b4db-74e56eed0d85-kube-api-access-x957v\") pod \"84c082cf-763b-4ee5-b4db-74e56eed0d85\" (UID: \"84c082cf-763b-4ee5-b4db-74e56eed0d85\") " Feb 03 09:14:56 crc kubenswrapper[4998]: I0203 09:14:56.879923 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/84c082cf-763b-4ee5-b4db-74e56eed0d85-must-gather-output\") pod \"84c082cf-763b-4ee5-b4db-74e56eed0d85\" (UID: \"84c082cf-763b-4ee5-b4db-74e56eed0d85\") " Feb 03 09:14:56 crc kubenswrapper[4998]: I0203 09:14:56.892796 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84c082cf-763b-4ee5-b4db-74e56eed0d85-kube-api-access-x957v" (OuterVolumeSpecName: "kube-api-access-x957v") pod "84c082cf-763b-4ee5-b4db-74e56eed0d85" (UID: "84c082cf-763b-4ee5-b4db-74e56eed0d85"). InnerVolumeSpecName "kube-api-access-x957v". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:14:56 crc kubenswrapper[4998]: I0203 09:14:56.961961 4998 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-q7fx2_must-gather-bbfgf_84c082cf-763b-4ee5-b4db-74e56eed0d85/copy/0.log" Feb 03 09:14:56 crc kubenswrapper[4998]: I0203 09:14:56.962519 4998 generic.go:334] "Generic (PLEG): container finished" podID="84c082cf-763b-4ee5-b4db-74e56eed0d85" containerID="85a7b71a445187ac371b8ad6388581953258accce2f18f33c535816e3a58f1f6" exitCode=143 Feb 03 09:14:56 crc kubenswrapper[4998]: I0203 09:14:56.962583 4998 scope.go:117] "RemoveContainer" containerID="85a7b71a445187ac371b8ad6388581953258accce2f18f33c535816e3a58f1f6" Feb 03 09:14:56 crc kubenswrapper[4998]: I0203 09:14:56.962637 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-q7fx2/must-gather-bbfgf" Feb 03 09:14:56 crc kubenswrapper[4998]: I0203 09:14:56.982541 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x957v\" (UniqueName: \"kubernetes.io/projected/84c082cf-763b-4ee5-b4db-74e56eed0d85-kube-api-access-x957v\") on node \"crc\" DevicePath \"\"" Feb 03 09:14:56 crc kubenswrapper[4998]: I0203 09:14:56.983969 4998 scope.go:117] "RemoveContainer" containerID="e8f2906a319d5b07b56136ad96754f001fcc18a2635bca9a8e7ecb4d44ae1875" Feb 03 09:14:57 crc kubenswrapper[4998]: I0203 09:14:57.054667 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84c082cf-763b-4ee5-b4db-74e56eed0d85-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "84c082cf-763b-4ee5-b4db-74e56eed0d85" (UID: "84c082cf-763b-4ee5-b4db-74e56eed0d85"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 09:14:57 crc kubenswrapper[4998]: I0203 09:14:57.078799 4998 scope.go:117] "RemoveContainer" containerID="85a7b71a445187ac371b8ad6388581953258accce2f18f33c535816e3a58f1f6" Feb 03 09:14:57 crc kubenswrapper[4998]: E0203 09:14:57.079183 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85a7b71a445187ac371b8ad6388581953258accce2f18f33c535816e3a58f1f6\": container with ID starting with 85a7b71a445187ac371b8ad6388581953258accce2f18f33c535816e3a58f1f6 not found: ID does not exist" containerID="85a7b71a445187ac371b8ad6388581953258accce2f18f33c535816e3a58f1f6" Feb 03 09:14:57 crc kubenswrapper[4998]: I0203 09:14:57.079220 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85a7b71a445187ac371b8ad6388581953258accce2f18f33c535816e3a58f1f6"} err="failed to get container status \"85a7b71a445187ac371b8ad6388581953258accce2f18f33c535816e3a58f1f6\": rpc error: code = NotFound desc = could not find container \"85a7b71a445187ac371b8ad6388581953258accce2f18f33c535816e3a58f1f6\": container with ID starting with 85a7b71a445187ac371b8ad6388581953258accce2f18f33c535816e3a58f1f6 not found: ID does not exist" Feb 03 09:14:57 crc kubenswrapper[4998]: I0203 09:14:57.079241 4998 scope.go:117] "RemoveContainer" containerID="e8f2906a319d5b07b56136ad96754f001fcc18a2635bca9a8e7ecb4d44ae1875" Feb 03 09:14:57 crc kubenswrapper[4998]: E0203 09:14:57.079661 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e8f2906a319d5b07b56136ad96754f001fcc18a2635bca9a8e7ecb4d44ae1875\": container with ID starting with e8f2906a319d5b07b56136ad96754f001fcc18a2635bca9a8e7ecb4d44ae1875 not found: ID does not exist" containerID="e8f2906a319d5b07b56136ad96754f001fcc18a2635bca9a8e7ecb4d44ae1875" Feb 03 09:14:57 crc kubenswrapper[4998]: I0203 09:14:57.079700 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8f2906a319d5b07b56136ad96754f001fcc18a2635bca9a8e7ecb4d44ae1875"} err="failed to get container status \"e8f2906a319d5b07b56136ad96754f001fcc18a2635bca9a8e7ecb4d44ae1875\": rpc error: code = NotFound desc = could not find container \"e8f2906a319d5b07b56136ad96754f001fcc18a2635bca9a8e7ecb4d44ae1875\": container with ID starting with e8f2906a319d5b07b56136ad96754f001fcc18a2635bca9a8e7ecb4d44ae1875 not found: ID does not exist" Feb 03 09:14:57 crc kubenswrapper[4998]: I0203 09:14:57.084177 4998 
reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/84c082cf-763b-4ee5-b4db-74e56eed0d85-must-gather-output\") on node \"crc\" DevicePath \"\"" Feb 03 09:14:58 crc kubenswrapper[4998]: I0203 09:14:58.428114 4998 scope.go:117] "RemoveContainer" containerID="8fd8d67f375b11a9322a22f6149a9dff658215432bfa58d01d17bb98b9b81d97" Feb 03 09:14:58 crc kubenswrapper[4998]: E0203 09:14:58.429897 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 09:14:58 crc kubenswrapper[4998]: I0203 09:14:58.439707 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84c082cf-763b-4ee5-b4db-74e56eed0d85" path="/var/lib/kubelet/pods/84c082cf-763b-4ee5-b4db-74e56eed0d85/volumes" Feb 03 09:15:00 crc kubenswrapper[4998]: I0203 09:15:00.152481 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501835-7rwfc"] Feb 03 09:15:00 crc kubenswrapper[4998]: E0203 09:15:00.154441 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84c082cf-763b-4ee5-b4db-74e56eed0d85" containerName="gather" Feb 03 09:15:00 crc kubenswrapper[4998]: I0203 09:15:00.154563 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="84c082cf-763b-4ee5-b4db-74e56eed0d85" containerName="gather" Feb 03 09:15:00 crc kubenswrapper[4998]: E0203 09:15:00.154657 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2623707f-ff41-4a1b-be6f-632df4f80d74" containerName="extract-utilities" Feb 03 09:15:00 crc kubenswrapper[4998]: I0203 09:15:00.154738 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="2623707f-ff41-4a1b-be6f-632df4f80d74" containerName="extract-utilities" Feb 03 09:15:00 crc kubenswrapper[4998]: E0203 09:15:00.154849 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84c082cf-763b-4ee5-b4db-74e56eed0d85" containerName="copy" Feb 03 09:15:00 crc kubenswrapper[4998]: I0203 09:15:00.154927 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="84c082cf-763b-4ee5-b4db-74e56eed0d85" containerName="copy" Feb 03 09:15:00 crc kubenswrapper[4998]: E0203 09:15:00.155027 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2623707f-ff41-4a1b-be6f-632df4f80d74" containerName="extract-content" Feb 03 09:15:00 crc kubenswrapper[4998]: I0203 09:15:00.155106 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="2623707f-ff41-4a1b-be6f-632df4f80d74" containerName="extract-content" Feb 03 09:15:00 crc kubenswrapper[4998]: E0203 09:15:00.155932 4998 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2623707f-ff41-4a1b-be6f-632df4f80d74" containerName="registry-server" Feb 03 09:15:00 crc kubenswrapper[4998]: I0203 09:15:00.156033 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="2623707f-ff41-4a1b-be6f-632df4f80d74" containerName="registry-server" Feb 03 09:15:00 crc kubenswrapper[4998]: I0203 09:15:00.156435 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="84c082cf-763b-4ee5-b4db-74e56eed0d85" containerName="gather" Feb 03 09:15:00 crc kubenswrapper[4998]: I0203 09:15:00.156534 4998 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="2623707f-ff41-4a1b-be6f-632df4f80d74" containerName="registry-server" Feb 03 09:15:00 crc kubenswrapper[4998]: I0203 09:15:00.156626 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="84c082cf-763b-4ee5-b4db-74e56eed0d85" containerName="copy" Feb 03 09:15:00 crc kubenswrapper[4998]: I0203 09:15:00.157750 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501835-7rwfc" Feb 03 09:15:00 crc kubenswrapper[4998]: I0203 09:15:00.160715 4998 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 03 09:15:00 crc kubenswrapper[4998]: I0203 09:15:00.160736 4998 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 03 09:15:00 crc kubenswrapper[4998]: I0203 09:15:00.171372 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501835-7rwfc"] Feb 03 09:15:00 crc kubenswrapper[4998]: I0203 09:15:00.245626 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2c33048a-e5e9-4ca9-816b-1ca7846edee2-config-volume\") pod \"collect-profiles-29501835-7rwfc\" (UID: \"2c33048a-e5e9-4ca9-816b-1ca7846edee2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501835-7rwfc" Feb 03 09:15:00 crc kubenswrapper[4998]: I0203 09:15:00.246040 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2c33048a-e5e9-4ca9-816b-1ca7846edee2-secret-volume\") pod \"collect-profiles-29501835-7rwfc\" (UID: \"2c33048a-e5e9-4ca9-816b-1ca7846edee2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501835-7rwfc" Feb 03 09:15:00 crc kubenswrapper[4998]: I0203 09:15:00.246061 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x27p7\" (UniqueName: \"kubernetes.io/projected/2c33048a-e5e9-4ca9-816b-1ca7846edee2-kube-api-access-x27p7\") pod \"collect-profiles-29501835-7rwfc\" (UID: \"2c33048a-e5e9-4ca9-816b-1ca7846edee2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501835-7rwfc" Feb 03 09:15:00 crc kubenswrapper[4998]: I0203 09:15:00.347372 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2c33048a-e5e9-4ca9-816b-1ca7846edee2-config-volume\") pod \"collect-profiles-29501835-7rwfc\" (UID: \"2c33048a-e5e9-4ca9-816b-1ca7846edee2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501835-7rwfc" Feb 03 09:15:00 crc kubenswrapper[4998]: I0203 09:15:00.347523 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2c33048a-e5e9-4ca9-816b-1ca7846edee2-secret-volume\") pod \"collect-profiles-29501835-7rwfc\" (UID: \"2c33048a-e5e9-4ca9-816b-1ca7846edee2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501835-7rwfc" Feb 03 09:15:00 crc kubenswrapper[4998]: I0203 09:15:00.347556 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x27p7\" (UniqueName: \"kubernetes.io/projected/2c33048a-e5e9-4ca9-816b-1ca7846edee2-kube-api-access-x27p7\") 
pod \"collect-profiles-29501835-7rwfc\" (UID: \"2c33048a-e5e9-4ca9-816b-1ca7846edee2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501835-7rwfc" Feb 03 09:15:00 crc kubenswrapper[4998]: I0203 09:15:00.348479 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2c33048a-e5e9-4ca9-816b-1ca7846edee2-config-volume\") pod \"collect-profiles-29501835-7rwfc\" (UID: \"2c33048a-e5e9-4ca9-816b-1ca7846edee2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501835-7rwfc" Feb 03 09:15:00 crc kubenswrapper[4998]: I0203 09:15:00.365193 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x27p7\" (UniqueName: \"kubernetes.io/projected/2c33048a-e5e9-4ca9-816b-1ca7846edee2-kube-api-access-x27p7\") pod \"collect-profiles-29501835-7rwfc\" (UID: \"2c33048a-e5e9-4ca9-816b-1ca7846edee2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501835-7rwfc" Feb 03 09:15:00 crc kubenswrapper[4998]: I0203 09:15:00.365418 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2c33048a-e5e9-4ca9-816b-1ca7846edee2-secret-volume\") pod \"collect-profiles-29501835-7rwfc\" (UID: \"2c33048a-e5e9-4ca9-816b-1ca7846edee2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501835-7rwfc" Feb 03 09:15:00 crc kubenswrapper[4998]: I0203 09:15:00.481673 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501835-7rwfc" Feb 03 09:15:00 crc kubenswrapper[4998]: I0203 09:15:00.979034 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501835-7rwfc"] Feb 03 09:15:01 crc kubenswrapper[4998]: I0203 09:15:01.013133 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501835-7rwfc" event={"ID":"2c33048a-e5e9-4ca9-816b-1ca7846edee2","Type":"ContainerStarted","Data":"95b5ad37e9d391ca9fe72f55a52c8d12fdc789e164b40e0afceca72fb36e069a"} Feb 03 09:15:02 crc kubenswrapper[4998]: I0203 09:15:02.039081 4998 generic.go:334] "Generic (PLEG): container finished" podID="2c33048a-e5e9-4ca9-816b-1ca7846edee2" containerID="1e0bf8e06df0431e4010da092bec81a0ce11e4c032a7325282096717a7d08ea1" exitCode=0 Feb 03 09:15:02 crc kubenswrapper[4998]: I0203 09:15:02.039146 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501835-7rwfc" event={"ID":"2c33048a-e5e9-4ca9-816b-1ca7846edee2","Type":"ContainerDied","Data":"1e0bf8e06df0431e4010da092bec81a0ce11e4c032a7325282096717a7d08ea1"} Feb 03 09:15:03 crc kubenswrapper[4998]: I0203 09:15:03.558281 4998 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501835-7rwfc" Feb 03 09:15:03 crc kubenswrapper[4998]: I0203 09:15:03.639436 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x27p7\" (UniqueName: \"kubernetes.io/projected/2c33048a-e5e9-4ca9-816b-1ca7846edee2-kube-api-access-x27p7\") pod \"2c33048a-e5e9-4ca9-816b-1ca7846edee2\" (UID: \"2c33048a-e5e9-4ca9-816b-1ca7846edee2\") " Feb 03 09:15:03 crc kubenswrapper[4998]: I0203 09:15:03.640172 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2c33048a-e5e9-4ca9-816b-1ca7846edee2-config-volume\") pod \"2c33048a-e5e9-4ca9-816b-1ca7846edee2\" (UID: \"2c33048a-e5e9-4ca9-816b-1ca7846edee2\") " Feb 03 09:15:03 crc kubenswrapper[4998]: I0203 09:15:03.640419 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2c33048a-e5e9-4ca9-816b-1ca7846edee2-secret-volume\") pod \"2c33048a-e5e9-4ca9-816b-1ca7846edee2\" (UID: \"2c33048a-e5e9-4ca9-816b-1ca7846edee2\") " Feb 03 09:15:03 crc kubenswrapper[4998]: I0203 09:15:03.640949 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c33048a-e5e9-4ca9-816b-1ca7846edee2-config-volume" (OuterVolumeSpecName: "config-volume") pod "2c33048a-e5e9-4ca9-816b-1ca7846edee2" (UID: "2c33048a-e5e9-4ca9-816b-1ca7846edee2"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 09:15:03 crc kubenswrapper[4998]: I0203 09:15:03.641261 4998 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2c33048a-e5e9-4ca9-816b-1ca7846edee2-config-volume\") on node \"crc\" DevicePath \"\"" Feb 03 09:15:03 crc kubenswrapper[4998]: I0203 09:15:03.645364 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c33048a-e5e9-4ca9-816b-1ca7846edee2-kube-api-access-x27p7" (OuterVolumeSpecName: "kube-api-access-x27p7") pod "2c33048a-e5e9-4ca9-816b-1ca7846edee2" (UID: "2c33048a-e5e9-4ca9-816b-1ca7846edee2"). InnerVolumeSpecName "kube-api-access-x27p7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:15:03 crc kubenswrapper[4998]: I0203 09:15:03.656960 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c33048a-e5e9-4ca9-816b-1ca7846edee2-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "2c33048a-e5e9-4ca9-816b-1ca7846edee2" (UID: "2c33048a-e5e9-4ca9-816b-1ca7846edee2"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 09:15:03 crc kubenswrapper[4998]: I0203 09:15:03.743445 4998 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2c33048a-e5e9-4ca9-816b-1ca7846edee2-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 03 09:15:03 crc kubenswrapper[4998]: I0203 09:15:03.743491 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x27p7\" (UniqueName: \"kubernetes.io/projected/2c33048a-e5e9-4ca9-816b-1ca7846edee2-kube-api-access-x27p7\") on node \"crc\" DevicePath \"\"" Feb 03 09:15:04 crc kubenswrapper[4998]: I0203 09:15:04.057437 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501835-7rwfc" event={"ID":"2c33048a-e5e9-4ca9-816b-1ca7846edee2","Type":"ContainerDied","Data":"95b5ad37e9d391ca9fe72f55a52c8d12fdc789e164b40e0afceca72fb36e069a"} Feb 03 09:15:04 crc kubenswrapper[4998]: I0203 09:15:04.057595 4998 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="95b5ad37e9d391ca9fe72f55a52c8d12fdc789e164b40e0afceca72fb36e069a" Feb 03 09:15:04 crc kubenswrapper[4998]: I0203 09:15:04.057483 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501835-7rwfc" Feb 03 09:15:04 crc kubenswrapper[4998]: I0203 09:15:04.620145 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501790-q8nwj"] Feb 03 09:15:04 crc kubenswrapper[4998]: I0203 09:15:04.627738 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501790-q8nwj"] Feb 03 09:15:06 crc kubenswrapper[4998]: I0203 09:15:06.453534 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b547766-0acd-47b5-b97c-29bc1386cf48" path="/var/lib/kubelet/pods/6b547766-0acd-47b5-b97c-29bc1386cf48/volumes" Feb 03 09:15:10 crc kubenswrapper[4998]: I0203 09:15:10.436866 4998 scope.go:117] "RemoveContainer" containerID="8fd8d67f375b11a9322a22f6149a9dff658215432bfa58d01d17bb98b9b81d97" Feb 03 09:15:10 crc kubenswrapper[4998]: E0203 09:15:10.437651 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 09:15:14 crc kubenswrapper[4998]: I0203 09:15:14.866159 4998 scope.go:117] "RemoveContainer" containerID="caf2fe11ad7e12b30a1806da5b29690eabe454cf6b5324435c33c84e6e7bfcb3" Feb 03 09:15:14 crc kubenswrapper[4998]: I0203 09:15:14.916844 4998 scope.go:117] "RemoveContainer" containerID="4eb04a8240dc95bf6d65fd1eab1ee0df93ba950fb50bc64757e0366ae557c921" Feb 03 09:15:14 crc kubenswrapper[4998]: I0203 09:15:14.946240 4998 scope.go:117] "RemoveContainer" containerID="e52b47a773e311dcc1ef48bb0733087704e028407ac7a925de8f990b9c8d4273" Feb 03 09:15:14 crc kubenswrapper[4998]: I0203 09:15:14.995852 4998 scope.go:117] "RemoveContainer" containerID="9a8665c384b8fbea42750dff0ff54695f474e916caa895dd166e38303f2ce6df" Feb 03 09:15:24 crc kubenswrapper[4998]: I0203 09:15:24.427880 4998 scope.go:117] "RemoveContainer" 
containerID="8fd8d67f375b11a9322a22f6149a9dff658215432bfa58d01d17bb98b9b81d97" Feb 03 09:15:24 crc kubenswrapper[4998]: E0203 09:15:24.428639 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 09:15:35 crc kubenswrapper[4998]: I0203 09:15:35.427848 4998 scope.go:117] "RemoveContainer" containerID="8fd8d67f375b11a9322a22f6149a9dff658215432bfa58d01d17bb98b9b81d97" Feb 03 09:15:35 crc kubenswrapper[4998]: E0203 09:15:35.428733 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 09:15:50 crc kubenswrapper[4998]: I0203 09:15:50.428910 4998 scope.go:117] "RemoveContainer" containerID="8fd8d67f375b11a9322a22f6149a9dff658215432bfa58d01d17bb98b9b81d97" Feb 03 09:15:50 crc kubenswrapper[4998]: E0203 09:15:50.429921 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 09:16:04 crc kubenswrapper[4998]: I0203 09:16:04.427644 4998 scope.go:117] "RemoveContainer" containerID="8fd8d67f375b11a9322a22f6149a9dff658215432bfa58d01d17bb98b9b81d97" Feb 03 09:16:04 crc kubenswrapper[4998]: E0203 09:16:04.428637 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 09:16:18 crc kubenswrapper[4998]: I0203 09:16:18.428223 4998 scope.go:117] "RemoveContainer" containerID="8fd8d67f375b11a9322a22f6149a9dff658215432bfa58d01d17bb98b9b81d97" Feb 03 09:16:18 crc kubenswrapper[4998]: E0203 09:16:18.429022 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 09:16:25 crc kubenswrapper[4998]: I0203 09:16:25.881321 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-r9ffs"] Feb 03 09:16:25 crc kubenswrapper[4998]: E0203 09:16:25.882395 4998 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="2c33048a-e5e9-4ca9-816b-1ca7846edee2" containerName="collect-profiles" Feb 03 09:16:25 crc kubenswrapper[4998]: I0203 09:16:25.882415 4998 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c33048a-e5e9-4ca9-816b-1ca7846edee2" containerName="collect-profiles" Feb 03 09:16:25 crc kubenswrapper[4998]: I0203 09:16:25.882681 4998 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c33048a-e5e9-4ca9-816b-1ca7846edee2" containerName="collect-profiles" Feb 03 09:16:25 crc kubenswrapper[4998]: I0203 09:16:25.884456 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-r9ffs" Feb 03 09:16:25 crc kubenswrapper[4998]: I0203 09:16:25.897217 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-r9ffs"] Feb 03 09:16:26 crc kubenswrapper[4998]: I0203 09:16:26.084982 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1419a368-0c9b-4969-aeda-cba440fc1e67-utilities\") pod \"community-operators-r9ffs\" (UID: \"1419a368-0c9b-4969-aeda-cba440fc1e67\") " pod="openshift-marketplace/community-operators-r9ffs" Feb 03 09:16:26 crc kubenswrapper[4998]: I0203 09:16:26.085028 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ss5wj\" (UniqueName: \"kubernetes.io/projected/1419a368-0c9b-4969-aeda-cba440fc1e67-kube-api-access-ss5wj\") pod \"community-operators-r9ffs\" (UID: \"1419a368-0c9b-4969-aeda-cba440fc1e67\") " pod="openshift-marketplace/community-operators-r9ffs" Feb 03 09:16:26 crc kubenswrapper[4998]: I0203 09:16:26.085058 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1419a368-0c9b-4969-aeda-cba440fc1e67-catalog-content\") pod \"community-operators-r9ffs\" (UID: \"1419a368-0c9b-4969-aeda-cba440fc1e67\") " pod="openshift-marketplace/community-operators-r9ffs" Feb 03 09:16:26 crc kubenswrapper[4998]: I0203 09:16:26.186853 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1419a368-0c9b-4969-aeda-cba440fc1e67-utilities\") pod \"community-operators-r9ffs\" (UID: \"1419a368-0c9b-4969-aeda-cba440fc1e67\") " pod="openshift-marketplace/community-operators-r9ffs" Feb 03 09:16:26 crc kubenswrapper[4998]: I0203 09:16:26.186913 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ss5wj\" (UniqueName: \"kubernetes.io/projected/1419a368-0c9b-4969-aeda-cba440fc1e67-kube-api-access-ss5wj\") pod \"community-operators-r9ffs\" (UID: \"1419a368-0c9b-4969-aeda-cba440fc1e67\") " pod="openshift-marketplace/community-operators-r9ffs" Feb 03 09:16:26 crc kubenswrapper[4998]: I0203 09:16:26.186941 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1419a368-0c9b-4969-aeda-cba440fc1e67-catalog-content\") pod \"community-operators-r9ffs\" (UID: \"1419a368-0c9b-4969-aeda-cba440fc1e67\") " pod="openshift-marketplace/community-operators-r9ffs" Feb 03 09:16:26 crc kubenswrapper[4998]: I0203 09:16:26.187526 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1419a368-0c9b-4969-aeda-cba440fc1e67-utilities\") pod 
\"community-operators-r9ffs\" (UID: \"1419a368-0c9b-4969-aeda-cba440fc1e67\") " pod="openshift-marketplace/community-operators-r9ffs" Feb 03 09:16:26 crc kubenswrapper[4998]: I0203 09:16:26.187540 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1419a368-0c9b-4969-aeda-cba440fc1e67-catalog-content\") pod \"community-operators-r9ffs\" (UID: \"1419a368-0c9b-4969-aeda-cba440fc1e67\") " pod="openshift-marketplace/community-operators-r9ffs" Feb 03 09:16:26 crc kubenswrapper[4998]: I0203 09:16:26.781761 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ss5wj\" (UniqueName: \"kubernetes.io/projected/1419a368-0c9b-4969-aeda-cba440fc1e67-kube-api-access-ss5wj\") pod \"community-operators-r9ffs\" (UID: \"1419a368-0c9b-4969-aeda-cba440fc1e67\") " pod="openshift-marketplace/community-operators-r9ffs" Feb 03 09:16:26 crc kubenswrapper[4998]: I0203 09:16:26.818936 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-r9ffs" Feb 03 09:16:27 crc kubenswrapper[4998]: I0203 09:16:27.285811 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-r9ffs"] Feb 03 09:16:27 crc kubenswrapper[4998]: I0203 09:16:27.819261 4998 generic.go:334] "Generic (PLEG): container finished" podID="1419a368-0c9b-4969-aeda-cba440fc1e67" containerID="91aaecd2a21540251505dd15ec19d3a1a3248f9db095f1defca5337cf1055bdd" exitCode=0 Feb 03 09:16:27 crc kubenswrapper[4998]: I0203 09:16:27.819373 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r9ffs" event={"ID":"1419a368-0c9b-4969-aeda-cba440fc1e67","Type":"ContainerDied","Data":"91aaecd2a21540251505dd15ec19d3a1a3248f9db095f1defca5337cf1055bdd"} Feb 03 09:16:27 crc kubenswrapper[4998]: I0203 09:16:27.819565 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r9ffs" event={"ID":"1419a368-0c9b-4969-aeda-cba440fc1e67","Type":"ContainerStarted","Data":"3e596984deec5cb1f18bd0c17992e7ac4b9476d70c194bdbc10145b762c52cd9"} Feb 03 09:16:28 crc kubenswrapper[4998]: I0203 09:16:28.828431 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r9ffs" event={"ID":"1419a368-0c9b-4969-aeda-cba440fc1e67","Type":"ContainerStarted","Data":"c9d3bb23c7587c331904a50de5fa82261e21116c958d66cba6e4abcbcf9a5378"} Feb 03 09:16:28 crc kubenswrapper[4998]: I0203 09:16:28.899003 4998 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fmcfs"] Feb 03 09:16:28 crc kubenswrapper[4998]: I0203 09:16:28.901478 4998 util.go:30] "No sandbox for pod can be found. 
Feb 03 09:16:28 crc kubenswrapper[4998]: I0203 09:16:28.911133 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fmcfs"]
Feb 03 09:16:29 crc kubenswrapper[4998]: I0203 09:16:29.049492 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b77dd1d-1d2d-4056-8b86-57f71d1fba10-catalog-content\") pod \"redhat-operators-fmcfs\" (UID: \"2b77dd1d-1d2d-4056-8b86-57f71d1fba10\") " pod="openshift-marketplace/redhat-operators-fmcfs"
Feb 03 09:16:29 crc kubenswrapper[4998]: I0203 09:16:29.049775 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqr7w\" (UniqueName: \"kubernetes.io/projected/2b77dd1d-1d2d-4056-8b86-57f71d1fba10-kube-api-access-fqr7w\") pod \"redhat-operators-fmcfs\" (UID: \"2b77dd1d-1d2d-4056-8b86-57f71d1fba10\") " pod="openshift-marketplace/redhat-operators-fmcfs"
Feb 03 09:16:29 crc kubenswrapper[4998]: I0203 09:16:29.049944 4998 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b77dd1d-1d2d-4056-8b86-57f71d1fba10-utilities\") pod \"redhat-operators-fmcfs\" (UID: \"2b77dd1d-1d2d-4056-8b86-57f71d1fba10\") " pod="openshift-marketplace/redhat-operators-fmcfs"
Feb 03 09:16:29 crc kubenswrapper[4998]: I0203 09:16:29.150579 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b77dd1d-1d2d-4056-8b86-57f71d1fba10-catalog-content\") pod \"redhat-operators-fmcfs\" (UID: \"2b77dd1d-1d2d-4056-8b86-57f71d1fba10\") " pod="openshift-marketplace/redhat-operators-fmcfs"
Feb 03 09:16:29 crc kubenswrapper[4998]: I0203 09:16:29.150679 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqr7w\" (UniqueName: \"kubernetes.io/projected/2b77dd1d-1d2d-4056-8b86-57f71d1fba10-kube-api-access-fqr7w\") pod \"redhat-operators-fmcfs\" (UID: \"2b77dd1d-1d2d-4056-8b86-57f71d1fba10\") " pod="openshift-marketplace/redhat-operators-fmcfs"
Feb 03 09:16:29 crc kubenswrapper[4998]: I0203 09:16:29.150767 4998 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b77dd1d-1d2d-4056-8b86-57f71d1fba10-utilities\") pod \"redhat-operators-fmcfs\" (UID: \"2b77dd1d-1d2d-4056-8b86-57f71d1fba10\") " pod="openshift-marketplace/redhat-operators-fmcfs"
Feb 03 09:16:29 crc kubenswrapper[4998]: I0203 09:16:29.151091 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b77dd1d-1d2d-4056-8b86-57f71d1fba10-catalog-content\") pod \"redhat-operators-fmcfs\" (UID: \"2b77dd1d-1d2d-4056-8b86-57f71d1fba10\") " pod="openshift-marketplace/redhat-operators-fmcfs"
Feb 03 09:16:29 crc kubenswrapper[4998]: I0203 09:16:29.151282 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b77dd1d-1d2d-4056-8b86-57f71d1fba10-utilities\") pod \"redhat-operators-fmcfs\" (UID: \"2b77dd1d-1d2d-4056-8b86-57f71d1fba10\") " pod="openshift-marketplace/redhat-operators-fmcfs"
Feb 03 09:16:29 crc kubenswrapper[4998]: I0203 09:16:29.170725 4998 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqr7w\" (UniqueName: \"kubernetes.io/projected/2b77dd1d-1d2d-4056-8b86-57f71d1fba10-kube-api-access-fqr7w\") pod \"redhat-operators-fmcfs\" (UID: \"2b77dd1d-1d2d-4056-8b86-57f71d1fba10\") " pod="openshift-marketplace/redhat-operators-fmcfs"
\"kube-api-access-fqr7w\" (UniqueName: \"kubernetes.io/projected/2b77dd1d-1d2d-4056-8b86-57f71d1fba10-kube-api-access-fqr7w\") pod \"redhat-operators-fmcfs\" (UID: \"2b77dd1d-1d2d-4056-8b86-57f71d1fba10\") " pod="openshift-marketplace/redhat-operators-fmcfs" Feb 03 09:16:29 crc kubenswrapper[4998]: I0203 09:16:29.226485 4998 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fmcfs" Feb 03 09:16:29 crc kubenswrapper[4998]: I0203 09:16:29.683606 4998 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fmcfs"] Feb 03 09:16:29 crc kubenswrapper[4998]: W0203 09:16:29.690920 4998 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2b77dd1d_1d2d_4056_8b86_57f71d1fba10.slice/crio-5673428bd22f8399270e1c27a3e268b22947c493f9e8e320009afa384eec3674 WatchSource:0}: Error finding container 5673428bd22f8399270e1c27a3e268b22947c493f9e8e320009afa384eec3674: Status 404 returned error can't find the container with id 5673428bd22f8399270e1c27a3e268b22947c493f9e8e320009afa384eec3674 Feb 03 09:16:29 crc kubenswrapper[4998]: I0203 09:16:29.842182 4998 generic.go:334] "Generic (PLEG): container finished" podID="1419a368-0c9b-4969-aeda-cba440fc1e67" containerID="c9d3bb23c7587c331904a50de5fa82261e21116c958d66cba6e4abcbcf9a5378" exitCode=0 Feb 03 09:16:29 crc kubenswrapper[4998]: I0203 09:16:29.842267 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r9ffs" event={"ID":"1419a368-0c9b-4969-aeda-cba440fc1e67","Type":"ContainerDied","Data":"c9d3bb23c7587c331904a50de5fa82261e21116c958d66cba6e4abcbcf9a5378"} Feb 03 09:16:29 crc kubenswrapper[4998]: I0203 09:16:29.845772 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fmcfs" event={"ID":"2b77dd1d-1d2d-4056-8b86-57f71d1fba10","Type":"ContainerStarted","Data":"5673428bd22f8399270e1c27a3e268b22947c493f9e8e320009afa384eec3674"} Feb 03 09:16:30 crc kubenswrapper[4998]: I0203 09:16:30.427496 4998 scope.go:117] "RemoveContainer" containerID="8fd8d67f375b11a9322a22f6149a9dff658215432bfa58d01d17bb98b9b81d97" Feb 03 09:16:30 crc kubenswrapper[4998]: E0203 09:16:30.427832 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 09:16:30 crc kubenswrapper[4998]: I0203 09:16:30.858773 4998 generic.go:334] "Generic (PLEG): container finished" podID="2b77dd1d-1d2d-4056-8b86-57f71d1fba10" containerID="4159c7a2dad876840e7a8c4c5b7c8a16764331a49afbe001bf86674f63f36725" exitCode=0 Feb 03 09:16:30 crc kubenswrapper[4998]: I0203 09:16:30.859070 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fmcfs" event={"ID":"2b77dd1d-1d2d-4056-8b86-57f71d1fba10","Type":"ContainerDied","Data":"4159c7a2dad876840e7a8c4c5b7c8a16764331a49afbe001bf86674f63f36725"} Feb 03 09:16:30 crc kubenswrapper[4998]: I0203 09:16:30.865581 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r9ffs" 
event={"ID":"1419a368-0c9b-4969-aeda-cba440fc1e67","Type":"ContainerStarted","Data":"493f35ad55970a830efcf303acecb2022d877b26441d7e8ddd04cde54259dabb"} Feb 03 09:16:30 crc kubenswrapper[4998]: I0203 09:16:30.914675 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-r9ffs" podStartSLOduration=3.463200803 podStartE2EDuration="5.914617127s" podCreationTimestamp="2026-02-03 09:16:25 +0000 UTC" firstStartedPulling="2026-02-03 09:16:27.821553376 +0000 UTC m=+9026.108247202" lastFinishedPulling="2026-02-03 09:16:30.27296972 +0000 UTC m=+9028.559663526" observedRunningTime="2026-02-03 09:16:30.902569354 +0000 UTC m=+9029.189263170" watchObservedRunningTime="2026-02-03 09:16:30.914617127 +0000 UTC m=+9029.201310933" Feb 03 09:16:31 crc kubenswrapper[4998]: I0203 09:16:31.875506 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fmcfs" event={"ID":"2b77dd1d-1d2d-4056-8b86-57f71d1fba10","Type":"ContainerStarted","Data":"034ad76ad52e68541adbba975b68561a777094f37e9447a103b5d268ddfd2301"} Feb 03 09:16:36 crc kubenswrapper[4998]: I0203 09:16:36.819565 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-r9ffs" Feb 03 09:16:36 crc kubenswrapper[4998]: I0203 09:16:36.820150 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-r9ffs" Feb 03 09:16:37 crc kubenswrapper[4998]: I0203 09:16:37.515658 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-r9ffs" Feb 03 09:16:37 crc kubenswrapper[4998]: I0203 09:16:37.565395 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-r9ffs" Feb 03 09:16:37 crc kubenswrapper[4998]: I0203 09:16:37.749795 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-r9ffs"] Feb 03 09:16:37 crc kubenswrapper[4998]: I0203 09:16:37.934921 4998 generic.go:334] "Generic (PLEG): container finished" podID="2b77dd1d-1d2d-4056-8b86-57f71d1fba10" containerID="034ad76ad52e68541adbba975b68561a777094f37e9447a103b5d268ddfd2301" exitCode=0 Feb 03 09:16:37 crc kubenswrapper[4998]: I0203 09:16:37.934990 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fmcfs" event={"ID":"2b77dd1d-1d2d-4056-8b86-57f71d1fba10","Type":"ContainerDied","Data":"034ad76ad52e68541adbba975b68561a777094f37e9447a103b5d268ddfd2301"} Feb 03 09:16:38 crc kubenswrapper[4998]: I0203 09:16:38.945904 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fmcfs" event={"ID":"2b77dd1d-1d2d-4056-8b86-57f71d1fba10","Type":"ContainerStarted","Data":"da6b2d74a31af064f7466f7149dc4562f32e2904db041134b484c48e5e0f1df0"} Feb 03 09:16:38 crc kubenswrapper[4998]: I0203 09:16:38.946096 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-r9ffs" podUID="1419a368-0c9b-4969-aeda-cba440fc1e67" containerName="registry-server" containerID="cri-o://493f35ad55970a830efcf303acecb2022d877b26441d7e8ddd04cde54259dabb" gracePeriod=2 Feb 03 09:16:38 crc kubenswrapper[4998]: I0203 09:16:38.985263 4998 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fmcfs" podStartSLOduration=3.318034453 
Feb 03 09:16:39 crc kubenswrapper[4998]: I0203 09:16:39.227017 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fmcfs"
Feb 03 09:16:39 crc kubenswrapper[4998]: I0203 09:16:39.241256 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fmcfs"
Feb 03 09:16:39 crc kubenswrapper[4998]: I0203 09:16:39.506821 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-r9ffs"
Feb 03 09:16:39 crc kubenswrapper[4998]: I0203 09:16:39.678157 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1419a368-0c9b-4969-aeda-cba440fc1e67-utilities\") pod \"1419a368-0c9b-4969-aeda-cba440fc1e67\" (UID: \"1419a368-0c9b-4969-aeda-cba440fc1e67\") "
Feb 03 09:16:39 crc kubenswrapper[4998]: I0203 09:16:39.678296 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ss5wj\" (UniqueName: \"kubernetes.io/projected/1419a368-0c9b-4969-aeda-cba440fc1e67-kube-api-access-ss5wj\") pod \"1419a368-0c9b-4969-aeda-cba440fc1e67\" (UID: \"1419a368-0c9b-4969-aeda-cba440fc1e67\") "
Feb 03 09:16:39 crc kubenswrapper[4998]: I0203 09:16:39.678367 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1419a368-0c9b-4969-aeda-cba440fc1e67-catalog-content\") pod \"1419a368-0c9b-4969-aeda-cba440fc1e67\" (UID: \"1419a368-0c9b-4969-aeda-cba440fc1e67\") "
Feb 03 09:16:39 crc kubenswrapper[4998]: I0203 09:16:39.679094 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1419a368-0c9b-4969-aeda-cba440fc1e67-utilities" (OuterVolumeSpecName: "utilities") pod "1419a368-0c9b-4969-aeda-cba440fc1e67" (UID: "1419a368-0c9b-4969-aeda-cba440fc1e67"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 09:16:39 crc kubenswrapper[4998]: I0203 09:16:39.694992 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1419a368-0c9b-4969-aeda-cba440fc1e67-kube-api-access-ss5wj" (OuterVolumeSpecName: "kube-api-access-ss5wj") pod "1419a368-0c9b-4969-aeda-cba440fc1e67" (UID: "1419a368-0c9b-4969-aeda-cba440fc1e67"). InnerVolumeSpecName "kube-api-access-ss5wj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 09:16:39 crc kubenswrapper[4998]: I0203 09:16:39.735238 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1419a368-0c9b-4969-aeda-cba440fc1e67-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1419a368-0c9b-4969-aeda-cba440fc1e67" (UID: "1419a368-0c9b-4969-aeda-cba440fc1e67"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
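Annotation: the two "Observed pod startup duration" entries above encode a small calculation worth making explicit: podStartSLOduration is the end-to-end startup time minus the image-pull window, because pull time is excluded from the startup SLO. Recomputing from the monotonic (m=+…) offsets printed in those entries reproduces the logged values exactly:

package main

import "fmt"

func main() {
	// Monotonic offsets (seconds) copied from the two
	// "Observed pod startup duration" entries above.
	type pod struct {
		name                string
		e2e                 float64 // podStartE2EDuration
		firstStartedPulling float64 // m=+ offset
		lastFinishedPulling float64 // m=+ offset
	}
	pods := []pod{
		{"community-operators-r9ffs", 5.914617127, 9026.108247202, 9028.559663526},
		{"redhat-operators-fmcfs", 10.985239268, 9029.148620455, 9036.815825270},
	}
	for _, p := range pods {
		pull := p.lastFinishedPulling - p.firstStartedPulling
		slo := p.e2e - pull
		// Prints 3.463200803 and 3.318034453, matching podStartSLOduration.
		fmt.Printf("%s: pull=%.9fs slo=%.9fs\n", p.name, pull, slo)
	}
}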
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 09:16:39 crc kubenswrapper[4998]: I0203 09:16:39.780735 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1419a368-0c9b-4969-aeda-cba440fc1e67-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 09:16:39 crc kubenswrapper[4998]: I0203 09:16:39.780991 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ss5wj\" (UniqueName: \"kubernetes.io/projected/1419a368-0c9b-4969-aeda-cba440fc1e67-kube-api-access-ss5wj\") on node \"crc\" DevicePath \"\"" Feb 03 09:16:39 crc kubenswrapper[4998]: I0203 09:16:39.781063 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1419a368-0c9b-4969-aeda-cba440fc1e67-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 09:16:39 crc kubenswrapper[4998]: I0203 09:16:39.955450 4998 generic.go:334] "Generic (PLEG): container finished" podID="1419a368-0c9b-4969-aeda-cba440fc1e67" containerID="493f35ad55970a830efcf303acecb2022d877b26441d7e8ddd04cde54259dabb" exitCode=0 Feb 03 09:16:39 crc kubenswrapper[4998]: I0203 09:16:39.955530 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-r9ffs" Feb 03 09:16:39 crc kubenswrapper[4998]: I0203 09:16:39.955569 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r9ffs" event={"ID":"1419a368-0c9b-4969-aeda-cba440fc1e67","Type":"ContainerDied","Data":"493f35ad55970a830efcf303acecb2022d877b26441d7e8ddd04cde54259dabb"} Feb 03 09:16:39 crc kubenswrapper[4998]: I0203 09:16:39.955610 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r9ffs" event={"ID":"1419a368-0c9b-4969-aeda-cba440fc1e67","Type":"ContainerDied","Data":"3e596984deec5cb1f18bd0c17992e7ac4b9476d70c194bdbc10145b762c52cd9"} Feb 03 09:16:39 crc kubenswrapper[4998]: I0203 09:16:39.955629 4998 scope.go:117] "RemoveContainer" containerID="493f35ad55970a830efcf303acecb2022d877b26441d7e8ddd04cde54259dabb" Feb 03 09:16:39 crc kubenswrapper[4998]: I0203 09:16:39.998758 4998 scope.go:117] "RemoveContainer" containerID="c9d3bb23c7587c331904a50de5fa82261e21116c958d66cba6e4abcbcf9a5378" Feb 03 09:16:39 crc kubenswrapper[4998]: I0203 09:16:39.998897 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-r9ffs"] Feb 03 09:16:40 crc kubenswrapper[4998]: I0203 09:16:40.009803 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-r9ffs"] Feb 03 09:16:40 crc kubenswrapper[4998]: I0203 09:16:40.017946 4998 scope.go:117] "RemoveContainer" containerID="91aaecd2a21540251505dd15ec19d3a1a3248f9db095f1defca5337cf1055bdd" Feb 03 09:16:40 crc kubenswrapper[4998]: I0203 09:16:40.067459 4998 scope.go:117] "RemoveContainer" containerID="493f35ad55970a830efcf303acecb2022d877b26441d7e8ddd04cde54259dabb" Feb 03 09:16:40 crc kubenswrapper[4998]: E0203 09:16:40.067949 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"493f35ad55970a830efcf303acecb2022d877b26441d7e8ddd04cde54259dabb\": container with ID starting with 493f35ad55970a830efcf303acecb2022d877b26441d7e8ddd04cde54259dabb not found: ID does not exist" containerID="493f35ad55970a830efcf303acecb2022d877b26441d7e8ddd04cde54259dabb" Feb 03 09:16:40 crc kubenswrapper[4998]: I0203 09:16:40.067996 
Feb 03 09:16:40 crc kubenswrapper[4998]: I0203 09:16:40.068021 4998 scope.go:117] "RemoveContainer" containerID="c9d3bb23c7587c331904a50de5fa82261e21116c958d66cba6e4abcbcf9a5378"
Feb 03 09:16:40 crc kubenswrapper[4998]: E0203 09:16:40.068658 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c9d3bb23c7587c331904a50de5fa82261e21116c958d66cba6e4abcbcf9a5378\": container with ID starting with c9d3bb23c7587c331904a50de5fa82261e21116c958d66cba6e4abcbcf9a5378 not found: ID does not exist" containerID="c9d3bb23c7587c331904a50de5fa82261e21116c958d66cba6e4abcbcf9a5378"
Feb 03 09:16:40 crc kubenswrapper[4998]: I0203 09:16:40.068826 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9d3bb23c7587c331904a50de5fa82261e21116c958d66cba6e4abcbcf9a5378"} err="failed to get container status \"c9d3bb23c7587c331904a50de5fa82261e21116c958d66cba6e4abcbcf9a5378\": rpc error: code = NotFound desc = could not find container \"c9d3bb23c7587c331904a50de5fa82261e21116c958d66cba6e4abcbcf9a5378\": container with ID starting with c9d3bb23c7587c331904a50de5fa82261e21116c958d66cba6e4abcbcf9a5378 not found: ID does not exist"
Feb 03 09:16:40 crc kubenswrapper[4998]: I0203 09:16:40.068920 4998 scope.go:117] "RemoveContainer" containerID="91aaecd2a21540251505dd15ec19d3a1a3248f9db095f1defca5337cf1055bdd"
Feb 03 09:16:40 crc kubenswrapper[4998]: E0203 09:16:40.069282 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"91aaecd2a21540251505dd15ec19d3a1a3248f9db095f1defca5337cf1055bdd\": container with ID starting with 91aaecd2a21540251505dd15ec19d3a1a3248f9db095f1defca5337cf1055bdd not found: ID does not exist" containerID="91aaecd2a21540251505dd15ec19d3a1a3248f9db095f1defca5337cf1055bdd"
Feb 03 09:16:40 crc kubenswrapper[4998]: I0203 09:16:40.069313 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91aaecd2a21540251505dd15ec19d3a1a3248f9db095f1defca5337cf1055bdd"} err="failed to get container status \"91aaecd2a21540251505dd15ec19d3a1a3248f9db095f1defca5337cf1055bdd\": rpc error: code = NotFound desc = could not find container \"91aaecd2a21540251505dd15ec19d3a1a3248f9db095f1defca5337cf1055bdd\": container with ID starting with 91aaecd2a21540251505dd15ec19d3a1a3248f9db095f1defca5337cf1055bdd not found: ID does not exist"
Feb 03 09:16:40 crc kubenswrapper[4998]: I0203 09:16:40.305539 4998 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-fmcfs" podUID="2b77dd1d-1d2d-4056-8b86-57f71d1fba10" containerName="registry-server" probeResult="failure" output=<
Feb 03 09:16:40 crc kubenswrapper[4998]: 	timeout: failed to connect service ":50051" within 1s
Feb 03 09:16:40 crc kubenswrapper[4998]: >
Feb 03 09:16:40 crc kubenswrapper[4998]: I0203 09:16:40.437486 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1419a368-0c9b-4969-aeda-cba440fc1e67" path="/var/lib/kubelet/pods/1419a368-0c9b-4969-aeda-cba440fc1e67/volumes"
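Annotation: the multi-line "Probe failed" output shows the registry-server startup probe testing gRPC reachability on :50051 with a 1s timeout; right after creation the catalog hasn't finished loading, so the first attempt fails, and the "startup ... started" line at 09:16:49 below shows it eventually passing. A hedged sketch of a probe shape that produces this behavior (catalog pods commonly run grpc_health_probe against :50051 via an exec probe; the exact manifest is an assumption, not present in this log):

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

func main() {
	// Assumed shape of the registry-server startup probe: check gRPC on
	// :50051 with a 1s timeout (matching "within 1s" in the probe output
	// above), retried until the catalog content has loaded.
	probe := &corev1.Probe{
		ProbeHandler: corev1.ProbeHandler{
			Exec: &corev1.ExecAction{
				Command: []string{"grpc_health_probe", "-addr=:50051"},
			},
		},
		TimeoutSeconds:   1,
		PeriodSeconds:    10,
		FailureThreshold: 30, // keep retrying while the catalog loads
	}
	fmt.Printf("%+v\n", probe.ProbeHandler.Exec.Command)
}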
podUID="1419a368-0c9b-4969-aeda-cba440fc1e67" path="/var/lib/kubelet/pods/1419a368-0c9b-4969-aeda-cba440fc1e67/volumes" Feb 03 09:16:45 crc kubenswrapper[4998]: I0203 09:16:45.427417 4998 scope.go:117] "RemoveContainer" containerID="8fd8d67f375b11a9322a22f6149a9dff658215432bfa58d01d17bb98b9b81d97" Feb 03 09:16:45 crc kubenswrapper[4998]: E0203 09:16:45.428335 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 09:16:49 crc kubenswrapper[4998]: I0203 09:16:49.282494 4998 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fmcfs" Feb 03 09:16:49 crc kubenswrapper[4998]: I0203 09:16:49.329312 4998 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-fmcfs" Feb 03 09:16:49 crc kubenswrapper[4998]: I0203 09:16:49.518770 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fmcfs"] Feb 03 09:16:51 crc kubenswrapper[4998]: I0203 09:16:51.054845 4998 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-fmcfs" podUID="2b77dd1d-1d2d-4056-8b86-57f71d1fba10" containerName="registry-server" containerID="cri-o://da6b2d74a31af064f7466f7149dc4562f32e2904db041134b484c48e5e0f1df0" gracePeriod=2 Feb 03 09:16:51 crc kubenswrapper[4998]: I0203 09:16:51.545405 4998 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fmcfs" Feb 03 09:16:51 crc kubenswrapper[4998]: I0203 09:16:51.623170 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b77dd1d-1d2d-4056-8b86-57f71d1fba10-utilities\") pod \"2b77dd1d-1d2d-4056-8b86-57f71d1fba10\" (UID: \"2b77dd1d-1d2d-4056-8b86-57f71d1fba10\") " Feb 03 09:16:51 crc kubenswrapper[4998]: I0203 09:16:51.623248 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqr7w\" (UniqueName: \"kubernetes.io/projected/2b77dd1d-1d2d-4056-8b86-57f71d1fba10-kube-api-access-fqr7w\") pod \"2b77dd1d-1d2d-4056-8b86-57f71d1fba10\" (UID: \"2b77dd1d-1d2d-4056-8b86-57f71d1fba10\") " Feb 03 09:16:51 crc kubenswrapper[4998]: I0203 09:16:51.623310 4998 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b77dd1d-1d2d-4056-8b86-57f71d1fba10-catalog-content\") pod \"2b77dd1d-1d2d-4056-8b86-57f71d1fba10\" (UID: \"2b77dd1d-1d2d-4056-8b86-57f71d1fba10\") " Feb 03 09:16:51 crc kubenswrapper[4998]: I0203 09:16:51.624067 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2b77dd1d-1d2d-4056-8b86-57f71d1fba10-utilities" (OuterVolumeSpecName: "utilities") pod "2b77dd1d-1d2d-4056-8b86-57f71d1fba10" (UID: "2b77dd1d-1d2d-4056-8b86-57f71d1fba10"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 09:16:51 crc kubenswrapper[4998]: I0203 09:16:51.629145 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b77dd1d-1d2d-4056-8b86-57f71d1fba10-kube-api-access-fqr7w" (OuterVolumeSpecName: "kube-api-access-fqr7w") pod "2b77dd1d-1d2d-4056-8b86-57f71d1fba10" (UID: "2b77dd1d-1d2d-4056-8b86-57f71d1fba10"). InnerVolumeSpecName "kube-api-access-fqr7w". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 09:16:51 crc kubenswrapper[4998]: I0203 09:16:51.726856 4998 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqr7w\" (UniqueName: \"kubernetes.io/projected/2b77dd1d-1d2d-4056-8b86-57f71d1fba10-kube-api-access-fqr7w\") on node \"crc\" DevicePath \"\"" Feb 03 09:16:51 crc kubenswrapper[4998]: I0203 09:16:51.726882 4998 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b77dd1d-1d2d-4056-8b86-57f71d1fba10-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 09:16:51 crc kubenswrapper[4998]: I0203 09:16:51.763929 4998 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2b77dd1d-1d2d-4056-8b86-57f71d1fba10-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2b77dd1d-1d2d-4056-8b86-57f71d1fba10" (UID: "2b77dd1d-1d2d-4056-8b86-57f71d1fba10"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 09:16:51 crc kubenswrapper[4998]: I0203 09:16:51.828411 4998 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b77dd1d-1d2d-4056-8b86-57f71d1fba10-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 09:16:52 crc kubenswrapper[4998]: I0203 09:16:52.072441 4998 generic.go:334] "Generic (PLEG): container finished" podID="2b77dd1d-1d2d-4056-8b86-57f71d1fba10" containerID="da6b2d74a31af064f7466f7149dc4562f32e2904db041134b484c48e5e0f1df0" exitCode=0 Feb 03 09:16:52 crc kubenswrapper[4998]: I0203 09:16:52.072507 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fmcfs" event={"ID":"2b77dd1d-1d2d-4056-8b86-57f71d1fba10","Type":"ContainerDied","Data":"da6b2d74a31af064f7466f7149dc4562f32e2904db041134b484c48e5e0f1df0"} Feb 03 09:16:52 crc kubenswrapper[4998]: I0203 09:16:52.072565 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fmcfs" event={"ID":"2b77dd1d-1d2d-4056-8b86-57f71d1fba10","Type":"ContainerDied","Data":"5673428bd22f8399270e1c27a3e268b22947c493f9e8e320009afa384eec3674"} Feb 03 09:16:52 crc kubenswrapper[4998]: I0203 09:16:52.072581 4998 scope.go:117] "RemoveContainer" containerID="da6b2d74a31af064f7466f7149dc4562f32e2904db041134b484c48e5e0f1df0" Feb 03 09:16:52 crc kubenswrapper[4998]: I0203 09:16:52.072578 4998 util.go:48] "No ready sandbox for pod can be found. 
Feb 03 09:16:52 crc kubenswrapper[4998]: I0203 09:16:52.103114 4998 scope.go:117] "RemoveContainer" containerID="034ad76ad52e68541adbba975b68561a777094f37e9447a103b5d268ddfd2301"
Feb 03 09:16:52 crc kubenswrapper[4998]: I0203 09:16:52.124669 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fmcfs"]
Feb 03 09:16:52 crc kubenswrapper[4998]: I0203 09:16:52.130499 4998 scope.go:117] "RemoveContainer" containerID="4159c7a2dad876840e7a8c4c5b7c8a16764331a49afbe001bf86674f63f36725"
Feb 03 09:16:52 crc kubenswrapper[4998]: I0203 09:16:52.135354 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-fmcfs"]
Feb 03 09:16:52 crc kubenswrapper[4998]: I0203 09:16:52.174092 4998 scope.go:117] "RemoveContainer" containerID="da6b2d74a31af064f7466f7149dc4562f32e2904db041134b484c48e5e0f1df0"
Feb 03 09:16:52 crc kubenswrapper[4998]: E0203 09:16:52.174532 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da6b2d74a31af064f7466f7149dc4562f32e2904db041134b484c48e5e0f1df0\": container with ID starting with da6b2d74a31af064f7466f7149dc4562f32e2904db041134b484c48e5e0f1df0 not found: ID does not exist" containerID="da6b2d74a31af064f7466f7149dc4562f32e2904db041134b484c48e5e0f1df0"
Feb 03 09:16:52 crc kubenswrapper[4998]: I0203 09:16:52.174566 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da6b2d74a31af064f7466f7149dc4562f32e2904db041134b484c48e5e0f1df0"} err="failed to get container status \"da6b2d74a31af064f7466f7149dc4562f32e2904db041134b484c48e5e0f1df0\": rpc error: code = NotFound desc = could not find container \"da6b2d74a31af064f7466f7149dc4562f32e2904db041134b484c48e5e0f1df0\": container with ID starting with da6b2d74a31af064f7466f7149dc4562f32e2904db041134b484c48e5e0f1df0 not found: ID does not exist"
Feb 03 09:16:52 crc kubenswrapper[4998]: I0203 09:16:52.174591 4998 scope.go:117] "RemoveContainer" containerID="034ad76ad52e68541adbba975b68561a777094f37e9447a103b5d268ddfd2301"
Feb 03 09:16:52 crc kubenswrapper[4998]: E0203 09:16:52.175047 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"034ad76ad52e68541adbba975b68561a777094f37e9447a103b5d268ddfd2301\": container with ID starting with 034ad76ad52e68541adbba975b68561a777094f37e9447a103b5d268ddfd2301 not found: ID does not exist" containerID="034ad76ad52e68541adbba975b68561a777094f37e9447a103b5d268ddfd2301"
Feb 03 09:16:52 crc kubenswrapper[4998]: I0203 09:16:52.175068 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"034ad76ad52e68541adbba975b68561a777094f37e9447a103b5d268ddfd2301"} err="failed to get container status \"034ad76ad52e68541adbba975b68561a777094f37e9447a103b5d268ddfd2301\": rpc error: code = NotFound desc = could not find container \"034ad76ad52e68541adbba975b68561a777094f37e9447a103b5d268ddfd2301\": container with ID starting with 034ad76ad52e68541adbba975b68561a777094f37e9447a103b5d268ddfd2301 not found: ID does not exist"
Feb 03 09:16:52 crc kubenswrapper[4998]: I0203 09:16:52.175081 4998 scope.go:117] "RemoveContainer" containerID="4159c7a2dad876840e7a8c4c5b7c8a16764331a49afbe001bf86674f63f36725"
Feb 03 09:16:52 crc kubenswrapper[4998]: E0203 09:16:52.175399 4998 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4159c7a2dad876840e7a8c4c5b7c8a16764331a49afbe001bf86674f63f36725\": container with ID starting with 4159c7a2dad876840e7a8c4c5b7c8a16764331a49afbe001bf86674f63f36725 not found: ID does not exist" containerID="4159c7a2dad876840e7a8c4c5b7c8a16764331a49afbe001bf86674f63f36725"
err="rpc error: code = NotFound desc = could not find container \"4159c7a2dad876840e7a8c4c5b7c8a16764331a49afbe001bf86674f63f36725\": container with ID starting with 4159c7a2dad876840e7a8c4c5b7c8a16764331a49afbe001bf86674f63f36725 not found: ID does not exist" containerID="4159c7a2dad876840e7a8c4c5b7c8a16764331a49afbe001bf86674f63f36725" Feb 03 09:16:52 crc kubenswrapper[4998]: I0203 09:16:52.175444 4998 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4159c7a2dad876840e7a8c4c5b7c8a16764331a49afbe001bf86674f63f36725"} err="failed to get container status \"4159c7a2dad876840e7a8c4c5b7c8a16764331a49afbe001bf86674f63f36725\": rpc error: code = NotFound desc = could not find container \"4159c7a2dad876840e7a8c4c5b7c8a16764331a49afbe001bf86674f63f36725\": container with ID starting with 4159c7a2dad876840e7a8c4c5b7c8a16764331a49afbe001bf86674f63f36725 not found: ID does not exist" Feb 03 09:16:52 crc kubenswrapper[4998]: I0203 09:16:52.440013 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b77dd1d-1d2d-4056-8b86-57f71d1fba10" path="/var/lib/kubelet/pods/2b77dd1d-1d2d-4056-8b86-57f71d1fba10/volumes" Feb 03 09:16:58 crc kubenswrapper[4998]: I0203 09:16:58.427253 4998 scope.go:117] "RemoveContainer" containerID="8fd8d67f375b11a9322a22f6149a9dff658215432bfa58d01d17bb98b9b81d97" Feb 03 09:16:58 crc kubenswrapper[4998]: E0203 09:16:58.428277 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 09:17:01 crc kubenswrapper[4998]: I0203 09:17:01.052508 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-c2af-account-create-update-5b6b9"] Feb 03 09:17:01 crc kubenswrapper[4998]: I0203 09:17:01.065509 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-create-cgx48"] Feb 03 09:17:01 crc kubenswrapper[4998]: I0203 09:17:01.075974 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-c2af-account-create-update-5b6b9"] Feb 03 09:17:01 crc kubenswrapper[4998]: I0203 09:17:01.087057 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-create-cgx48"] Feb 03 09:17:02 crc kubenswrapper[4998]: I0203 09:17:02.440562 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61eefdf4-447e-4503-b1c7-b36d866c1aec" path="/var/lib/kubelet/pods/61eefdf4-447e-4503-b1c7-b36d866c1aec/volumes" Feb 03 09:17:02 crc kubenswrapper[4998]: I0203 09:17:02.441659 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aecb9cbd-a669-4ea1-969e-424637c3b33d" path="/var/lib/kubelet/pods/aecb9cbd-a669-4ea1-969e-424637c3b33d/volumes" Feb 03 09:17:13 crc kubenswrapper[4998]: I0203 09:17:13.035512 4998 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-sync-gghpz"] Feb 03 09:17:13 crc kubenswrapper[4998]: I0203 09:17:13.046955 4998 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-sync-gghpz"] Feb 03 09:17:13 crc kubenswrapper[4998]: I0203 09:17:13.427227 4998 scope.go:117] "RemoveContainer" containerID="8fd8d67f375b11a9322a22f6149a9dff658215432bfa58d01d17bb98b9b81d97" Feb 03 09:17:13 crc kubenswrapper[4998]: E0203 
Feb 03 09:17:14 crc kubenswrapper[4998]: I0203 09:17:14.451305 4998 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af5e8da4-94ba-438f-b45d-8052aff4265f" path="/var/lib/kubelet/pods/af5e8da4-94ba-438f-b45d-8052aff4265f/volumes"
Feb 03 09:17:15 crc kubenswrapper[4998]: I0203 09:17:15.159689 4998 scope.go:117] "RemoveContainer" containerID="e1a128cad548385ba72422f0fc1e5d5a3dc32c9886536cecba0a874cd65a5b11"
Feb 03 09:17:15 crc kubenswrapper[4998]: I0203 09:17:15.179348 4998 scope.go:117] "RemoveContainer" containerID="00478f3fdb678a0915b8dc7c98090feab63d5bbd325facb4ddd67955e212997c"
Feb 03 09:17:15 crc kubenswrapper[4998]: I0203 09:17:15.230571 4998 scope.go:117] "RemoveContainer" containerID="305fc5bd303af446d4cb6a4fd9ddc6a34f4f51d82f1c3395fb989d0833ed9c1c"
Feb 03 09:17:24 crc kubenswrapper[4998]: I0203 09:17:24.427983 4998 scope.go:117] "RemoveContainer" containerID="8fd8d67f375b11a9322a22f6149a9dff658215432bfa58d01d17bb98b9b81d97"
Feb 03 09:17:24 crc kubenswrapper[4998]: E0203 09:17:24.428921 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 09:17:39 crc kubenswrapper[4998]: I0203 09:17:39.430227 4998 scope.go:117] "RemoveContainer" containerID="8fd8d67f375b11a9322a22f6149a9dff658215432bfa58d01d17bb98b9b81d97"
Feb 03 09:17:39 crc kubenswrapper[4998]: E0203 09:17:39.431230 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 09:17:54 crc kubenswrapper[4998]: I0203 09:17:54.429489 4998 scope.go:117] "RemoveContainer" containerID="8fd8d67f375b11a9322a22f6149a9dff658215432bfa58d01d17bb98b9b81d97"
Feb 03 09:17:54 crc kubenswrapper[4998]: E0203 09:17:54.430302 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
Feb 03 09:18:07 crc kubenswrapper[4998]: I0203 09:18:07.427689 4998 scope.go:117] "RemoveContainer" containerID="8fd8d67f375b11a9322a22f6149a9dff658215432bfa58d01d17bb98b9b81d97"
Feb 03 09:18:07 crc kubenswrapper[4998]: E0203 09:18:07.428543 4998 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd"
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v9x5x_openshift-machine-config-operator(da1f1740-2fdd-4e7d-a740-039b8d39cfcd)\"" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" podUID="da1f1740-2fdd-4e7d-a740-039b8d39cfcd" Feb 03 09:18:20 crc kubenswrapper[4998]: I0203 09:18:20.428352 4998 scope.go:117] "RemoveContainer" containerID="8fd8d67f375b11a9322a22f6149a9dff658215432bfa58d01d17bb98b9b81d97" Feb 03 09:18:21 crc kubenswrapper[4998]: I0203 09:18:21.653848 4998 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v9x5x" event={"ID":"da1f1740-2fdd-4e7d-a740-039b8d39cfcd","Type":"ContainerStarted","Data":"b7834f29bd826568deb509f1a42c94d46775f42fe09117d880d3a8b8595fcb00"} var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515140336600024443 0ustar coreroot  Om77'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015140336601017361 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015140314060016477 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015140314060015447 5ustar corecore